Eliminated file seeking and removed ByteBuffers, using raw memory instead, under the assumption that the system is always little-endian
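The change hinges on one byte-order trick: region files store their header and chunk lengths big-endian, while LWJGL's MemoryUtil reads raw memory in native order, which the commit assumes is little-endian, so every multi-byte read goes through Integer.reverseBytes. A minimal standalone sketch of that bridge (the class name and sample values are made up for illustration; MemoryUtil is the same LWJGL API the diff uses):

    import org.lwjgl.system.MemoryUtil;

    public class EndianSketch {
        public static void main(String[] args) {
            // Write the big-endian encoding of 0x00000A02 (sector offset 10,
            // sector count 2) into 4 bytes of raw native memory by hand.
            long addr = MemoryUtil.nmemAlloc(4);
            MemoryUtil.memPutByte(addr,     (byte) 0x00);
            MemoryUtil.memPutByte(addr + 1, (byte) 0x00);
            MemoryUtil.memPutByte(addr + 2, (byte) 0x0A);
            MemoryUtil.memPutByte(addr + 3, (byte) 0x02);

            // memGetInt reads in native order; on a little-endian host the bytes
            // come back swapped, and reverseBytes recovers the big-endian value.
            int sectorMeta = Integer.reverseBytes(MemoryUtil.memGetInt(addr));
            int sectorStart = sectorMeta >>> 8;            // upper 24 bits
            int sectorCount = sectorMeta & ((1 << 8) - 1); // low 8 bits
            System.out.println(sectorStart + ", " + sectorCount); // prints "10, 2"

            MemoryUtil.nmemFree(addr);
        }
    }

The same decoding appears in the importer diff below, where each sector-table entry packs a 24-bit sector offset and an 8-bit sector count.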
@@ -40,6 +40,12 @@ public class MemoryBuffer extends TrackedObject {
         UnsafeUtil.memcpy(this.address, dst, this.size);
     }
 
+    public MemoryBuffer cpyFrom(long src) {
+        super.assertNotFreed();
+        UnsafeUtil.memcpy(src, this.address, this.size);
+        return this;
+    }
+
     @Override
     public void free() {
         super.free0();
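The new cpyFrom mirrors the existing copy-out path (UnsafeUtil.memcpy(this.address, dst, this.size) above) in the opposite direction and returns this, so allocation and fill chain into one expression, which the importer change below relies on. A hypothetical usage sketch (baseAddress is a placeholder):

    // Copy one 4 KiB sector out of a larger native region in a single expression.
    var sector = new MemoryBuffer(4096).cpyFrom(baseAddress + 16 * 4096L);
    // ... read via sector.address ...
    sector.free(); // MemoryBuffer is manually managed; free it when done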
@@ -3,6 +3,8 @@ package me.cortex.voxy.commonImpl.importers;
 import com.mojang.serialization.Codec;
 import me.cortex.voxy.common.util.ByteBufferBackedInputStream;
 import me.cortex.voxy.common.Logger;
+import me.cortex.voxy.common.util.MemoryBuffer;
+import me.cortex.voxy.common.util.UnsafeUtil;
 import me.cortex.voxy.common.voxelization.VoxelizedSection;
 import me.cortex.voxy.common.voxelization.WorldConversionFactory;
 import me.cortex.voxy.common.world.WorldEngine;
@@ -192,32 +194,39 @@ public class WorldImporter {
     }
 
     private void importRegionFile(Path file, int x, int z) throws IOException {
-        //if (true) return;
         try (var fileStream = FileChannel.open(file, StandardOpenOption.READ)) {
-            var sectorsSavesBB = MemoryUtil.memAlloc(8192);
-            if (fileStream.read(sectorsSavesBB, 0) != 8192) {
+            var fileData = new MemoryBuffer(fileStream.size());
+            if (fileStream.read(fileData.asByteBuffer(), 0) < 8192) {
+                fileData.free();
                 System.err.println("Header of region file invalid");
                 return;
             }
-            sectorsSavesBB.rewind();
-            var sectorsSaves = sectorsSavesBB.order(ByteOrder.BIG_ENDIAN).asIntBuffer();
+            this.importRegionFile(fileData, x, z);
+            fileData.free();
+        }
+    }
+
+
+    private void importRegionFile(MemoryBuffer regionFile, int x, int z) throws IOException {
+        //if (true) return;
 
         //Find and load all saved chunks
         for (int idx = 0; idx < 1024; idx++) {
-            int sectorMeta = sectorsSaves.get(idx);
+            int sectorMeta = Integer.reverseBytes(MemoryUtil.memGetInt(regionFile.address+idx*4));//Assumes little endian
             if (sectorMeta == 0) {
                 //Empty chunk
                 continue;
            }
             int sectorStart = sectorMeta>>>8;
             int sectorCount = sectorMeta&((1<<8)-1);
-            var data = MemoryUtil.memAlloc(sectorCount*4096).order(ByteOrder.BIG_ENDIAN);
-            fileStream.read(data, sectorStart*4096L);
-            data.flip();
+
+            //TODO: create memory copy for each section
+            var data = new MemoryBuffer(sectorCount*4096).cpyFrom(regionFile.address+sectorStart*4096L);
+
             boolean addedToQueue = false;
             {
-                int m = data.getInt();
-                byte b = data.get();
+                int m = Integer.reverseBytes(MemoryUtil.memGetInt(data.address));
+                byte b = MemoryUtil.memGetByte(data.address+4L);
                 if (m == 0) {
                     System.err.println("Chunk is allocated, but stream is missing");
                 } else {
@@ -227,8 +236,8 @@ public class WorldImporter {
                         System.err.println("Chunk has both internal and external streams");
                     }
                     System.err.println("Chunk has external stream which is not supported");
-                } else if (n > data.remaining()) {
-                    System.err.println("Chunk stream is truncated: expected "+n+" but read " + data.remaining());
+                } else if (n > data.size-5) {
+                    System.err.println("Chunk stream is truncated: expected "+n+" but read " + (data.size-5));
                 } else if (n < 0) {
                     System.err.println("Declared size of chunk is negative");
                 } else {
@@ -238,9 +247,30 @@ public class WorldImporter {
                     return;
                 }
                 try {
-                    try (var decompressedData = this.decompress(b, new ByteBufferBackedInputStream(data))) {
+                    try (var decompressedData = this.decompress(b, new InputStream() {
+                        private long offset = 5;//For the initial 5 offset
+                        @Override
+                        public int read() {
+                            return MemoryUtil.memGetByte(data.address + (this.offset++)) & 0xFF;
+                        }
+
+                        @Override
+                        public int read(byte[] b, int off, int len) {
+                            len = Math.min(len, this.available());
+                            if (len == 0) {
+                                return -1;
+                            }
+                            UnsafeUtil.memcpy(data.address+this.offset, len, b, off); this.offset+=len;
+                            return len;
+                        }
+
+                        @Override
+                        public int available() {
+                            return (int) (data.size-this.offset);
+                        }
+                    })) {
                         if (decompressedData == null) {
-                            System.err.println("Error decompressing chunk data");
+                            Logger.error("Error decompressing chunk data");
                         } else {
                             var nbt = NbtIo.readCompound(decompressedData);
                             this.importChunkNBT(nbt, x, z);
@@ -249,7 +279,7 @@ public class WorldImporter {
                 } catch (Exception e) {
                     throw new RuntimeException(e);
                 } finally {
-                    MemoryUtil.memFree(data);
+                    data.free();
                 }
             });
             this.totalChunks.incrementAndGet();
@@ -259,12 +289,9 @@ public class WorldImporter {
                 }
             }
             if (!addedToQueue) {
-                MemoryUtil.memFree(data);
+                data.free();
             }
         }
-
-        MemoryUtil.memFree(sectorsSavesBB);
-        }
     }
 
     private DataInputStream decompress(byte flags, InputStream stream) throws IOException {
@@ -363,5 +390,4 @@ public class WorldImporter {
 
         this.world.insertUpdate(csec);
     }
-
 }
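For reference, the anonymous InputStream in the @@ -238,9 hunk is what replaces ByteBufferBackedInputStream: it streams straight out of native memory, skipping the 5-byte length-plus-compression-type chunk header. A self-contained sketch of the same pattern (the class name is invented, and unlike the commit's inline version this one bounds-checks read()):

    import java.io.InputStream;
    import org.lwjgl.system.MemoryUtil;

    /** Streams the native byte range [address + offset, address + size). */
    final class RawMemoryInputStream extends InputStream {
        private final long address;
        private final long size;
        private long offset;

        RawMemoryInputStream(long address, long size, long startOffset) {
            this.address = address;
            this.size = size;
            this.offset = startOffset; // e.g. 5 to skip the chunk stream header
        }

        @Override
        public int read() {
            if (this.offset >= this.size) return -1; // end of stream
            return MemoryUtil.memGetByte(this.address + (this.offset++)) & 0xFF;
        }

        @Override
        public int read(byte[] b, int off, int len) {
            if (len == 0) return 0;
            len = (int) Math.min(len, this.size - this.offset);
            if (len <= 0) return -1; // exhausted
            // Bulk-copy from native memory into the heap array.
            MemoryUtil.memByteBuffer(this.address + this.offset, len).get(b, off, len);
            this.offset += len;
            return len;
        }

        @Override
        public int available() {
            return (int) Math.min(Integer.MAX_VALUE, this.size - this.offset);
        }
    }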