Working on leaf

This commit is contained in:
mcrcortex
2024-08-12 08:28:34 +10:00
parent f055234463
commit 1d1e244f03
14 changed files with 289 additions and 51 deletions

View File

@@ -55,7 +55,7 @@ public class RenderService<T extends AbstractSectionRenderer<J, ?>, J extends Vi
        this.renderGen = new RenderGenerationService(world, this.modelService, serviceThreadPool, this.sectionUpdateQueue::add, this.sectionRenderer.getGeometryManager() instanceof IUsesMeshlets);
        positionFilterForwarder.setCallbacks(this.renderGen::enqueueTask, section -> {
-           long time = System.nanoTime();
+           long time = SectionUpdate.getTime();
            byte childExistence = section.getNonEmptyChildren();
            this.sectionUpdateQueue.add(new SectionUpdate(section.key, time, null, childExistence));

View File

@@ -67,7 +67,7 @@ public class RenderGenerationService {
            synchronized (this.taskQueue) {
                task = this.taskQueue.removeFirst();
            }
-           long time = System.nanoTime();
+           long time = SectionUpdate.getTime();
            var section = task.sectionSupplier.get();
            if (section == null) {
                this.resultConsumer.accept(new SectionUpdate(task.position, time, BuiltSection.empty(task.position), (byte) 0));

View File

@@ -3,4 +3,7 @@ package me.cortex.voxy.client.core.rendering.building;
import org.jetbrains.annotations.Nullable;

public record SectionUpdate(long position, long buildTime, @Nullable BuiltSection geometry, byte childExistence) {
+   public static long getTime() {
+       return System.currentTimeMillis();
+   }
}
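Both call sites above (RenderService and RenderGenerationService) now take their timestamp from SectionUpdate.getTime() instead of System.nanoTime(). A minimal sketch of why a single shared clock matters, assuming a hypothetical consumer that drops stale builds by comparing buildTime values (nanoTime and currentTimeMillis values are not mutually comparable, so mixing them would break this kind of ordering):

```java
// Hypothetical consumer sketch, not from the commit: drop updates older than the
// newest update already applied for the same section position. This only works if
// every SectionUpdate.buildTime() was sampled from the same clock, which is what
// funnelling both producers through SectionUpdate.getTime() guarantees.
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap;

class StaleUpdateFilter {
    private final Long2LongOpenHashMap lastApplied = new Long2LongOpenHashMap();

    StaleUpdateFilter() {
        this.lastApplied.defaultReturnValue(Long.MIN_VALUE);
    }

    /** Returns true if the update is newer than anything applied for its position. */
    boolean shouldApply(long position, long buildTime) {
        if (buildTime <= this.lastApplied.get(position)) {
            return false; // stale result from an older build
        }
        this.lastApplied.put(position, buildTime);
        return true;
    }
}
```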

View File

@@ -1,7 +1,9 @@
package me.cortex.voxy.client.core.rendering.hierachical2;

+import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
import it.unimi.dsi.fastutil.longs.Long2IntOpenHashMap;
+import me.cortex.voxy.client.core.rendering.building.BuiltSection;
import me.cortex.voxy.client.core.rendering.building.SectionPositionUpdateFilterer;
import me.cortex.voxy.client.core.rendering.building.SectionUpdate;
import me.cortex.voxy.client.core.rendering.section.AbstractSectionGeometryManager;
@@ -16,6 +18,7 @@ public class HierarchicalNodeManager {
    public final int maxNodeCount;
    private final NodeStore nodeData;
    private final Long2IntOpenHashMap activeSectionMap = new Long2IntOpenHashMap();
+   private final IntOpenHashSet nodeUpdates = new IntOpenHashSet();
    private final ExpandingObjectAllocationList<LeafExpansionRequest> leafRequests = new ExpandingObjectAllocationList<>(LeafExpansionRequest[]::new);
    private final AbstractSectionGeometryManager geometryManager;
    private final SectionPositionUpdateFilterer updateFilterer;
@@ -52,48 +55,51 @@ public class HierarchicalNodeManager {
        }
        this.nodeData.markRequestInFlight(node);

-       long pos = this.nodeData.nodePosition(node);
        //2 branches, either its a leaf node -> emit a leaf request
        // or the nodes geometry must be empty (i.e. culled from the graph/tree) so add to tracker and watch
        if (this.nodeData.isLeafNode(node)) {
-           //TODO: the localNodeData should have a bitset of what children are definitely empty
-           // use that to mask the request, HOWEVER there is a race condition e.g.
-           // leaf node is requested and has only 1 child marked as non empty
-           // however then an update occurs and a different child now becomes non empty,
-           // this will trigger a processBuildResult for parent
-           // so need to ensure that when that happens, if the parent has an inflight leaf expansion request
-           // for the leaf request to be updated to account for the new maybe child node
-           // NOTE: a section can have empty geometry but some of its children might not, so need to mark and
-           // submit a node at that level but with empty section, (specially marked) so that the traversal
-           // can recurse into those children as needed
-
-           //Enqueue a leaf expansion request
-           var request = new LeafExpansionRequest(pos);
-           int requestId = this.leafRequests.put(request);
-           for (int i = 0; i < 8; i++) {
-               long childPos = makeChildPos(pos, i);
-               //Insert all the children into the tracking map with the node id
-               this.activeSectionMap.put(childPos, 0);
-           }
+           this.makeLeafRequest(node);
        } else {
            //Verify that the node section is not in the section store. if it is then it is a state desynchronization
            // Note that a section can be "empty" but some of its children might not be
        }
    }

-   public void processResult(SectionUpdate update) {
-       if (update.geometry() != null) {
-           if (!update.geometry().isEmpty()) {
-               HierarchicalOcclusionTraverser.HACKY_SECTION_COUNT++;
-               this.geometryManager.uploadSection(update.geometry());
-           } else {
-               update.geometry().free();
-           }
-       }
-       if (true)
-           return;
+   private void makeLeafRequest(int node, byte childExistence) {
+       long pos = this.nodeData.nodePosition(node);
+       //TODO: the localNodeData should have a bitset of what children are definitely empty
+       // use that to mask the request, HOWEVER there is a race condition e.g.
+       // leaf node is requested and has only 1 child marked as non empty
+       // however then an update occurs and a different child now becomes non empty,
+       // this will trigger a processBuildResult for parent
+       // so need to ensure that when that happens, if the parent has an inflight leaf expansion request
+       // for the leaf request to be updated to account for the new maybe child node
+       // NOTE: a section can have empty geometry but some of its children might not, so need to mark and
+       // submit a node at that level but with empty section, (specially marked) so that the traversal
+       // can recurse into those children as needed
+
+       //Enqueue a leaf expansion request
+       var request = new LeafExpansionRequest(pos);
+       int requestId = this.leafRequests.put(request);
+       for (int i = 0; i < 8; i++) {
+           long childPos = makeChildPos(pos, i);
+           //Insert all the children into the tracking map with the node id
+           this.activeSectionMap.put(childPos, 0);
+       }
+   }
+
+   public void processResult(SectionUpdate update) {
+       //Need to handle cases
+       // geometry update, leaf node, leaf request node, internal node
+
+       //Child emptiness update!!! this is the hard bit
+       // if it is an internal node
+       //  if emptiness adds node, need to then send a mesh request and wait
+       //      when mesh result, need to remove the old child allocation block and make a new block to fit the
+       //      new count of children

        int nodeId = this.activeSectionMap.get(update.position());
        if (nodeId == -1) {
@@ -104,15 +110,49 @@ public class HierarchicalNodeManager {
        } else {
            //Part of a request (top bit is set to 1)
            if ((nodeId&(1<<31))!=0) {
+               nodeId &= ~(1<<31);
+               var request = this.leafRequests.get(nodeId);
            } else {
-               //Not part of a request, just a node update,
-               // however could result in a reallocation if it needs to mark a child position as being possibly visible
+               //Not part of a request, just a node update, if node is currently a leaf node, it might have a
+               // leaf request associated with it, which might need an update if
            }
        }
    }

+   private int updateNodeGeometry(int node, BuiltSection geometry) {
+       int previousGeometry = -1;
+       int newGeometry = -1;
+       if (this.nodeData.hasGeometry(node)) {
+           previousGeometry = this.nodeData.getNodeGeometry(node);
+           if (!geometry.isEmpty()) {
+               newGeometry = this.geometryManager.uploadReplaceSection(previousGeometry, geometry);
+           } else {
+               this.geometryManager.removeSection(previousGeometry);
+           }
+       } else {
+           if (!geometry.isEmpty()) {
+               newGeometry = this.geometryManager.uploadSection(geometry);
+           }
+       }
+
+       if (previousGeometry != newGeometry) {
+           this.nodeData.setNodeGeometry(node, newGeometry);
+           this.nodeUpdates.add(node);
+       }
+
+       if (previousGeometry == newGeometry) {
+           return 0;//No change
+       } else if (previousGeometry == -1) {
+           return 1;//Became non-empty
+       } else {
+           return 2;//Became empty
+       }
+   }
+
    private static long makeChildPos(long basePos, int addin) {
        int lvl = WorldEngine.getLevel(basePos);
        if (lvl == 0) {
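The activeSectionMap lookup above overloads a single int value: a value with bit 31 set is an index into leafRequests, anything else is a plain node id (hence the `(nodeId&(1<<31))!=0` test and the `nodeId &= ~(1<<31)` untagging). A self-contained sketch of that tagging convention, with illustrative helper names that are not part of the commit:

```java
// Sketch of the id-tagging convention used by activeSectionMap (names are
// illustrative): bit 31 marks "this value is an index into the leaf-request
// list", otherwise it is an ordinary node id.
final class TaggedId {
    private static final int REQUEST_TAG = 1 << 31;

    static int tagAsRequest(int requestId) {
        return requestId | REQUEST_TAG;
    }

    static boolean isRequest(int value) {
        return (value & REQUEST_TAG) != 0;
    }

    static int untag(int value) {
        return value & ~REQUEST_TAG;
    }

    public static void main(String[] args) {
        int stored = tagAsRequest(7);  // store request #7 in the map
        assert isRequest(stored);      // distinguishable from a node id
        assert untag(stored) == 7;     // recover the request index
        assert !isRequest(42);         // plain node ids are left untouched
    }
}
```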

View File

@@ -40,7 +40,6 @@ public class HierarchicalOcclusionTraverser {
    }

-   public static int HACKY_SECTION_COUNT = 0;
    public void doTraversal(Viewport<?> viewport, int depthBuffer) {
        //Compute the mip chain
        this.hiZBuffer.buildMipChain(depthBuffer, viewport.width, viewport.height);
@@ -51,6 +50,7 @@ public class HierarchicalOcclusionTraverser {
        //Use a chain of glDispatchComputeIndirect (5 times) with alternating read/write buffers
        // TODO: swap to persistent gpu thread instead
+       /*
        if (HACKY_SECTION_COUNT != 0) {
            long uploadPtr = UploadStream.INSTANCE.upload(this.renderList, 0, HACKY_SECTION_COUNT*4L+4);
@@ -61,6 +61,7 @@ public class HierarchicalOcclusionTraverser {
            UploadStream.INSTANCE.commit();
        }
+       */
        this.downloadResetRequestQueue();
    }

View File

@@ -5,9 +5,59 @@ class LeafExpansionRequest {
    //Child states contain micrometadata in the top bits
    // such as isEmpty, and isEmptyButEventuallyHasNonEmptyChild
    private final long nodePos;
-   private final int[] childStates = new int[8];
+   private final int[] childStates = new int[]{-1,-1,-1,-1,-1,-1,-1,-1};
+   private byte results;
+   private byte mask;

    LeafExpansionRequest(long nodePos) {
        this.nodePos = nodePos;
    }

+   public int putChildResult(int childIdx, int mesh) {
+       if ((this.mask&(1<<childIdx))==0) {
+           throw new IllegalStateException("Tried putting child into leaf which doesnt match mask");
+       }
+       //Note the mesh can be -ve meaning empty mesh, but we should still mark that node as having a result
+       boolean isFirstInsert = (this.results&(1<<childIdx))==0;
+       this.results |= (byte) (1<<childIdx);
+       int prev = this.childStates[childIdx];
+       this.childStates[childIdx] = mesh;
+       if (isFirstInsert) {
+           return -1;
+       } else {
+           return prev;
+       }
+   }
+
+   public int removeAndUnRequire(int childIdx) {
+       byte MSK = (byte) (1<<childIdx);
+       if ((this.mask&MSK)==0) {
+           throw new IllegalStateException("Tried removing and unmasking child that was never masked");
+       }
+       byte prev = this.results;
+       this.results &= (byte) ~MSK;
+       this.mask &= (byte) ~MSK;
+       int mesh = this.childStates[childIdx];
+       this.childStates[childIdx] = -1;
+       if ((prev&MSK)==0) {
+           return -1;
+       } else {
+           return mesh;
+       }
+   }
+
+   public void addChildRequirement(int childIdx) {
+       byte MSK = (byte) (1<<childIdx);
+       if ((this.mask&MSK)!=0) {
+           throw new IllegalStateException("Child already required!");
+       }
+       this.mask |= MSK;
+   }
+
+   public boolean isSatisfied() {
+       return (this.results&this.mask)==this.mask;
+   }
}
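A hedged usage sketch of the new bookkeeping: `mask` records which children must report, `results` records which of them have, and `isSatisfied()` fires once every required child has answered. The driver methods below are hypothetical (they are assumed to live in HierarchicalNodeManager, which owns `geometryManager`); they are not part of the commit.

```java
// Illustrative driver showing the intended life cycle of a LeafExpansionRequest.
// childExistence carries one bit per child section known to be non-empty.
private LeafExpansionRequest beginLeafExpansion(long nodePos, byte childExistence) {
    var request = new LeafExpansionRequest(nodePos);
    for (int child = 0; child < 8; child++) {
        if ((childExistence & (1 << child)) != 0) {
            request.addChildRequirement(child); // this child must report a build result
        }
    }
    return request;
}

// Later, as SectionUpdates for the required children arrive:
private void onChildBuilt(LeafExpansionRequest request, int childIdx, int meshId) {
    int previous = request.putChildResult(childIdx, meshId); // -1 on the first result
    if (previous != -1) {
        this.geometryManager.removeSection(previous); // a newer build superseded an older upload
    }
    if (request.isSatisfied()) {
        // every required child has answered; the leaf can now be expanded into real child nodes
    }
}
```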

View File

@@ -3,13 +3,51 @@ package me.cortex.voxy.client.core.rendering.hierachical2;
import me.cortex.voxy.common.util.HierarchicalBitSet;

public final class NodeStore {
+   private static final int LONGS_PER_NODE = 4;
+   private static final int INCREMENT_SIZE = 1<<16;
+
    private final HierarchicalBitSet allocationSet;
-   private final long[] localNodeData;
+   private long[] localNodeData;

    public NodeStore(int maxNodeCount) {
-       this.localNodeData = new long[maxNodeCount*4];
+       //Initial count is 1024
+       this.localNodeData = new long[INCREMENT_SIZE*LONGS_PER_NODE];
        this.allocationSet = new HierarchicalBitSet(maxNodeCount);
    }

+   public int allocate() {
+       int id = this.allocationSet.allocateNext();
+       if (id < 0) {
+           throw new IllegalStateException("Failed to allocate node slot!");
+       }
+       this.ensureSized(id);
+       return id;
+   }
+
+   public int allocate(int count) {
+       if (count <= 0) {
+           throw new IllegalArgumentException("Count cannot be <= 0");
+       }
+       int id = this.allocationSet.allocateNextConsecutiveCounted(count);
+       if (id < 0) {
+           throw new IllegalStateException("Failed to allocate " + count + " consecutive nodes!!");
+       }
+       this.ensureSized(id + (count-1));
+       return id;
+   }
+
+   //Ensures that index is within the array, if not, resizes to contain it + buffer zone
+   private void ensureSized(int index) {
+       if (index*LONGS_PER_NODE > this.localNodeData.length) {
+           int newSize = Math.min((index+INCREMENT_SIZE), this.allocationSet.getLimit());
+           long[] newStore = new long[newSize * LONGS_PER_NODE];
+           System.arraycopy(this.localNodeData, 0, newStore, 0, this.localNodeData.length);
+           this.localNodeData = newStore;
+       }
+   }
+
    public long nodePosition(int nodeId) {
        return this.localNodeData[nodeId<<2];
    }
@@ -19,6 +57,17 @@ public final class NodeStore {
    }

+   public boolean hasGeometry(int node) {
+       return false;
+   }
+
+   public int getNodeGeometry(int node) {
+       return 0;
+   }
+
+   public void setNodeGeometry(int node, int geometryId) {
+   }
+
    public void markRequestInFlight(int nodeId) {
    }
@@ -31,10 +80,11 @@ public final class NodeStore {
        return false;
    }

+   public byte getNodeChildExistence(int nodeId) {return 0;}
+
    //Writes out a nodes data to the ptr in the compacted/reduced format
    public void writeNode(long ptr, int nodeId) {

    }
}
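NodeStore now starts small and grows its backing array in INCREMENT_SIZE steps as ids are handed out, instead of reserving maxNodeCount*4 longs up front. A simplified stand-alone sketch of the same grow-on-demand pattern (no HierarchicalBitSet, dense id allocation, and a bounds check sized so the requested slot itself always fits); this is not the mod's class:

```java
// Stand-alone sketch of the resize strategy: slots are allocated densely and
// the long[] backing store grows in fixed increments, capped at the hard limit.
final class GrowingNodeStore {
    private static final int LONGS_PER_NODE = 4;
    private static final int INCREMENT_SIZE = 1 << 16;

    private final int maxNodeCount;
    private long[] data = new long[INCREMENT_SIZE * LONGS_PER_NODE];
    private int nextId = 0;

    GrowingNodeStore(int maxNodeCount) {
        this.maxNodeCount = maxNodeCount;
    }

    int allocate() {
        if (this.nextId >= this.maxNodeCount) {
            throw new IllegalStateException("Failed to allocate node slot!");
        }
        int id = this.nextId++;
        ensureSized(id);
        return id;
    }

    // Grow so that `index` fits, adding a buffer zone of INCREMENT_SIZE nodes
    // but never exceeding the configured maximum.
    private void ensureSized(int index) {
        if ((index + 1) * LONGS_PER_NODE > this.data.length) {
            int newCount = Math.min(index + INCREMENT_SIZE, this.maxNodeCount);
            this.data = java.util.Arrays.copyOf(this.data, newCount * LONGS_PER_NODE);
        }
    }

    long nodePosition(int nodeId) {
        return this.data[nodeId * LONGS_PER_NODE];
    }
}
```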

View File

@@ -107,8 +107,10 @@ public class HiZBuffer {
    public void free() {
        this.fb.free();
-       this.texture.free();
-       this.texture = null;
+       if (this.texture != null) {
+           this.texture.free();
+           this.texture = null;
+       }
        glDeleteSamplers(this.sampler);
        this.hiz.free();
    }

View File

@@ -0,0 +1,23 @@
package me.cortex.voxy.client.mixin.sodium;
import me.jellysquid.mods.sodium.client.gl.shader.ShaderParser;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Redirect;
import java.util.Collection;
import java.util.List;
@Mixin(value = ShaderParser.class, remap = false)
public class MixinShaderParser {
    /*
    @Redirect(method = "parseShader(Ljava/lang/String;)Ljava/util/List;", at = @At(value = "INVOKE", target = "Ljava/util/List;addAll(Ljava/util/Collection;)Z"))
    private static boolean injectLineNumbers(List<String> lines, Collection<? extends String> add) {
        lines.add("#line 1");
        int cc = lines.size();
        lines.addAll(add);
        lines.add("#line " + cc);
        return true;
    }
    */
}

View File

@@ -132,6 +132,9 @@ public class HierarchicalBitSet {
    public int getCount() {
        return this.cnt;
    }

+   public int getLimit() {
+       return this.limit;
+   }
+
    public boolean isSet(int idx) {
        return (this.D[idx>>6]&(1L<<(idx&0x3f)))!=0;

View File

@@ -5,6 +5,7 @@ import me.cortex.voxy.common.storage.StorageBackend;
import net.minecraft.block.Block;
import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
+import net.minecraft.block.RedstoneWireBlock;
import net.minecraft.nbt.NbtCompound;
import net.minecraft.nbt.NbtIo;
import net.minecraft.nbt.NbtOps;
@@ -138,7 +139,7 @@ public class Mapper {
        bentries.stream().sorted(Comparator.comparing(a->a.id)).forEach(entry -> {
            if (this.biomeId2biomeEntry.size() != entry.id) {
-               throw new IllegalStateException("Biome entry not ordered");
+               throw new IllegalStateException("Biome entry not ordered. got " + entry.biome + " with id " + entry.id + " expected id " + this.biomeId2biomeEntry.size());
            }
            this.biomeId2biomeEntry.add(entry);
        });
@@ -162,7 +163,7 @@ public class Mapper {
    }

    private synchronized BiomeEntry registerNewBiome(String biome) {
-       BiomeEntry entry = new BiomeEntry(this.biome2biomeEntry.size(), biome);
+       BiomeEntry entry = new BiomeEntry(this.biomeId2biomeEntry.size(), biome);
        //this.biome2biomeEntry.put(biome, entry);
        this.biomeId2biomeEntry.add(entry);
@@ -248,7 +249,7 @@ public class Mapper {
                continue;
            }
            if (this.blockId2stateEntry.indexOf(entry) != entry.id) {
-               throw new IllegalStateException("State Id NOT THE SAME, very critically bad");
+               throw new IllegalStateException("State Id NOT THE SAME, very critically bad. arr:" + this.blockId2stateEntry.indexOf(entry) + " entry: " + entry.id);
            }
            byte[] serialized = entry.serialize();
            ByteBuffer buffer = MemoryUtil.memAlloc(serialized.length);
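The registerNewBiome change fixes the id source: the load path above throws "Biome entry not ordered" whenever an entry's id and its index in biomeId2biomeEntry drift apart, and since the put into the name-keyed biome2biomeEntry map is commented out, that map's size can go stale. A small sketch of the invariant, with illustrative types that are not the mod's own:

```java
// Sketch of the ordering invariant: an entry's id must equal its index in the
// id-indexed list, so new ids are taken from that list's size, never from a
// name-keyed lookup map whose size may lag behind.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

record BiomeEntry(int id, String biome) {}

class BiomeRegistry {
    private final List<BiomeEntry> byId = new ArrayList<>();
    private final Map<String, BiomeEntry> byName = new HashMap<>();

    synchronized BiomeEntry register(String biome) {
        // byId defines the id -> entry mapping, so the new id must come from it.
        BiomeEntry entry = new BiomeEntry(this.byId.size(), biome);
        this.byId.add(entry);
        this.byName.put(biome, entry);
        return entry;
    }
}
```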

View File

@@ -0,0 +1,67 @@
layout(binding = HIZ_BINDING_INDEX) uniform sampler2DShadow hizDepthSampler;

vec3 minBB;
vec3 maxBB;
vec2 size;

//Sets up screenspace with the given node id, returns true on success false on failure/should not continue
//Accesses data that is setup in the main traversal and is just shared to here
void setupScreenspace(in UnpackedNode node) {
    //TODO: implement transform support
    Transform transform = transforms[getTransformIndex(node)];

    vec4 base = VP*vec4(vec3(((node.pos<<node.lodLevel)-camSecPos)<<5)-camSubSecPos, 1);

    //TODO: AABB SIZES not just a max cube
    //vec3 minPos = minSize + basePos;
    //vec3 maxPos = maxSize + basePos;

    minBB = base.xyz/base.w;
    maxBB = minBB;
    for (int i = 1; i < 8; i++) {
        //NOTE!: can't this be precomputed and put in an array?? in the scene uniform??
        vec4 pPoint = (VP*vec4(vec3((i&1)!=0,(i&2)!=0,(i&4)!=0),1))*(32<<node.lodLevel);//Size of section is 32x32x32 (need to change it to a bounding box in the future)
        pPoint += base;
        vec3 point = pPoint.xyz/pPoint.w;
        //TODO: CLIP TO VIEWPORT
        minBB = min(minBB, point);
        maxBB = max(maxBB, point);
    }

    //TODO: MORE ACCURATELY DETERMINE SCREENSPACE AREA, this can be done by computing and adding
    // the projected surface area of each face/quad which winding order faces the camera
    // (this is just the dot product of 2 projected vectors)
    //can do a funny by not doing the perspective divide except on the output of the area

    //printf("Screenspace MIN: %f, %f, %f MAX: %f, %f, %f", minBB.x,minBB.y,minBB.z, maxBB.x,maxBB.y,maxBB.z);

    size = maxBB.xy - minBB.xy;
}

//Checks if the node is implicitly culled (outside frustum)
bool outsideFrustum() {
    return any(lessThanEqual(maxBB, vec3(-1f, -1f, 0f))) || any(lessThanEqual(vec3(1f, 1f, 1f), minBB));
}

bool isCulledByHiz() {
    if (minBB.z < 0) {//Min point is behind the camera, it's always going to pass
        return false;
    }
    vec2 ssize = size.xy * vec2(ivec2(screenW, screenH));
    float miplevel = ceil(log2(max(max(ssize.x, ssize.y),1)));
    vec2 midpoint = (maxBB.xy + minBB.xy)*0.5;
    return textureLod(hizDepthSampler, vec3(midpoint, minBB.z), miplevel) > 0.0001;
}

//Returns whether we should descend into its children or not
bool shouldDecend() {
    //printf("Screen area %f: %f, %f", (size.x*size.y*float(screenW)*float(screenH)), float(screenW), float(screenH));
    return (size.x*size.y*screenW*screenH) > decendSSS;
}
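The shader above projects the eight corners of a section's bounding cube, keeps the screen-space min/max, and descends into children only when the projected area exceeds decendSSS. A CPU-side Java sketch of the same decision, simplified to a correct per-corner perspective divide with a column-major 4x4 matrix; the method names and the threshold parameter are illustrative, not the shader's uniforms.

```java
// Sketch of the "project the 8 AABB corners, measure screen-space area, descend
// if it is big enough" test, assuming a column-major view-projection matrix.
final class ScreenspaceSketch {
    // Multiply a column-major 4x4 matrix by (x, y, z, 1).
    static float[] project(float[] vp, float x, float y, float z) {
        float[] r = new float[4];
        for (int row = 0; row < 4; row++) {
            r[row] = vp[row] * x + vp[4 + row] * y + vp[8 + row] * z + vp[12 + row];
        }
        return r;
    }

    static boolean shouldDescend(float[] vp, float[] base, float size,
                                 int screenW, int screenH, float descendThreshold) {
        float minX = Float.POSITIVE_INFINITY, minY = Float.POSITIVE_INFINITY;
        float maxX = Float.NEGATIVE_INFINITY, maxY = Float.NEGATIVE_INFINITY;
        for (int i = 0; i < 8; i++) {
            float[] p = project(vp,
                    base[0] + ((i & 1) != 0 ? size : 0),
                    base[1] + ((i & 2) != 0 ? size : 0),
                    base[2] + ((i & 4) != 0 ? size : 0));
            float ndcX = p[0] / p[3], ndcY = p[1] / p[3]; // perspective divide per corner
            minX = Math.min(minX, ndcX); maxX = Math.max(maxX, ndcX);
            minY = Math.min(minY, ndcY); maxY = Math.max(maxY, ndcY);
        }
        // NDC spans [-1, 1], so half the extent times the screen size gives pixels.
        float pixelsX = (maxX - minX) * 0.5f * screenW;
        float pixelsY = (maxY - minY) * 0.5f * screenH;
        return pixelsX * pixelsY > descendThreshold;
    }
}
```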

View File

@@ -61,15 +61,12 @@ layout(binding = 2, std430) restrict buffer QueueData {
    uint[] queue;
} queue;
*/

-#line 1
#import <voxy:lod/hierarchical/transform.glsl>
-#line 1
#import <voxy:lod/hierarchical/node.glsl>
-#line 1
//Contains all the screenspace computation
#import <voxy:lod/hierarchical/screenspace.glsl>
-#line 58

//If a request is successfully added to the RequestQueue, must update NodeData to mark that the node has been put into the request queue
// to prevent it from being requested every frame and blocking the queue

View File

@@ -11,7 +11,8 @@
"nvidium.MixinRenderPipeline", "nvidium.MixinRenderPipeline",
"sodium.MixinDefaultChunkRenderer", "sodium.MixinDefaultChunkRenderer",
"sodium.MixinRenderSectionManager", "sodium.MixinRenderSectionManager",
"sodium.MixinSodiumWorldRender" "sodium.MixinSodiumWorldRender",
"sodium.MixinShaderParser"
], ],
"injectors": { "injectors": {
"defaultRequire": 1 "defaultRequire": 1