Merge remote-tracking branch 'upstream/1.20/dev' into feat/multi-loader-1.21

# Conflicts:
#	.github/workflows/build.yml
#	forge/src/main/resources/META-INF/neoforge.mods.toml
IThundxr 2024-11-12 19:53:13 -05:00
commit fd786b0ad7
33 changed files with 1242 additions and 638 deletions

View file

@@ -9,6 +9,9 @@ charset = utf-8
 trim_trailing_whitespace = true
 insert_final_newline = true
 
+[*.yml]
+indent_size = 2
+
 [*.json]
 indent_size = 2
 max_line_length = 500

View file

@@ -1,43 +1,42 @@
-name: build
+name: Build
 
-on: [ pull_request, push ]
+on: [ workflow_dispatch, pull_request, push ]
+
+env:
+  JAVA_VERSION: 21
 
 jobs:
   build:
-    strategy:
-      matrix:
-        java: [
-          21    # Current Java LTS & minimum supported by Minecraft
-        ]
-        os: [ ubuntu-latest ]
-    runs-on: ${{ matrix.os }}
+    runs-on: ubuntu-latest
     steps:
-      - name: Checkout
+      - name: Checkout Repository
        uses: actions/checkout@v4
-      - name: Validate Gradle Wrapper
-        uses: gradle/actions/wrapper-validation@v3
-      - name: Gradle Cache
+
+      - name: Setup Java
+        run: echo "JAVA_HOME=$JAVA_HOME_${{ env.JAVA_VERSION }}_X64" >> "$GITHUB_ENV"
+
+      - name: Loom Cache
         uses: actions/cache@v4
         with:
-          path: |
-            ~/.gradle/caches
-            ~/.gradle/wrapper
-            .gradle/loom-cache
-            build/
-          key: ${{ runner.os }}-jdk${{ matrix.java }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle.properties', '**/gradle-wrapper.properties', '.github/workflows/build.yml') }}
-      - name: Setup JDK ${{ matrix.java }}
-        uses: actions/setup-java@v4
+          path: "**/.gradle/loom-cache"
+          key: "${{ runner.os }}-gradle-${{ hashFiles('**/libs.versions.*', '**/*.gradle*', '**/gradle-wrapper.properties') }}"
+          restore-keys: "${{ runner.os }}-gradle-"
+
+      - name: Setup Gradle
+        uses: gradle/actions/setup-gradle@v3
         with:
-          distribution: 'temurin'
-          java-version: ${{ matrix.java }}
-      - name: Make Gradle Wrapper Executable
-        if: ${{ runner.os != 'Windows' }}
-        run: chmod +x ./gradlew
+          gradle-home-cache-cleanup: true
+          cache-read-only: ${{ !endsWith(github.ref_name, '/dev') }}
+
+      - name: Validate Gradle Wrapper Integrity
+        uses: gradle/wrapper-validation-action@v2
+
       - name: Build
-        # doesn't actually publish, as no secrets are passed in, just makes sure that publishing works
-        run: ./gradlew publish --no-daemon
+        # Doesn't actually publish, as no secrets are passed in, just makes sure that publishing works
+        # Also generate the mod jars for the test job
+        run: ./gradlew remapTestModJar publish --no-daemon
+
       - name: Capture Build Artifacts
-        if: ${{ runner.os == 'Linux' && matrix.java == '17' }}
         uses: actions/upload-artifact@v4
         with:
           name: Artifacts
@@ -45,3 +44,42 @@ jobs:
             common/build/libs/
             fabric/build/libs/
             forge/build/libs/
+
+  test:
+    strategy:
+      fail-fast: false
+      matrix:
+        loader: [ forge, fabric ]
+    needs: build
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout Repository
+        uses: actions/checkout@v4
+
+      - name: Download build artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: Artifacts
+
+      - name: Setup Environment Variables
+        run: |
+          echo "MOD_VERSION=$(grep '^mod_version =' gradle.properties | cut -d'=' -f2 | tr -d ' ')" >> "$GITHUB_ENV"
+          echo "MINECRAFT_VERSION=$(grep '^minecraft_version =' gradle.properties | cut -d'=' -f2 | tr -d ' ')" >> "$GITHUB_ENV"
+          echo "FABRIC_API_VERSION=$(grep '^fabric_api_version =' gradle.properties | cut -d'=' -f2 | tr -d ' ' | sed 's/+.*//')" >> "$GITHUB_ENV"
+
+      - name: Move Test Mod and Flywheel into run/mods
+        run: |
+          mkdir -p run/mods
+          cp ${{ matrix.loader }}/build/libs/flywheel-${{ matrix.loader }}-${{ env.MINECRAFT_VERSION }}-${{ env.MOD_VERSION }}.jar run/mods
+          cp ${{ matrix.loader }}/build/libs/flywheel-${{ matrix.loader }}-${{ env.MINECRAFT_VERSION }}-${{ env.MOD_VERSION }}-testmod.jar run/mods
+
+      # Lock to a specific commit, it would be bad if the tag is re-pushed with unwanted changes
+      - name: Run the MC client
+        uses: 3arthqu4ke/mc-runtime-test@e72f8fe1134aabf6fc749a2a8c09bb56dd7d283e
+        with:
+          mc: ${{ env.MINECRAFT_VERSION }}
+          modloader: ${{ matrix.loader }}
+          regex: .*${{ matrix.loader }}.*
+          mc-runtime-test: none
+          java: ${{ env.JAVA_VERSION }}
+          fabric-api: ${{ matrix.loader == 'fabric' && env.FABRIC_API_VERSION || 'none' }}

View file

@@ -2,17 +2,17 @@ package dev.engine_room.gradle.platform
 
 import dev.engine_room.gradle.jarset.JarTaskSet
 import net.fabricmc.loom.api.LoomGradleExtensionAPI
+import net.fabricmc.loom.task.RemapJarTask
 import org.gradle.api.Project
+import org.gradle.api.Task
 import org.gradle.api.tasks.SourceSet
 import org.gradle.api.tasks.SourceSetContainer
 import org.gradle.api.tasks.compile.JavaCompile
 import org.gradle.api.tasks.javadoc.Javadoc
 import org.gradle.jvm.tasks.Jar
-import org.gradle.kotlin.dsl.named
-import org.gradle.kotlin.dsl.provideDelegate
-import org.gradle.kotlin.dsl.the
-import org.gradle.kotlin.dsl.withType
+import org.gradle.kotlin.dsl.*
 import org.gradle.language.jvm.tasks.ProcessResources
+import java.io.File
 import kotlin.properties.ReadWriteProperty
 import kotlin.reflect.KProperty
@@ -102,6 +102,29 @@ open class PlatformExtension(val project: Project) {
 		}
 	}
 
+	fun setupTestMod(sourceSet: SourceSet) {
+		project.tasks.apply {
+			val testModJar = register<Jar>("testModJar") {
+				from(sourceSet.output)
+				val file = File(project.layout.buildDirectory.asFile.get(), "devlibs");
+				destinationDirectory.set(file)
+				archiveClassifier = "testmod"
+			}
+
+			val remapTestModJar = register<RemapJarTask>("remapTestModJar") {
+				dependsOn(testModJar)
+
+				inputFile.set(testModJar.get().archiveFile)
+				archiveClassifier = "testmod"
+				addNestedDependencies = false
+
+				classpath.from(sourceSet.compileClasspath)
+			}
+
+			named<Task>("build").configure {
+				dependsOn(remapTestModJar)
+			}
+		}
+	}
+
 	private class DependentProject(private val thisProject: Project) : ReadWriteProperty<Any?, Project> {
 		private var value: Project? = null

View file

@@ -1,55 +0,0 @@
package dev.engine_room.flywheel.backend.compile;
// TODO: recycle to be invoked by the shader compiler
public class SourceChecks {
// public static final BiConsumer<ErrorReporter, SourceFile> LAYOUT_VERTEX = checkFunctionArity("flw_layoutVertex", 0);
// public static final BiConsumer<ErrorReporter, SourceFile> INSTANCE_VERTEX = checkFunctionParameterTypeExists("flw_instanceVertex", 1, 0);
// public static final BiConsumer<ErrorReporter, SourceFile> MATERIAL_VERTEX = checkFunctionArity("flw_materialVertex", 0);
// public static final BiConsumer<ErrorReporter, SourceFile> MATERIAL_FRAGMENT = checkFunctionArity("flw_materialFragment", 0);
// public static final BiConsumer<ErrorReporter, SourceFile> CONTEXT_VERTEX = checkFunctionArity("flw_contextVertex", 0);
// public static final BiConsumer<ErrorReporter, SourceFile> CONTEXT_FRAGMENT = checkFunctionArity("flw_contextFragment", 0).andThen(checkFunctionArity("flw_initFragment", 0));
// public static final BiConsumer<ErrorReporter, SourceFile> PIPELINE = checkFunctionArity("main", 0);
//
// public static BiConsumer<ErrorReporter, SourceFile> checkFunctionArity(String name, int arity) {
// return (errorReporter, file) -> checkFunctionArity(errorReporter, file, name, arity);
// }
//
// public static BiConsumer<ErrorReporter, SourceFile> checkFunctionParameterTypeExists(String name, int arity, int param) {
// return (errorReporter, file) -> {
// var func = checkFunctionArity(errorReporter, file, name, arity);
//
// if (func == null) {
// return;
// }
//
// var maybeStruct = func.getParameterType(param)
// .findStruct();
//
// if (maybeStruct.isEmpty()) {
// errorReporter.generateMissingStruct(file, func.getParameterType(param), "struct not defined");
// }
// };
// }
//
// /**
// * @return {@code null} if the function doesn't exist, or if the function has the wrong arity.
// */
// @Nullable
// private static ShaderFunction checkFunctionArity(ErrorReporter errorReporter, SourceFile file, String name, int arity) {
// Optional<ShaderFunction> maybeFunc = file.findFunction(name);
//
// if (maybeFunc.isEmpty()) {
// errorReporter.generateMissingFunction(file, name, "\"" + name + "\" function not defined");
// return null;
// }
//
// ShaderFunction func = maybeFunc.get();
// ImmutableList<ShaderVariable> params = func.getParameters();
// if (params.size() != arity) {
// errorReporter.generateFunctionArgumentCountError(name, arity, func.getArgs());
// return null;
// }
//
// return func;
// }
}

View file

@@ -62,7 +62,7 @@ public abstract class InstanceAssemblerComponent implements SourceComponent {
 		FLOAT_UNPACKING_FUNCS.put(FloatRepr.UNSIGNED_INT, e -> e.cast("float"));
 		FLOAT_UNPACKING_FUNCS.put(FloatRepr.NORMALIZED_UNSIGNED_INT, e -> e.cast("float").div(4294967295f));
-		FLOAT_UNPACKING_FUNCS.put(FloatRepr.FLOAT, e -> e.callFunction("uintBitsToFloat")); // FIXME: GLSL 330+
+		FLOAT_UNPACKING_FUNCS.put(FloatRepr.FLOAT, e -> e.callFunction("uintBitsToFloat"));
 	}
 
 	protected final Layout layout;

View file

@@ -1,27 +1,14 @@
 package dev.engine_room.flywheel.backend.engine;
 
-import java.util.ArrayList;
-
-import org.jetbrains.annotations.Nullable;
-
 import dev.engine_room.flywheel.api.instance.Instance;
 import dev.engine_room.flywheel.api.instance.InstanceType;
 import dev.engine_room.flywheel.api.instance.Instancer;
 import dev.engine_room.flywheel.backend.engine.embed.Environment;
-import dev.engine_room.flywheel.backend.util.AtomicBitSet;
 
-public abstract class AbstractInstancer<I extends Instance> implements Instancer<I>, InstanceHandleImpl.State<I> {
+public abstract class AbstractInstancer<I extends Instance> implements Instancer<I> {
 	public final InstanceType<I> type;
 	public final Environment environment;
-	private final Recreate<I> recreate;
-
-	// Lock for all instances, only needs to be used in methods that may run on the TaskExecutor.
-	protected final Object lock = new Object();
-	protected final ArrayList<I> instances = new ArrayList<>();
-	protected final ArrayList<InstanceHandleImpl<I>> handles = new ArrayList<>();
-
-	protected final AtomicBitSet changed = new AtomicBitSet();
-	protected final AtomicBitSet deleted = new AtomicBitSet();
+	public final Recreate<I> recreate;
 
 	protected AbstractInstancer(InstancerKey<I> key, Recreate<I> recreate) {
 		this.type = key.type();
@@ -29,218 +16,16 @@ public abstract class AbstractInstancer<I extends Instance> implements Instancer
		this.recreate = recreate;
	}

	public abstract InstanceHandleImpl.State<I> revealInstance(InstanceHandleImpl<I> handle, I instance);

	public abstract int instanceCount();

	public abstract void parallelUpdate();

	@Override
	public InstanceHandleImpl.State<I> setChanged(int index) {
		notifyDirty(index);
		return this;
	}

	@Override
	public InstanceHandleImpl.State<I> setDeleted(int index) {
		notifyRemoval(index);
		return InstanceHandleImpl.Deleted.instance();
	}

	@Override
public InstanceHandleImpl.State<I> setVisible(InstanceHandleImpl<I> handle, int index, boolean visible) {
if (visible) {
return this;
}
notifyRemoval(index);
I instance;
synchronized (lock) {
// I think we need to lock to prevent wacky stuff from happening if the array gets resized.
instance = instances.get(index);
}
return new InstanceHandleImpl.Hidden<>(recreate, instance);
}
@Override
public I createInstance() {
var handle = new InstanceHandleImpl<>(this);
I instance = type.create(handle);
synchronized (lock) {
handle.index = instances.size();
addLocked(instance, handle);
return instance;
}
}
public void revealInstance(InstanceHandleImpl<I> handle, I instance) {
synchronized (lock) {
handle.index = instances.size();
addLocked(instance, handle);
}
}
@Override
public void stealInstance(@Nullable I instance) {
if (instance == null) {
return;
}
var instanceHandle = instance.handle();
if (!(instanceHandle instanceof InstanceHandleImpl<?>)) {
// UB: do nothing
return;
}
// Should InstanceType have an isInstance method?
@SuppressWarnings("unchecked")
var handle = (InstanceHandleImpl<I>) instanceHandle;
// No need to steal if this instance is already owned by this instancer.
if (handle.state == this) {
return;
}
// Not allowed to steal deleted instances.
if (handle.state instanceof InstanceHandleImpl.Deleted) {
return;
}
// No need to steal if the instance will recreate to us.
if (handle.state instanceof InstanceHandleImpl.Hidden<I> hidden && recreate.equals(hidden.recreate())) {
return;
}
// FIXME: in theory there could be a race condition here if the instance
// is somehow being stolen by 2 different instancers between threads.
// That seems kinda impossible so I'm fine leaving it as is for now.
// Add the instance to this instancer.
if (handle.state instanceof AbstractInstancer<I> other) {
// Remove the instance from its old instancer.
// This won't have any unwanted effect when the old instancer
// is filtering deleted instances later, so is safe.
other.notifyRemoval(handle.index);
handle.state = this;
// Only lock now that we'll be mutating our state.
synchronized (lock) {
handle.index = instances.size();
addLocked(instance, handle);
}
} else if (handle.state instanceof InstanceHandleImpl.Hidden<I>) {
handle.state = new InstanceHandleImpl.Hidden<>(recreate, instance);
}
}
/**
* Calls must be synchronized on {@link #lock}.
*/
private void addLocked(I instance, InstanceHandleImpl<I> handle) {
instances.add(instance);
handles.add(handle);
setIndexChanged(handle.index);
}
public int instanceCount() {
return instances.size();
}
public void notifyDirty(int index) {
if (index < 0 || index >= instanceCount()) {
return;
}
setIndexChanged(index);
}
protected void setIndexChanged(int index) {
changed.set(index);
}
public void notifyRemoval(int index) {
if (index < 0 || index >= instanceCount()) {
return;
}
deleted.set(index);
}
public void removeDeletedInstances() {
if (deleted.isEmpty()) {
return;
}
// Figure out which elements are to be removed.
final int oldSize = this.instances.size();
int removeCount = deleted.cardinality();
if (oldSize == removeCount) {
clear();
return;
}
final int newSize = oldSize - removeCount;
// Start from the first deleted index.
int writePos = deleted.nextSetBit(0);
if (writePos < newSize) {
// Since we'll be shifting everything into this space we can consider it all changed.
setRangeChanged(writePos, newSize);
}
// We definitely shouldn't consider the deleted instances as changed though,
// else we might try some out of bounds accesses later.
changed.clear(newSize, oldSize);
// Punch out the deleted instances, shifting over surviving instances to fill their place.
for (int scanPos = writePos; (scanPos < oldSize) && (writePos < newSize); scanPos++, writePos++) {
// Find next non-deleted element.
scanPos = deleted.nextClearBit(scanPos);
if (scanPos != writePos) {
// Grab the old instance/handle from scanPos...
var handle = handles.get(scanPos);
I instance = instances.get(scanPos);
// ... and move it to writePos.
handles.set(writePos, handle);
instances.set(writePos, instance);
// Make sure the handle knows it's been moved
handle.index = writePos;
}
}
deleted.clear();
instances.subList(newSize, oldSize)
.clear();
handles.subList(newSize, oldSize)
.clear();
}
protected void setRangeChanged(int start, int end) {
changed.set(start, end);
}
/**
* Clear all instances without freeing resources.
*/
public void clear() {
for (InstanceHandleImpl<I> handle : handles) {
// Only clear instances that belong to this instancer.
// If one of these handles was stolen by another instancer,
// clearing it here would cause significant visual artifacts and instance leaks.
// At the same time, we need to clear handles we own to prevent
// instances from changing/deleting positions in this instancer that no longer exist.
if (handle.state == this) {
handle.clear();
handle.state = InstanceHandleImpl.Deleted.instance();
}
}
instances.clear();
handles.clear();
changed.clear();
deleted.clear();
}
	public abstract void delete();

	public abstract void clear();

	@Override
	public String toString() {
		return "AbstractInstancer[" + instanceCount() + ']';

View file

@@ -0,0 +1,175 @@
package dev.engine_room.flywheel.backend.engine;
import java.util.ArrayList;
import org.jetbrains.annotations.Nullable;
import dev.engine_room.flywheel.api.instance.Instance;
import dev.engine_room.flywheel.backend.util.AtomicBitSet;
public abstract class BaseInstancer<I extends Instance> extends AbstractInstancer<I> implements InstanceHandleImpl.State<I> {
// Lock for all instances, only needs to be used in methods that may run on the TaskExecutor.
protected final Object lock = new Object();
protected final ArrayList<I> instances = new ArrayList<>();
protected final ArrayList<InstanceHandleImpl<I>> handles = new ArrayList<>();
protected final AtomicBitSet changed = new AtomicBitSet();
protected final AtomicBitSet deleted = new AtomicBitSet();
protected BaseInstancer(InstancerKey<I> key, Recreate<I> recreate) {
super(key, recreate);
}
@Override
public InstanceHandleImpl.State<I> setChanged(int index) {
notifyDirty(index);
return this;
}
@Override
public InstanceHandleImpl.State<I> setDeleted(int index) {
notifyRemoval(index);
return InstanceHandleImpl.Deleted.instance();
}
@Override
public InstanceHandleImpl.State<I> setVisible(InstanceHandleImpl<I> handle, int index, boolean visible) {
if (visible) {
return this;
}
notifyRemoval(index);
I instance;
synchronized (lock) {
// I think we need to lock to prevent wacky stuff from happening if the array gets resized.
instance = instances.get(index);
}
return new InstanceHandleImpl.Hidden<>(recreate, instance);
}
@Override
public I createInstance() {
var handle = new InstanceHandleImpl<>(this);
I instance = type.create(handle);
synchronized (lock) {
handle.index = instances.size();
addLocked(instance, handle);
return instance;
}
}
public InstanceHandleImpl.State<I> revealInstance(InstanceHandleImpl<I> handle, I instance) {
synchronized (lock) {
handle.index = instances.size();
addLocked(instance, handle);
}
return this;
}
@Override
public void stealInstance(@Nullable I instance) {
if (instance == null) {
return;
}
var instanceHandle = instance.handle();
if (!(instanceHandle instanceof InstanceHandleImpl<?>)) {
// UB: do nothing
return;
}
// Should InstanceType have an isInstance method?
@SuppressWarnings("unchecked") var handle = (InstanceHandleImpl<I>) instanceHandle;
// No need to steal if this instance is already owned by this instancer.
if (handle.state == this) {
return;
}
// Not allowed to steal deleted instances.
if (handle.state instanceof InstanceHandleImpl.Deleted) {
return;
}
// No need to steal if the instance will recreate to us.
if (handle.state instanceof InstanceHandleImpl.Hidden<I> hidden && recreate.equals(hidden.recreate())) {
return;
}
// FIXME: in theory there could be a race condition here if the instance
// is somehow being stolen by 2 different instancers between threads.
// That seems kinda impossible so I'm fine leaving it as is for now.
// Add the instance to this instancer.
if (handle.state instanceof BaseInstancer<I> other) {
// Remove the instance from its old instancer.
// This won't have any unwanted effect when the old instancer
// is filtering deleted instances later, so is safe.
other.notifyRemoval(handle.index);
handle.state = this;
// Only lock now that we'll be mutating our state.
synchronized (lock) {
handle.index = instances.size();
addLocked(instance, handle);
}
} else if (handle.state instanceof InstanceHandleImpl.Hidden<I>) {
handle.state = new InstanceHandleImpl.Hidden<>(recreate, instance);
}
}
/**
* Calls must be synchronized on {@link #lock}.
*/
private void addLocked(I instance, InstanceHandleImpl<I> handle) {
instances.add(instance);
handles.add(handle);
setIndexChanged(handle.index);
}
public int instanceCount() {
return instances.size();
}
public void notifyDirty(int index) {
if (index < 0 || index >= instanceCount()) {
return;
}
setIndexChanged(index);
}
protected void setIndexChanged(int index) {
changed.set(index);
}
public void notifyRemoval(int index) {
if (index < 0 || index >= instanceCount()) {
return;
}
deleted.set(index);
}
/**
* Clear all instances without freeing resources.
*/
public void clear() {
for (InstanceHandleImpl<I> handle : handles) {
// Only clear instances that belong to this instancer.
// If one of these handles was stolen by another instancer,
// clearing it here would cause significant visual artifacts and instance leaks.
// At the same time, we need to clear handles we own to prevent
// instances from changing/deleting positions in this instancer that no longer exist.
if (handle.state == this) {
handle.clear();
handle.state = InstanceHandleImpl.Deleted.instance();
}
}
instances.clear();
handles.clear();
changed.clear();
deleted.clear();
}
}

View file

@@ -8,6 +8,8 @@ import java.util.Queue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentLinkedQueue;
 
+import org.jetbrains.annotations.Nullable;
+
 import com.mojang.datafixers.util.Pair;
 
 import dev.engine_room.flywheel.api.RenderContext;
@@ -26,6 +28,8 @@ import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
 import net.minecraft.client.resources.model.ModelBakery;
 
 public abstract class DrawManager<N extends AbstractInstancer<?>> {
+	private static final boolean WARN_EMPTY_MODELS = Boolean.getBoolean("flywheel.warnEmptyModels");
+
 	/**
 	 * A map of instancer keys to instancers.
 	 * <br>
@@ -50,14 +54,19 @@ public abstract class DrawManager<N extends AbstractInstancer<?>> {
 	public Plan<RenderContext> createFramePlan() {
 		// Go wide on instancers to process deletions in parallel.
-		return ForEachPlan.of(() -> new ArrayList<>(instancers.values()), AbstractInstancer::removeDeletedInstances);
+		return ForEachPlan.of(() -> new ArrayList<>(instancers.values()), AbstractInstancer::parallelUpdate);
 	}
 
 	public void flush(LightStorage lightStorage, EnvironmentStorage environmentStorage) {
 		// Thread safety: flush is called from the render thread after all visual updates have been made,
 		// so there are no:tm: threads we could be racing with.
-		for (var instancer : initializationQueue) {
-			initialize(instancer.key(), instancer.instancer());
+		for (var init : initializationQueue) {
+			var instancer = init.instancer();
+
+			if (instancer.instanceCount() > 0) {
+				initialize(init.key(), instancer);
+			} else {
+				instancers.remove(init.key());
+			}
 		}
 		initializationQueue.clear();
 	}
@@ -93,20 +102,27 @@ public abstract class DrawManager<N extends AbstractInstancer<?>> {
 			return true;
 		}
 
+		if (WARN_EMPTY_MODELS) {
 			StringBuilder builder = new StringBuilder();
 			builder.append("Creating an instancer for a model with no meshes! Stack trace:");
 
 			StackWalker.getInstance()
-					// .walk(s -> s.skip(3)) // this causes forEach to crash for some reason
 					.forEach(f -> builder.append("\n\t")
 							.append(f.toString()));
 
 			FlwBackend.LOGGER.warn(builder.toString());
+		}
 
 		return false;
 	}
 
-	protected static <I extends AbstractInstancer<?>> Map<GroupKey<?>, Int2ObjectMap<List<Pair<I, InstanceHandleImpl<?>>>>> doCrumblingSort(Class<I> clazz, List<Engine.CrumblingBlock> crumblingBlocks) {
+	@FunctionalInterface
+	protected interface State2Instancer<I extends AbstractInstancer<?>> {
+		// I tried using a plain Function<State<?>, I> here, but it exploded with type errors.
+		@Nullable I apply(InstanceHandleImpl.State<?> state);
+	}
+
+	protected static <I extends AbstractInstancer<?>> Map<GroupKey<?>, Int2ObjectMap<List<Pair<I, InstanceHandleImpl<?>>>>> doCrumblingSort(List<Engine.CrumblingBlock> crumblingBlocks, State2Instancer<I> cast) {
 		Map<GroupKey<?>, Int2ObjectMap<List<Pair<I, InstanceHandleImpl<?>>>>> byType = new HashMap<>();
 		for (Engine.CrumblingBlock block : crumblingBlocks) {
 			int progress = block.progress();
@@ -123,16 +139,12 @@ public abstract class DrawManager<N extends AbstractInstancer<?>> {
 				continue;
 			}
 
-			InstanceHandleImpl.State<?> abstractInstancer = impl.state;
+			var instancer = cast.apply(impl.state);
 
-			// AbstractInstancer directly implement HandleState, so this check is valid.
-			if (!clazz.isInstance(abstractInstancer)) {
-				// This rejects instances that were created by a different engine,
-				// and also instances that are hidden or deleted.
+			if (instancer == null) {
 				continue;
 			}
 
-			var instancer = clazz.cast(abstractInstancer);
-
 			byType.computeIfAbsent(new GroupKey<>(instancer.type, instancer.environment), $ -> new Int2ObjectArrayMap<>())
 					.computeIfAbsent(progress, $ -> new ArrayList<>())
 					.add(Pair.of(instancer, impl));
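A brief usage note on the new flag above (not part of the diff itself): WARN_EMPTY_MODELS is read with Boolean.getBoolean, so the empty-model stack trace is only built and logged when the game is launched with the JVM argument -Dflywheel.warnEmptyModels=true; without that system property the warning is skipped entirely.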

View file

@@ -1,13 +1,16 @@
 package dev.engine_room.flywheel.backend.engine;
 
+import org.jetbrains.annotations.UnknownNullability;
+
 import dev.engine_room.flywheel.api.instance.Instance;
 import dev.engine_room.flywheel.api.instance.InstanceHandle;
 
 public class InstanceHandleImpl<I extends Instance> implements InstanceHandle {
+	@UnknownNullability
 	public State<I> state;
 	public int index;
 
-	public InstanceHandleImpl(State<I> state) {
+	public InstanceHandleImpl(@UnknownNullability State<I> state) {
 		this.state = state;
 	}
@@ -62,8 +65,7 @@ public class InstanceHandleImpl<I extends Instance> implements InstanceHandle {
 				return this;
 			}
 			var instancer = recreate.recreate();
-			instancer.revealInstance(handle, instance);
-			return instancer;
+			return instancer.revealInstance(handle, instance);
 		}
 	}

View file

@@ -1,142 +1,212 @@
 package dev.engine_room.flywheel.backend.engine;
 
-import org.jetbrains.annotations.NotNull;
+import java.util.function.BiConsumer;
+import java.util.function.Supplier;
+
+import org.jetbrains.annotations.Nullable;
 
 import it.unimi.dsi.fastutil.ints.IntArrayList;
-import it.unimi.dsi.fastutil.ints.IntObjectImmutablePair;
-import it.unimi.dsi.fastutil.ints.IntObjectPair;
-import it.unimi.dsi.fastutil.longs.Long2IntMap;
-import it.unimi.dsi.fastutil.longs.LongArrayList;
-import it.unimi.dsi.fastutil.longs.LongComparator;
-import it.unimi.dsi.fastutil.objects.ReferenceArrayList;
 import net.minecraft.core.SectionPos;
 
+// Massive kudos to RogueLogix for figuring out this LUT scheme.
+// First layer is Y, then X, then Z.
 public final class LightLut {
-	private static final LongComparator SECTION_X_THEN_Y_THEN_Z = (long a, long b) -> {
-		final var xComp = Integer.compare(SectionPos.x(a), SectionPos.x(b));
-		if (xComp != 0) {
-			return xComp;
-		}
-
-		var yComp = Integer.compare(SectionPos.y(a), SectionPos.y(b));
-		if (yComp != 0) {
-			return yComp;
-		}
-
-		return Integer.compare(SectionPos.z(a), SectionPos.z(b));
-	};
-
-	private LightLut() {
-	}
-
-	// Massive kudos to RogueLogix for figuring out this LUT scheme.
-	// TODO: switch to y x z or x z y ordering
-	// DATA LAYOUT
-	// [0] : base chunk X, X index count, followed by linear indices of y blocks
-	// [yBlockIndex] : baseChunk Y, Y index count, followed by linear indices of z blocks for this x
-	// [zBlockIndex] : baseChunk Z, Z index count, followed by linear indices of lighting chunks
-	// this data layout allows a single buffer to represent the lighting volume, without requiring the entire 3d lookup volume to be allocated
-	public static IntArrayList buildLut(Long2IntMap sectionIndicesMaps) {
-		if (sectionIndicesMaps.isEmpty()) {
-			return new IntArrayList();
-		}
-		final var positions = sortedKeys(sectionIndicesMaps);
-		final var baseX = SectionPos.x(positions.getLong(0));
-		return buildLut(baseX, buildIndices(sectionIndicesMaps, positions, baseX));
-	}
-
-	private static ReferenceArrayList<IntObjectPair<ReferenceArrayList<IntArrayList>>> buildIndices(Long2IntMap sectionIndicesMaps, LongArrayList positions, int baseX) {
-		final var indices = new ReferenceArrayList<IntObjectPair<ReferenceArrayList<IntArrayList>>>();
-		for (long position : positions) {
-			final var x = SectionPos.x(position);
-			final var y = SectionPos.y(position);
-			final var z = SectionPos.z(position);
-			final var xIndex = x - baseX;
-			if (indices.size() <= xIndex) {
-				indices.ensureCapacity(xIndex + 1);
-				indices.size(xIndex + 1);
-			}
-			var yLookup = indices.get(xIndex);
-			if (yLookup == null) {
-				//noinspection SuspiciousNameCombination
-				yLookup = new IntObjectImmutablePair<>(y, new ReferenceArrayList<>());
-				indices.set(xIndex, yLookup);
-			}
-			final var yIndices = yLookup.right();
-			final var yIndex = y - yLookup.leftInt();
-			if (yIndices.size() <= yIndex) {
-				yIndices.ensureCapacity(yIndex + 1);
-				yIndices.size(yIndex + 1);
-			}
-			var zLookup = yIndices.get(yIndex);
-			if (zLookup == null) {
-				zLookup = new IntArrayList();
-				zLookup.add(z);
-				zLookup.add(0); // this value will be filled in later
-				yIndices.set(yIndex, zLookup);
-			}
-			final var zIndex = z - zLookup.getInt(0);
-			if ((zLookup.size() - 2) <= zIndex) {
-				zLookup.ensureCapacity(zIndex + 3);
-				zLookup.size(zIndex + 3);
-			}
-			// Add 1 to the actual index so that 0 indicates a missing section.
-			zLookup.set(zIndex + 2, sectionIndicesMaps.get(position) + 1);
-		}
-		return indices;
-	}
-
-	private static @NotNull LongArrayList sortedKeys(Long2IntMap sectionIndicesMaps) {
-		final var out = new LongArrayList(sectionIndicesMaps.keySet());
-		out.unstableSort(SECTION_X_THEN_Y_THEN_Z);
-		return out;
-	}
-
-	private static IntArrayList buildLut(int baseX, ReferenceArrayList<IntObjectPair<ReferenceArrayList<IntArrayList>>> indices) {
-		final var out = new IntArrayList();
-		out.add(baseX);
-		out.add(indices.size());
-		for (int i = 0; i < indices.size(); i++) {
-			out.add(0);
-		}
-		for (int x = 0; x < indices.size(); x++) {
-			final var yLookup = indices.get(x);
-			if (yLookup == null) {
-				out.set(x + 2, 0);
-				continue;
-			}
-			// ensure that the base position and size dont cross a (64 byte) cache line
-			if ((out.size() & 0xF) == 0xF) {
-				out.add(0);
-			}
-			final var baseYIndex = out.size();
-			out.set(x + 2, baseYIndex);
-			final var yIndices = yLookup.right();
-			out.add(yLookup.leftInt());
-			out.add(yIndices.size());
-			for (int i = 0; i < indices.size(); i++) {
-				out.add(0);
-			}
-			for (int y = 0; y < yIndices.size(); y++) {
-				final var zLookup = yIndices.get(y);
-				if (zLookup == null) {
-					out.set(baseYIndex + y + 2, 0);
-					continue;
-				}
-				// ensure that the base position and size dont cross a (64 byte) cache line
-				if ((out.size() & 0xF) == 0xF) {
-					out.add(0);
-				}
-				out.set(baseYIndex + y + 2, out.size());
-				zLookup.set(1, zLookup.size() - 2);
-				out.addAll(zLookup);
-			}
-		}
-		return out;
-	}
+	private final Layer<Layer<IntLayer>> indices = new Layer<>();
+
+	public void add(long position, int index) {
+		final var x = SectionPos.x(position);
+		final var y = SectionPos.y(position);
+		final var z = SectionPos.z(position);
+
+		indices.computeIfAbsent(y, Layer::new)
+				.computeIfAbsent(x, IntLayer::new)
+				.set(z, index + 1);
+	}
+
+	public void remove(long section) {
+		final var x = SectionPos.x(section);
+		final var y = SectionPos.y(section);
+		final var z = SectionPos.z(section);
+
+		var first = indices.get(y);
+
+		if (first == null) {
+			return;
+		}
+
+		var second = first.get(x);
+
+		if (second == null) {
+			return;
+		}
+
+		second.clear(z);
+	}
+
+	public IntArrayList flatten() {
+		final var out = new IntArrayList();
+
+		indices.fillLut(out, (yIndices, lut) -> yIndices.fillLut(lut, IntLayer::fillLut));
+
+		return out;
+	}
private static final class Layer<T> {
private boolean hasBase = false;
private int base = 0;
private Object[] nextLayer = new Object[0];
public void fillLut(IntArrayList lut, BiConsumer<T, IntArrayList> inner) {
lut.add(base);
lut.add(nextLayer.length);
int innerIndexBase = lut.size();
// Reserve space for the inner indices...
lut.size(innerIndexBase + nextLayer.length);
for (int i = 0; i < nextLayer.length; i++) {
final var innerIndices = (T) nextLayer[i];
if (innerIndices == null) {
continue;
}
int layerPosition = lut.size();
// ...so we can write in their actual positions later.
lut.set(innerIndexBase + i, layerPosition);
// Append the next layer to the lut.
inner.accept(innerIndices, lut);
}
}
@Nullable
public T get(int i) {
if (!hasBase) {
return null;
}
if (i < base) {
return null;
}
final var offset = i - base;
if (offset >= nextLayer.length) {
return null;
}
return (T) nextLayer[offset];
}
public T computeIfAbsent(int i, Supplier<T> ifAbsent) {
if (!hasBase) {
// We don't want to default to base 0, so we'll use the first value we get.
base = i;
hasBase = true;
}
if (i < base) {
rebase(i);
}
final var offset = i - base;
if (offset >= nextLayer.length) {
resize(offset + 1);
}
var out = nextLayer[offset];
if (out == null) {
out = ifAbsent.get();
nextLayer[offset] = out;
}
return (T) out;
}
private void resize(int length) {
final var newIndices = new Object[length];
System.arraycopy(nextLayer, 0, newIndices, 0, nextLayer.length);
nextLayer = newIndices;
}
private void rebase(int newBase) {
final var growth = base - newBase;
final var newIndices = new Object[nextLayer.length + growth];
// Shift the existing elements to the end of the new array to maintain their offset with the new base.
System.arraycopy(nextLayer, 0, newIndices, growth, nextLayer.length);
nextLayer = newIndices;
base = newBase;
}
}
private static final class IntLayer {
private boolean hasBase = false;
private int base = 0;
private int[] indices = new int[0];
public void fillLut(IntArrayList lut) {
lut.add(base);
lut.add(indices.length);
for (int index : indices) {
lut.add(index);
}
}
public void set(int i, int index) {
if (!hasBase) {
base = i;
hasBase = true;
}
if (i < base) {
rebase(i);
}
final var offset = i - base;
if (offset >= indices.length) {
resize(offset + 1);
}
indices[offset] = index;
}
public void clear(int i) {
if (!hasBase) {
return;
}
if (i < base) {
return;
}
final var offset = i - base;
if (offset >= indices.length) {
return;
}
indices[offset] = 0;
}
private void resize(int length) {
final var newIndices = new int[length];
System.arraycopy(indices, 0, newIndices, 0, indices.length);
indices = newIndices;
}
private void rebase(int newBase) {
final var growth = base - newBase;
final var newIndices = new int[indices.length + growth];
System.arraycopy(indices, 0, newIndices, growth, indices.length);
indices = newIndices;
base = newBase;
}
}
}
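To make the new layered layout concrete, here is a small illustrative sketch, not part of this commit, tracing what flatten() emits when a single section is tracked. The example class name and the use of SectionPos.asLong to compose the coordinates that SectionPos.x/y/z decompose are assumptions.

import dev.engine_room.flywheel.backend.engine.LightLut;

import it.unimi.dsi.fastutil.ints.IntArrayList;
import net.minecraft.core.SectionPos;

class LightLutExample {
	static IntArrayList singleSectionLut() {
		var lut = new LightLut();
		// Track the section at (x = 1, y = 2, z = 3) under arena index 5.
		lut.add(SectionPos.asLong(1, 2, 3), 5);

		// Each Layer writes [base, length, childPointer...] and IntLayer writes
		// [base, length, storedIndex...], where storedIndex is index + 1 and 0 means "missing".
		// For this single section the flattened result is:
		// [2, 1, 3,  1, 1, 6,  3, 1, 6]
		//  Y layer    X layer   Z layer (6 = arena index 5 + 1)
		return lut.flatten();
	}
}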

View file

@@ -22,7 +22,6 @@ import net.minecraft.world.level.LightLayer;
 import net.minecraft.world.level.lighting.LayerLightEventListener;
 
 /**
- * TODO: AO data
  * A managed arena of light sections for uploading to the GPU.
 *
 * <p>Each section represents an 18x18x18 block volume of light data.
@@ -45,12 +44,9 @@ public class LightStorage {
 	private static final int INVALID_SECTION = -1;
 
 	private final LevelAccessor level;
+	private final LightLut lut;
 	private final CpuArena arena;
-	private final Long2IntMap section2ArenaIndex = new Long2IntOpenHashMap();
-	{
-		section2ArenaIndex.defaultReturnValue(INVALID_SECTION);
-	}
+	private final Long2IntMap section2ArenaIndex;
 	private final BitSet changed = new BitSet();
 	private boolean needsLutRebuild = false;
@@ -61,8 +57,10 @@ public class LightStorage {
 	public LightStorage(LevelAccessor level) {
 		this.level = level;
 
+		lut = new LightLut();
 		arena = new CpuArena(SECTION_SIZE_BYTES, DEFAULT_ARENA_CAPACITY_SECTIONS);
+		section2ArenaIndex = new Long2IntOpenHashMap();
+		section2ArenaIndex.defaultReturnValue(INVALID_SECTION);
 	}
 
 	/**
@@ -116,7 +114,6 @@ public class LightStorage {
 	}
 
 	// Now actually do the collection.
-	// TODO: Should this be done in parallel?
 	sectionsToCollect.forEach(this::collectSection);
 
 	updatedSections.clear();
@@ -137,12 +134,22 @@ public class LightStorage {
 			if (!requestedSections.contains(section)) {
 				arena.free(entry.getIntValue());
-				needsLutRebuild = true;
+				endTrackingSection(section);
 				it.remove();
 			}
 		}
 	}
 
+	private void beginTrackingSection(long section, int index) {
+		lut.add(section, index);
+		needsLutRebuild = true;
+	}
+
+	private void endTrackingSection(long section) {
+		lut.remove(section);
+		needsLutRebuild = true;
+	}
+
 	public int capacity() {
 		return arena.capacity();
 	}
@@ -376,7 +383,7 @@ public class LightStorage {
 		if (out == INVALID_SECTION) {
 			out = arena.alloc();
 			section2ArenaIndex.put(section, out);
-			needsLutRebuild = true;
+			beginTrackingSection(section, out);
 		}
 		return out;
 	}
@@ -408,8 +415,7 @@ public class LightStorage {
 	}
 
 	public IntArrayList createLut() {
-		// TODO: incremental lut updates
-		return LightLut.buildLut(section2ArenaIndex);
+		return lut.flatten();
 	}
 
 	private enum SectionEdge {

View file

@@ -70,23 +70,17 @@ public class MeshPool {
 		if (anyToRemove) {
 			anyToRemove = false;
 			processDeletions();
-
-			// Might want to shrink the index pool if something was removed.
-			indexPool.reset();
-
-			for (PooledMesh mesh : meshList) {
-				indexPool.updateCount(mesh.mesh.indexSequence(), mesh.indexCount());
-			}
-		} else {
+		}
+
+		if (!recentlyAllocated.isEmpty()) {
 			// Otherwise, just update the index with the new counts.
 			for (PooledMesh mesh : recentlyAllocated) {
 				indexPool.updateCount(mesh.mesh.indexSequence(), mesh.indexCount());
 			}
 
-			indexPool.flush();
-
 			recentlyAllocated.clear();
 		}
 
+		// Always need to flush the index pool.
+		indexPool.flush();
+
 		uploadAll();
 		dirty = false;
 	}

View file

@@ -186,10 +186,14 @@ public class IndirectDrawManager extends DrawManager<IndirectInstancer<?>> {
 		programs.release();
 
 		depthPyramid.delete();
+
+		lightBuffers.delete();
+
+		matrixBuffer.delete();
 	}
 
 	public void renderCrumbling(List<Engine.CrumblingBlock> crumblingBlocks) {
-		var byType = doCrumblingSort(IndirectInstancer.class, crumblingBlocks);
+		var byType = doCrumblingSort(crumblingBlocks, IndirectInstancer::fromState);
 
 		if (byType.isEmpty()) {
 			return;

View file

@@ -2,7 +2,10 @@ package dev.engine_room.flywheel.backend.engine.indirect;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
 
+import org.jetbrains.annotations.Nullable;
 import org.jetbrains.annotations.UnknownNullability;
 import org.joml.Vector4fc;
 import org.lwjgl.system.MemoryUtil;
@@ -10,6 +13,7 @@ import org.lwjgl.system.MemoryUtil;
 import dev.engine_room.flywheel.api.instance.Instance;
 import dev.engine_room.flywheel.api.instance.InstanceWriter;
 import dev.engine_room.flywheel.backend.engine.AbstractInstancer;
+import dev.engine_room.flywheel.backend.engine.InstanceHandleImpl;
 import dev.engine_room.flywheel.backend.engine.InstancerKey;
 import dev.engine_room.flywheel.backend.util.AtomicBitSet;
 import dev.engine_room.flywheel.lib.math.MoreMath;
@@ -20,7 +24,27 @@ public class IndirectInstancer<I extends Instance> extends AbstractInstancer<I>
 	private final List<IndirectDraw> associatedDraws = new ArrayList<>();
 	private final Vector4fc boundingSphere;
 
-	private final AtomicBitSet changedPages = new AtomicBitSet();
+	private final AtomicReference<InstancePage<I>[]> pages = new AtomicReference<>(pageArray(0));
+	/**
+	 * The set of pages whose count changed and thus need their descriptor re-uploaded.
+	 */
+	private final AtomicBitSet validityChanged = new AtomicBitSet();
+	/**
+	 * The set of pages whose content changed and thus need their instances re-uploaded.
+	 * Note that we don't re-upload for deletions, as the memory becomes invalid and masked out by the validity bits.
+	 */
+	private final AtomicBitSet contentsChanged = new AtomicBitSet();
+	/**
+	 * The set of pages that are entirely full.
+	 * We scan the clear bits of this set when trying to add an instance.
+	 */
+	private final AtomicBitSet fullPages = new AtomicBitSet();
+	/**
+	 * The set of mergable pages. A page is mergeable if it is not empty and has 16 or fewer instances.
+	 * These constraints are set so that we can guarantee that merging two pages leaves one entirely empty,
+	 * but we also don't want to waste work merging into pages that are already empty.
+	 */
+	private final AtomicBitSet mergeablePages = new AtomicBitSet();
 
 	public ObjectStorage.@UnknownNullability Mapping mapping;
@@ -35,16 +59,216 @@ public class IndirectInstancer<I extends Instance> extends AbstractInstancer<I>
		boundingSphere = key.model().boundingSphere();
	}

	@Override
	public void setIndexChanged(int index) {
		changedPages.set(ObjectStorage.objectIndex2PageIndex(index));
	}

	@SuppressWarnings("unchecked")
	private static <I extends Instance> InstancePage<I>[] pageArray(int length) {
		return new InstancePage[length];
	}

	@SuppressWarnings("unchecked")
	private static <I extends Instance> I[] instanceArray() {
		return (I[]) new Instance[ObjectStorage.PAGE_SIZE];
	}

	@SuppressWarnings("unchecked")
	private static <I extends Instance> InstanceHandleImpl<I>[] handleArray() {
		return new InstanceHandleImpl[ObjectStorage.PAGE_SIZE];
	}
@Nullable
public static IndirectInstancer<?> fromState(InstanceHandleImpl.State<?> handle) {
if (handle instanceof InstancePage<?> instancer) {
return instancer.parent;
}
return null;
}
private static final class InstancePage<I extends Instance> implements InstanceHandleImpl.State<I> {
private final IndirectInstancer<I> parent;
private final int pageNo;
private final I[] instances;
// Handles are only read in #takeFrom. It would be nice to avoid tracking these at all.
private final InstanceHandleImpl<I>[] handles;
/**
* A bitset describing which indices in the instances/handles arrays contain live instances.
*/
private final AtomicInteger valid;
private InstancePage(IndirectInstancer<I> parent, int pageNo) {
this.parent = parent;
this.pageNo = pageNo;
this.instances = instanceArray();
this.handles = handleArray();
this.valid = new AtomicInteger(0);
}
/**
* Attempt to add the given instance/handle to this page.
*
* @param instance The instance to add
* @param handle The instance's handle
* @return true if the instance was added, false if the page is full
*/
public boolean add(I instance, InstanceHandleImpl<I> handle) {
// Thread safety: we loop until we either win the race and add the given instance, or we
// run out of space because other threads trying to add at the same time.
while (true) {
int currentValue = valid.get();
if (isFull(currentValue)) {
// The page is full, must search elsewhere
return false;
}
// determine what the new long value will be after we set the appropriate bit.
int index = Integer.numberOfTrailingZeros(~currentValue);
int newValue = currentValue | (1 << index);
// if no other thread has modified the value since we read it, we won the race and we are done.
if (valid.compareAndSet(currentValue, newValue)) {
instances[index] = instance;
handles[index] = handle;
handle.state = this;
// Handle index is unique amongst all pages of this instancer.
handle.index = local2HandleIndex(index);
parent.contentsChanged.set(pageNo);
parent.validityChanged.set(pageNo);
if (isFull(newValue)) {
// The page is now full, mark it so in the bitset.
// This is safe because only one bit position changes at a time.
parent.fullPages.set(pageNo);
}
if (isEmpty(currentValue)) {
// Value we just saw was zero, so since we added something we are now mergeable!
parent.mergeablePages.set(pageNo);
} else if (Integer.bitCount(currentValue) == 16) {
// We just filled the 17th instance, so we are no longer mergeable.
parent.mergeablePages.clear(pageNo);
}
return true;
}
}
}
private int local2HandleIndex(int index) {
return (pageNo << ObjectStorage.LOG_2_PAGE_SIZE) + index;
} }
		@Override
		public InstanceHandleImpl.State<I> setChanged(int index) {
			parent.contentsChanged.set(pageNo);
			return this;
		}

	@Override
	protected void setRangeChanged(int start, int end) {
		super.setRangeChanged(start, end);

		changedPages.set(ObjectStorage.objectIndex2PageIndex(start), ObjectStorage.objectIndex2PageIndex(end) + 1);
	}

		@Override
public InstanceHandleImpl.State<I> setDeleted(int index) {
int localIndex = index % ObjectStorage.PAGE_SIZE;
clear(localIndex);
return InstanceHandleImpl.Deleted.instance();
}
@Override
public InstanceHandleImpl.State<I> setVisible(InstanceHandleImpl<I> handle, int index, boolean visible) {
if (visible) {
return this;
}
int localIndex = index % ObjectStorage.PAGE_SIZE;
var out = instances[localIndex];
clear(localIndex);
return new InstanceHandleImpl.Hidden<>(parent.recreate, out);
}
private void clear(int localIndex) {
instances[localIndex] = null;
handles[localIndex] = null;
while (true) {
int currentValue = valid.get();
int newValue = currentValue & ~(1 << localIndex);
if (valid.compareAndSet(currentValue, newValue)) {
parent.validityChanged.set(pageNo);
if (isEmpty(newValue)) {
// If we decremented to zero then we're no longer mergeable.
parent.mergeablePages.clear(pageNo);
} else if (Integer.bitCount(newValue) == 16) {
// If we decremented to 16 then we're now mergeable.
parent.mergeablePages.set(pageNo);
}
// Set full page last so that other threads don't race to set the other bitsets.
parent.fullPages.clear(pageNo);
break;
}
}
}
/**
* Only call this on 2 pages that are mergeable.
*
* @param other The page to take instances from.
*/
private void takeFrom(InstancePage<I> other) {
// Fill the holes in this page with instances from the other page.
int valid = this.valid.get();
int otherValid = other.valid.get();
for (int i = 0; i < ObjectStorage.PAGE_SIZE; i++) {
int mask = 1 << i;
// Find set bits in the other page.
if ((otherValid & mask) == 0) {
continue;
}
int writePos = Integer.numberOfTrailingZeros(~valid);
instances[writePos] = other.instances[i];
handles[writePos] = other.handles[i];
handles[writePos].state = this;
handles[writePos].index = local2HandleIndex(writePos);
// Clear out the other page.
otherValid &= ~mask;
other.handles[i] = null;
other.instances[i] = null;
// Set the bit in this page so we can find the next write position.
valid |= 1 << writePos;
// If we're full, we're done.
if (isFull(valid)) {
break;
}
}
this.valid.set(valid);
other.valid.set(otherValid);
// If the other page was quite empty we may still be mergeable.
parent.mergeablePages.set(pageNo, isMergeable(valid));
// We definitely changed the contents and validity of this page.
parent.contentsChanged.set(pageNo);
parent.validityChanged.set(pageNo);
// The other page will end up empty, so the validity changes and it's no longer mergeable.
// Also clear the changed bit so we don't re-upload the instances.
parent.contentsChanged.clear(other.pageNo);
parent.validityChanged.set(other.pageNo);
parent.mergeablePages.clear(other.pageNo);
if (isFull(valid)) {
parent.fullPages.set(pageNo);
}
}
} }
public void addDraw(IndirectDraw draw) { public void addDraw(IndirectDraw draw) {
@@ -56,9 +280,32 @@
 	}
 
 	public void update(int modelIndex, int baseInstance) {
-		this.modelIndex = modelIndex;
 		this.baseInstance = baseInstance;
-		mapping.update(modelIndex, instanceCount());
+
+		var sameModelIndex = this.modelIndex == modelIndex;
+
+		if (sameModelIndex && validityChanged.isEmpty()) {
+			// Nothing to do!
+			return;
+		}
+
+		this.modelIndex = modelIndex;
+
+		var pages = this.pages.get();
+
+		mapping.updateCount(pages.length);
+
+		if (sameModelIndex) {
+			// Only need to update the changed pages.
+			for (int page = validityChanged.nextSetBit(0); page >= 0 && page < pages.length; page = validityChanged.nextSetBit(page + 1)) {
+				mapping.updatePage(page, modelIndex, pages[page].valid.get());
+			}
+		} else {
+			// Need to update all pages since the model index changed.
+			for (int i = 0; i < pages.length; i++) {
+				mapping.updatePage(i, modelIndex, pages[i].valid.get());
+			}
+		}
+
+		validityChanged.clear();
 	}
 
 	public void writeModel(long ptr) {
@@ -72,25 +319,22 @@
 	}
 
 	public void uploadInstances(StagingBuffer stagingBuffer, int instanceVbo) {
-		if (changedPages.isEmpty()) {
+		if (contentsChanged.isEmpty()) {
 			return;
 		}
 
-		int numPages = mapping.pageCount();
-
-		var instanceCount = instances.size();
+		var pages = this.pages.get();
 
-		for (int page = changedPages.nextSetBit(0); page >= 0 && page < numPages; page = changedPages.nextSetBit(page + 1)) {
-			int startObject = ObjectStorage.pageIndex2ObjectIndex(page);
-
-			if (startObject >= instanceCount) {
-				break;
-			}
-
-			int endObject = Math.min(instanceCount, ObjectStorage.pageIndex2ObjectIndex(page + 1));
+		for (int page = contentsChanged.nextSetBit(0); page >= 0 && page < pages.length; page = contentsChanged.nextSetBit(page + 1)) {
+			var instances = pages[page].instances;
 
 			long baseByte = mapping.page2ByteOffset(page);
-			long size = (endObject - startObject) * instanceStride;
+
+			if (baseByte < 0) {
+				// This page is not mapped to the VBO.
+				continue;
+			}
+
+			long size = ObjectStorage.PAGE_SIZE * instanceStride;
 
 			// Because writes are broken into pages, we end up with significantly more calls into
 			// StagingBuffer#enqueueCopy and the allocations for the writer got out of hand. Here
@@ -101,9 +345,10 @@
 			long direct = stagingBuffer.reserveForCopy(size, instanceVbo, baseByte);
 			if (direct != MemoryUtil.NULL) {
-				for (int i = startObject; i < endObject; i++) {
-					var instance = instances.get(i);
-					writer.write(direct, instance);
+				for (I instance : instances) {
+					if (instance != null) {
+						writer.write(direct, instance);
+					}
 					direct += instanceStride;
 				}
 				continue;
@@ -112,15 +357,49 @@
 			// Otherwise, write to a scratch buffer and enqueue a copy.
 			var block = stagingBuffer.getScratch(size);
 			var ptr = block.ptr();
-			for (int i = startObject; i < endObject; i++) {
-				var instance = instances.get(i);
-				writer.write(ptr, instance);
+			for (I instance : instances) {
+				if (instance != null) {
+					writer.write(ptr, instance);
+				}
 				ptr += instanceStride;
 			}
 
 			stagingBuffer.enqueueCopy(block.ptr(), size, instanceVbo, baseByte);
 		}
 
-		changedPages.clear();
+		contentsChanged.clear();
+	}
+
+	public void parallelUpdate() {
+		var pages = this.pages.get();
+
+		int page = 0;
+		while (mergeablePages.cardinality() > 1) {
+			page = mergeablePages.nextSetBit(page);
+
+			if (page < 0) {
+				break;
+			}
+
+			// Find the next mergeable page.
+			int next = mergeablePages.nextSetBit(page + 1);
+
+			if (next < 0) {
+				break;
+			}
+
+			// Try to merge the pages.
+			pages[page].takeFrom(pages[next]);
+		}
+	}
+
+	private static boolean isFull(int valid) {
+		return valid == 0xFFFFFFFF;
+	}
+
+	private static boolean isEmpty(int valid) {
+		return valid == 0;
+	}
+
+	private static boolean isMergeable(int valid) {
+		return !isEmpty(valid) && Integer.bitCount(valid) <= 16;
 	}
 
 	@Override
@@ -143,4 +422,135 @@
 	public int local2GlobalInstanceIndex(int instanceIndex) {
 		return mapping.objectIndex2GlobalIndex(instanceIndex);
 	}
@Override
public I createInstance() {
var handle = new InstanceHandleImpl<I>(null);
I instance = type.create(handle);
addInner(instance, handle);
return instance;
}
public InstanceHandleImpl.State<I> revealInstance(InstanceHandleImpl<I> handle, I instance) {
addInner(instance, handle);
return handle.state;
}
@Override
public void stealInstance(@Nullable I instance) {
if (instance == null) {
return;
}
var instanceHandle = instance.handle();
if (!(instanceHandle instanceof InstanceHandleImpl<?>)) {
// UB: do nothing
return;
}
// Should InstanceType have an isInstance method?
@SuppressWarnings("unchecked") var handle = (InstanceHandleImpl<I>) instanceHandle;
// Not allowed to steal deleted instances.
if (handle.state instanceof InstanceHandleImpl.Deleted) {
return;
}
// No need to steal if the instance will recreate to us.
if (handle.state instanceof InstanceHandleImpl.Hidden<I> hidden && recreate.equals(hidden.recreate())) {
return;
}
// FIXME: in theory there could be a race condition here if the instance
// is somehow being stolen by 2 different instancers between threads.
// That seems kinda impossible so I'm fine leaving it as is for now.
// Add the instance to this instancer.
if (handle.state instanceof InstancePage<?> other) {
if (other.parent == this) {
return;
}
// Remove the instance from its old instancer.
// This won't have any unwanted effect when the old instancer
// filters deleted instances later, so it is safe.
other.setDeleted(handle.index);
// Only lock now that we'll be mutating our state.
addInner(instance, handle);
} else if (handle.state instanceof InstanceHandleImpl.Hidden<I>) {
handle.state = new InstanceHandleImpl.Hidden<>(recreate, instance);
}
}
private void addInner(I instance, InstanceHandleImpl<I> handle) {
// Outer loop:
// - try to find an empty space
// - or grow the page array if we can't
// - add the instance to the new page, or try again
while (true) {
var pages = this.pages.get();
// First, try to find a page with space.
for (int i = fullPages.nextClearBit(0); i < pages.length; i = fullPages.nextClearBit(i + 1)) {
// It may have been filled in while we were searching, but hopefully not.
if (pages[i].add(instance, handle)) {
return;
}
}
// If we're here, all other pages are full
// If we hit this on the second iteration of the outer loop then `pages` is once again full.
var desiredLength = pages.length + 1;
// Inner loop: grow the page array. This is very similar to the logic in AtomicBitSet.
while (pages.length < desiredLength) {
// Thread safety: newPages contains all pages from the currently visible array, plus one extra.
// All pages in the currently visible array are canonical and will not change.
// Can't just `new InstancePage[]` because it has a generic parameter.
InstancePage<I>[] newPages = pageArray(desiredLength);
System.arraycopy(pages, 0, newPages, 0, pages.length);
newPages[pages.length] = new InstancePage<>(this, pages.length);
// because we are using a compareAndSet, if this thread "wins the race" and successfully sets this variable, then the new page becomes canonical.
if (this.pages.compareAndSet(pages, newPages)) {
pages = newPages;
} else {
// If we "lose the race" and are growing the AtomicBitset segments larger,
// then we will gather the new canonical pages from the update which we missed on the next iteration of this loop.
// The new page will be discarded and never seen again.
pages = this.pages.get();
}
}
// Shortcut: try to add the instance to the last page.
// Technically we could just let the outer loop go again, but that
// involves a good bit of work just to likely get back here.
if (pages[pages.length - 1].add(instance, handle)) {
return;
}
// It may be the case that many other instances were added in the same instant.
// We can still lose this race, though it is very unlikely.
}
}
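The compare-and-set growth described in the comments above is a standard lock-free publish pattern. A rough standalone sketch of the same idea, an illustrative class rather than the mod's implementation, assuming readers only ever observe the array through the AtomicReference:

import java.util.concurrent.atomic.AtomicReference;

public class GrowOnlyArraySketch {
    private final AtomicReference<Object[]> slots = new AtomicReference<>(new Object[0]);

    // Grow the published array by one slot, retrying if another thread wins the race.
    public Object[] growByOne() {
        while (true) {
            Object[] current = slots.get();
            Object[] grown = new Object[current.length + 1];
            System.arraycopy(current, 0, grown, 0, current.length);
            // Winning the CAS makes `grown` the canonical array.
            if (slots.compareAndSet(current, grown)) {
                return grown;
            }
            // Losing means another thread already published a newer array;
            // loop, re-read it, and discard our copy.
        }
    }
}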
public int instanceCount() {
// Not exactly accurate but it's an upper bound.
// TODO: maybe this could be tracked with an AtomicInteger?
return pages.get().length << ObjectStorage.LOG_2_PAGE_SIZE;
}
/**
* Clear all instances without freeing resources.
*/
public void clear() {
this.pages.set(pageArray(0));
contentsChanged.clear();
validityChanged.clear();
fullPages.clear();
mergeablePages.clear();
}
} }

View file

@ -40,4 +40,9 @@ public class LightBuffers {
GL46.glBindBufferRange(GL46.GL_SHADER_STORAGE_BUFFER, BufferBindings.LIGHT_LUT, lut.handle(), 0, lut.byteCapacity()); GL46.glBindBufferRange(GL46.GL_SHADER_STORAGE_BUFFER, BufferBindings.LIGHT_LUT, lut.handle(), 0, lut.byteCapacity());
GL46.glBindBufferRange(GL46.GL_SHADER_STORAGE_BUFFER, BufferBindings.LIGHT_SECTION, sections.handle(), 0, sections.byteCapacity()); GL46.glBindBufferRange(GL46.GL_SHADER_STORAGE_BUFFER, BufferBindings.LIGHT_SECTION, sections.handle(), 0, sections.byteCapacity());
} }
public void delete() {
lut.delete();
sections.delete();
}
} }

View file

@ -30,4 +30,8 @@ public class MatrixBuffer {
GL46.glBindBufferRange(GL46.GL_SHADER_STORAGE_BUFFER, BufferBindings.MATRICES, matrices.handle(), 0, matrices.byteCapacity()); GL46.glBindBufferRange(GL46.GL_SHADER_STORAGE_BUFFER, BufferBindings.MATRICES, matrices.handle(), 0, matrices.byteCapacity());
} }
public void delete() {
matrices.delete();
}
} }

View file

@ -1,6 +1,7 @@
package dev.engine_room.flywheel.backend.engine.indirect; package dev.engine_room.flywheel.backend.engine.indirect;
import java.util.Arrays; import java.util.Arrays;
import java.util.BitSet;
import org.lwjgl.system.MemoryUtil; import org.lwjgl.system.MemoryUtil;
@ -13,8 +14,12 @@ public class ObjectStorage extends AbstractArena {
public static final int PAGE_SIZE = 1 << LOG_2_PAGE_SIZE; public static final int PAGE_SIZE = 1 << LOG_2_PAGE_SIZE;
public static final int PAGE_MASK = PAGE_SIZE - 1; public static final int PAGE_MASK = PAGE_SIZE - 1;
public static final int INITIAL_PAGES_ALLOCATED = 4; public static final int INVALID_PAGE = -1;
public static final int INITIAL_PAGES_ALLOCATED = 4;
public static final int DESCRIPTOR_SIZE_BYTES = Integer.BYTES * 2;
private final BitSet changedFrames = new BitSet();
/** /**
* The GPU side buffer containing all the objects, logically divided into page frames. * The GPU side buffer containing all the objects, logically divided into page frames.
*/ */
@ -28,8 +33,6 @@ public class ObjectStorage extends AbstractArena {
*/ */
private MemoryBlock frameDescriptors; private MemoryBlock frameDescriptors;
private boolean needsUpload = false;
public ObjectStorage(long objectSizeBytes) { public ObjectStorage(long objectSizeBytes) {
super(PAGE_SIZE * objectSizeBytes); super(PAGE_SIZE * objectSizeBytes);
@ -37,8 +40,8 @@ public class ObjectStorage extends AbstractArena {
this.frameDescriptorBuffer = new ResizableStorageBuffer(); this.frameDescriptorBuffer = new ResizableStorageBuffer();
objectBuffer.ensureCapacity(INITIAL_PAGES_ALLOCATED * elementSizeBytes); objectBuffer.ensureCapacity(INITIAL_PAGES_ALLOCATED * elementSizeBytes);
frameDescriptorBuffer.ensureCapacity(INITIAL_PAGES_ALLOCATED * Integer.BYTES); frameDescriptorBuffer.ensureCapacity(INITIAL_PAGES_ALLOCATED * DESCRIPTOR_SIZE_BYTES);
frameDescriptors = MemoryBlock.malloc(INITIAL_PAGES_ALLOCATED * Integer.BYTES); frameDescriptors = MemoryBlock.malloc(INITIAL_PAGES_ALLOCATED * DESCRIPTOR_SIZE_BYTES);
} }
public Mapping createMapping() { public Mapping createMapping() {
@ -52,8 +55,23 @@ public class ObjectStorage extends AbstractArena {
@Override @Override
public void free(int i) { public void free(int i) {
if (i == INVALID_PAGE) {
return;
}
super.free(i); super.free(i);
MemoryUtil.memPutInt(ptrForPage(i), 0); var ptr = ptrForPage(i);
MemoryUtil.memPutInt(ptr, 0);
MemoryUtil.memPutInt(ptr + 4, 0);
changedFrames.set(i);
}
private void set(int i, int modelIndex, int validBits) {
var ptr = ptrForPage(i);
MemoryUtil.memPutInt(ptr, modelIndex);
MemoryUtil.memPutInt(ptr + 4, validBits);
changedFrames.set(i);
} }
@Override @Override
@ -64,12 +82,17 @@ public class ObjectStorage extends AbstractArena {
} }
public void uploadDescriptors(StagingBuffer stagingBuffer) { public void uploadDescriptors(StagingBuffer stagingBuffer) {
if (!needsUpload) { if (changedFrames.isEmpty()) {
return; return;
} }
// We could be smarter about which spans are uploaded but this thing is so small it's probably not worth it.
stagingBuffer.enqueueCopy(frameDescriptors.ptr(), frameDescriptors.size(), frameDescriptorBuffer.handle(), 0); var ptr = frameDescriptors.ptr();
needsUpload = false; for (int i = changedFrames.nextSetBit(0); i >= 0 && i < capacity(); i = changedFrames.nextSetBit(i + 1)) {
var offset = (long) i * DESCRIPTOR_SIZE_BYTES;
stagingBuffer.enqueueCopy(ptr + offset, DESCRIPTOR_SIZE_BYTES, frameDescriptorBuffer.handle(), offset);
}
changedFrames.clear();
} }
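Taken together, the descriptor changes above replace the old packed "modelIndex | count" int with two ints per page (model index, then a 32-bit validity mask) and re-upload only the descriptors whose page actually changed. A rough CPU-side sketch of that layout and dirty tracking, using a plain ByteBuffer and illustrative names in place of the mod's MemoryBlock and StagingBuffer:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.BitSet;
import java.util.function.IntConsumer;

public class PageDescriptorSketch {
    static final int DESCRIPTOR_SIZE_BYTES = Integer.BYTES * 2;

    private final ByteBuffer descriptors;
    private final BitSet changedFrames = new BitSet();

    PageDescriptorSketch(int pageCount) {
        descriptors = ByteBuffer.allocate(pageCount * DESCRIPTOR_SIZE_BYTES)
                .order(ByteOrder.LITTLE_ENDIAN);
    }

    // Write one descriptor: [modelIndex, validBits], and remember that it changed.
    void set(int page, int modelIndex, int validBits) {
        int offset = page * DESCRIPTOR_SIZE_BYTES;
        descriptors.putInt(offset, modelIndex);
        descriptors.putInt(offset + Integer.BYTES, validBits);
        changedFrames.set(page);
    }

    // Visit only the dirty descriptors, e.g. to enqueue one small copy per changed page.
    void uploadChanged(IntConsumer enqueueCopyForPage) {
        for (int i = changedFrames.nextSetBit(0); i >= 0; i = changedFrames.nextSetBit(i + 1)) {
            enqueueCopyForPage.accept(i);
        }
        changedFrames.clear();
    }
}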
public void delete() { public void delete() {
@ -79,7 +102,7 @@ public class ObjectStorage extends AbstractArena {
} }
private long ptrForPage(int page) { private long ptrForPage(int page) {
return frameDescriptors.ptr() + (long) page * Integer.BYTES; return frameDescriptors.ptr() + (long) page * DESCRIPTOR_SIZE_BYTES;
} }
public static int objectIndex2PageIndex(int objectIndex) { public static int objectIndex2PageIndex(int objectIndex) {
@ -97,61 +120,52 @@ public class ObjectStorage extends AbstractArena {
private static final int[] EMPTY_ALLOCATION = new int[0]; private static final int[] EMPTY_ALLOCATION = new int[0];
private int[] pages = EMPTY_ALLOCATION; private int[] pages = EMPTY_ALLOCATION;
private int modelIndex = -1; public void updatePage(int index, int modelIndex, int validBits) {
private int objectCount = 0; if (validBits == 0) {
holePunch(index);
/**
* Adjust this allocation to the given model index and object count.
*
* <p>This method triggers eager resizing of the allocation to fit the new object count.
* If the model index is different from the current one, all frame descriptors will be updated.
*
* @param modelIndex The model index the objects in this allocation are associated with.
* @param objectCount The number of objects in this allocation.
*/
public void update(int modelIndex, int objectCount) {
boolean incremental = this.modelIndex == modelIndex;
if (incremental && objectCount == this.objectCount) {
// Nothing will change.
return; return;
} }
var frame = pages[index];
ObjectStorage.this.needsUpload = true; if (frame == INVALID_PAGE) {
// Un-hole-punch: allocate a page for this slot again.
frame = unHolePunch(index);
}
this.modelIndex = modelIndex; ObjectStorage.this.set(frame, modelIndex, validBits);
this.objectCount = objectCount; }
/**
* Free a page on the inside of the mapping, maintaining the same virtual mapping size.
*
* @param index The index of the page to free.
*/
public void holePunch(int index) {
ObjectStorage.this.free(pages[index]);
pages[index] = INVALID_PAGE;
}
/**
* Allocate a new page on the inside of the mapping, maintaining the same virtual mapping size.
*
* @param index The index of the page to allocate.
* @return The allocated page.
*/
private int unHolePunch(int index) {
int page = ObjectStorage.this.alloc();
pages[index] = page;
return page;
}
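As a usage note for the hole-punching methods above: the mapping keeps its virtual length when an interior page is freed, and only re-allocates the next time that slot is written. A tiny illustrative sketch of the idea on a plain int[] page table (stand-in names, not the mod's arena):

import java.util.Arrays;

public class PageTableSketch {
    static final int INVALID_PAGE = -1;

    private final int[] pages;
    private int nextPhysicalPage = 0; // stand-in for a real page allocator

    PageTableSketch(int length) {
        pages = new int[length];
        Arrays.fill(pages, INVALID_PAGE);
    }

    // Free the page at `index` without shrinking the mapping.
    void holePunch(int index) {
        // A real implementation would also return pages[index] to the arena.
        pages[index] = INVALID_PAGE;
    }

    // Re-allocate a physical page for a previously punched slot.
    int unHolePunch(int index) {
        int page = nextPhysicalPage++;
        pages[index] = page;
        return page;
    }
}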
public void updateCount(int newLength) {
var oldLength = pages.length; var oldLength = pages.length;
var newLength = objectIndex2PageIndex((objectCount + PAGE_MASK));
if (oldLength > newLength) { if (oldLength > newLength) {
// Eagerly free the now unnecessary pages. // Eagerly free the now unnecessary pages.
// shrink will zero out the pageTable entries for the freed pages. // shrink will zero out the pageTable entries for the freed pages.
shrink(oldLength, newLength); shrink(oldLength, newLength);
if (incremental) {
// Only update the last page, everything else is unchanged.
updateRange(newLength - 1, newLength);
}
} else if (oldLength < newLength) { } else if (oldLength < newLength) {
// Allocate new pages to fit the new object count. // Allocate new pages to fit the new object count.
grow(newLength, oldLength); grow(newLength, oldLength);
if (incremental) {
// Update the old last page + all new pages
updateRange(oldLength - 1, newLength);
}
} else {
if (incremental) {
// Only update the last page.
updateRange(oldLength - 1, oldLength);
}
}
if (!incremental) {
// Update all pages.
updateRange(0, newLength);
} }
} }
@ -159,8 +173,8 @@ public class ObjectStorage extends AbstractArena {
return pages.length; return pages.length;
} }
public long page2ByteOffset(int page) { public long page2ByteOffset(int index) {
return ObjectStorage.this.byteOffsetOf(pages[page]); return ObjectStorage.this.byteOffsetOf(pages[index]);
} }
public void delete() { public void delete() {
@ -168,35 +182,6 @@ public class ObjectStorage extends AbstractArena {
ObjectStorage.this.free(page); ObjectStorage.this.free(page);
} }
pages = EMPTY_ALLOCATION; pages = EMPTY_ALLOCATION;
modelIndex = -1;
objectCount = 0;
ObjectStorage.this.needsUpload = true;
}
/**
* Calculates the page descriptor for the given page index.
* Runs under the assumption than all pages are full except maybe the last one.
*/
private int calculatePageDescriptor(int pageIndex) {
int countInPage;
if (objectCount % PAGE_SIZE != 0 && pageIndex == pages.length - 1) {
// Last page && it isn't full -> use the remainder.
countInPage = objectCount & PAGE_MASK;
} else if (objectCount > 0) {
// Full page.
countInPage = PAGE_SIZE;
} else {
// Empty page, this shouldn't be reachable because we eagerly free empty pages.
countInPage = 0;
}
return (modelIndex & 0x3FFFFF) | (countInPage << 26);
}
private void updateRange(int start, int oldLength) {
for (int i = start; i < oldLength; i++) {
MemoryUtil.memPutInt(ptrForPage(pages[i]), calculatePageDescriptor(i));
}
} }
private void grow(int neededPages, int oldLength) { private void grow(int neededPages, int oldLength) {

View file

@ -147,7 +147,15 @@ public class InstancedDrawManager extends DrawManager<InstancedInstancer<?>> {
@Override @Override
public void renderCrumbling(List<Engine.CrumblingBlock> crumblingBlocks) { public void renderCrumbling(List<Engine.CrumblingBlock> crumblingBlocks) {
// Sort draw calls into buckets, so we don't have to do as many shader binds. // Sort draw calls into buckets, so we don't have to do as many shader binds.
var byType = doCrumblingSort(InstancedInstancer.class, crumblingBlocks); var byType = doCrumblingSort(crumblingBlocks, handle -> {
// AbstractInstancer directly implements HandleState, so this check is valid.
if (handle instanceof InstancedInstancer<?> instancer) {
return instancer;
}
// This rejects instances that were created by a different engine,
// and also instances that are hidden or deleted.
return null;
});
if (byType.isEmpty()) { if (byType.isEmpty()) {
return; return;

View file

@ -7,7 +7,7 @@ import org.jetbrains.annotations.Nullable;
import dev.engine_room.flywheel.api.instance.Instance; import dev.engine_room.flywheel.api.instance.Instance;
import dev.engine_room.flywheel.api.instance.InstanceWriter; import dev.engine_room.flywheel.api.instance.InstanceWriter;
import dev.engine_room.flywheel.backend.engine.AbstractInstancer; import dev.engine_room.flywheel.backend.engine.BaseInstancer;
import dev.engine_room.flywheel.backend.engine.InstancerKey; import dev.engine_room.flywheel.backend.engine.InstancerKey;
import dev.engine_room.flywheel.backend.gl.TextureBuffer; import dev.engine_room.flywheel.backend.gl.TextureBuffer;
import dev.engine_room.flywheel.backend.gl.buffer.GlBuffer; import dev.engine_room.flywheel.backend.gl.buffer.GlBuffer;
@ -15,7 +15,7 @@ import dev.engine_room.flywheel.backend.gl.buffer.GlBufferUsage;
import dev.engine_room.flywheel.lib.math.MoreMath; import dev.engine_room.flywheel.lib.math.MoreMath;
import dev.engine_room.flywheel.lib.memory.MemoryBlock; import dev.engine_room.flywheel.lib.memory.MemoryBlock;
public class InstancedInstancer<I extends Instance> extends AbstractInstancer<I> { public class InstancedInstancer<I extends Instance> extends BaseInstancer<I> {
private final int instanceStride; private final int instanceStride;
private final InstanceWriter<I> writer; private final InstanceWriter<I> writer;
@ -109,6 +109,60 @@ public class InstancedInstancer<I extends Instance> extends AbstractInstancer<I>
return capacity > vbo.size(); return capacity > vbo.size();
} }
public void parallelUpdate() {
if (deleted.isEmpty()) {
return;
}
// Figure out which elements are to be removed.
final int oldSize = this.instances.size();
int removeCount = deleted.cardinality();
if (oldSize == removeCount) {
clear();
return;
}
final int newSize = oldSize - removeCount;
// Start from the first deleted index.
int writePos = deleted.nextSetBit(0);
if (writePos < newSize) {
// Since we'll be shifting everything into this space we can consider it all changed.
changed.set(writePos, newSize);
}
// We definitely shouldn't consider the deleted instances as changed though,
// else we might try some out of bounds accesses later.
changed.clear(newSize, oldSize);
// Punch out the deleted instances, shifting over surviving instances to fill their place.
for (int scanPos = writePos; (scanPos < oldSize) && (writePos < newSize); scanPos++, writePos++) {
// Find next non-deleted element.
scanPos = deleted.nextClearBit(scanPos);
if (scanPos != writePos) {
// Grab the old instance/handle from scanPos...
var handle = handles.get(scanPos);
I instance = instances.get(scanPos);
// ... and move it to writePos.
handles.set(writePos, handle);
instances.set(writePos, instance);
// Make sure the handle knows it's been moved
handle.index = writePos;
}
}
deleted.clear();
instances.subList(newSize, oldSize)
.clear();
handles.subList(newSize, oldSize)
.clear();
}
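The deletion pass above is a stable in-place compaction over the parallel instance/handle lists. The same shift-left idea can be sketched on a single list, assuming a BitSet of deleted indices (illustrative code, not the mod's types):

import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;

public class CompactionSketch {
    // Remove every index set in `deleted` from `items`, shifting survivors left in order.
    static <T> void compact(List<T> items, BitSet deleted) {
        int oldSize = items.size();
        int newSize = oldSize - deleted.cardinality();

        int writePos = deleted.nextSetBit(0);
        if (writePos < 0) {
            return; // nothing deleted
        }
        for (int scanPos = writePos; scanPos < oldSize && writePos < newSize; scanPos++, writePos++) {
            // Skip ahead to the next surviving element and move it into the gap.
            scanPos = deleted.nextClearBit(scanPos);
            if (scanPos != writePos) {
                items.set(writePos, items.get(scanPos));
            }
        }
        // Trim the now-unused tail.
        items.subList(newSize, oldSize).clear();
        deleted.clear();
    }

    public static void main(String[] args) {
        List<String> items = new ArrayList<>(List.of("a", "b", "c", "d", "e"));
        BitSet deleted = new BitSet();
        deleted.set(1); // "b"
        deleted.set(3); // "d"
        compact(items, deleted);
        System.out.println(items); // [a, c, e]
    }
}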
public void delete() { public void delete() {
if (vbo == null) { if (vbo == null) {
return; return;

View file

@ -27,7 +27,10 @@ abstract class RenderSystemMixin {
FogUniforms.update(); FogUniforms.update();
} }
@Inject(method = "setShaderFogShape(Lcom/mojang/blaze3d/shaders/FogShape;)V", at = @At("RETURN")) // Fabric fails to resolve the mixin in prod when the full signature is specified.
// I suspect it's because this method references a class name in its signature,
// and that needs to be remapped while the function names in RenderSystem are marked with @DontObfuscate.
@Inject(method = "setShaderFogShape", at = @At("RETURN"))
private static void flywheel$onSetFogShape(CallbackInfo ci) { private static void flywheel$onSetFogShape(CallbackInfo ci) {
FogUniforms.update(); FogUniforms.update();
} }

View file

@ -43,6 +43,14 @@ public class AtomicBitSet {
segments = new AtomicReference<>(new AtomicBitSetSegments(numSegmentsToPreallocate, numLongsPerSegment)); segments = new AtomicReference<>(new AtomicBitSetSegments(numSegmentsToPreallocate, numLongsPerSegment));
} }
public void set(int position, boolean value) {
if (value) {
set(position);
} else {
clear(position);
}
}
public void set(int position) { public void set(int position) {
int longPosition = longIndexInSegmentForPosition(position); int longPosition = longIndexInSegmentForPosition(position);

View file

@ -102,19 +102,14 @@ bool _flw_isVisible(uint instanceIndex, uint modelIndex) {
// Clamp to the texture bounds. // Clamp to the texture bounds.
// Since we're not going through a sampler out of bounds texel fetches will return 0. // Since we're not going through a sampler out of bounds texel fetches will return 0.
bounds = clamp(bounds, ivec4(0), levelSizePair); bounds = clamp(bounds, ivec4(0), levelSizePair - ivec4(1));
float depth01 = texelFetch(_flw_depthPyramid, bounds.xw, level).r; float depth01 = texelFetch(_flw_depthPyramid, bounds.xw, level).r;
float depth11 = texelFetch(_flw_depthPyramid, bounds.zw, level).r; float depth11 = texelFetch(_flw_depthPyramid, bounds.zw, level).r;
float depth10 = texelFetch(_flw_depthPyramid, bounds.zy, level).r; float depth10 = texelFetch(_flw_depthPyramid, bounds.zy, level).r;
float depth00 = texelFetch(_flw_depthPyramid, bounds.xy, level).r; float depth00 = texelFetch(_flw_depthPyramid, bounds.xy, level).r;
float depth; float depth = max(max(depth00, depth01), max(depth10, depth11));
if (_flw_cullData.useMin == 0) {
depth = max(max(depth00, depth01), max(depth10, depth11));
} else {
depth = min(min(depth00, depth01), min(depth10, depth11));
}
float depthSphere = 1. + _flw_cullData.znear / (center.z + radius); float depthSphere = 1. + _flw_cullData.znear / (center.z + radius);
@ -126,24 +121,22 @@ bool _flw_isVisible(uint instanceIndex, uint modelIndex) {
} }
void main() { void main() {
uint pageIndex = gl_WorkGroupID.x; uint pageIndex = gl_WorkGroupID.x << 1u;
if (pageIndex >= _flw_pageFrameDescriptors.length()) { if (pageIndex >= _flw_pageFrameDescriptors.length()) {
return; return;
} }
uint packedModelIndexAndCount = _flw_pageFrameDescriptors[pageIndex]; uint modelIndex = _flw_pageFrameDescriptors[pageIndex];
uint pageInstanceCount = packedModelIndexAndCount >> _FLW_PAGE_COUNT_OFFSET; uint pageValidity = _flw_pageFrameDescriptors[pageIndex + 1];
if (gl_LocalInvocationID.x >= pageInstanceCount) { if (((1u << gl_LocalInvocationID.x) & pageValidity) == 0) {
return; return;
} }
uint instanceIndex = gl_GlobalInvocationID.x; uint instanceIndex = gl_GlobalInvocationID.x;
uint modelIndex = packedModelIndexAndCount & _FLW_MODEL_INDEX_MASK;
if (_flw_isVisible(instanceIndex, modelIndex)) { if (_flw_isVisible(instanceIndex, modelIndex)) {
uint localIndex = atomicAdd(_flw_models[modelIndex].instanceCount, 1); uint localIndex = atomicAdd(_flw_models[modelIndex].instanceCount, 1);
uint targetIndex = _flw_models[modelIndex].baseInstance + localIndex; uint targetIndex = _flw_models[modelIndex].baseInstance + localIndex;
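The dispatch above now reads two uints per page, the same [modelIndex, validBits] pair that ObjectStorage writes, and each of the 32 invocations in a workgroup tests its own validity bit instead of comparing against a packed instance count. A reference-only Java rendering of that indexing, assuming 32-slot pages (this is not shader code):

public class PageDispatchSketch {
    // Two uints per page: [modelIndex, validBits]; one "invocation" per slot.
    static void cullPages(int[] pageFrameDescriptors, int pageCount) {
        for (int workGroup = 0; workGroup < pageCount; workGroup++) {
            int pageIndex = workGroup << 1;                   // gl_WorkGroupID.x << 1u
            int modelIndex = pageFrameDescriptors[pageIndex];
            int validity = pageFrameDescriptors[pageIndex + 1];

            for (int localId = 0; localId < 32; localId++) {
                if (((1 << localId) & validity) == 0) {
                    continue; // empty slot, nothing to test
                }
                int instanceIndex = workGroup * 32 + localId; // gl_GlobalInvocationID.x
                // A visibility test against modelIndex / instanceIndex would go here.
            }
        }
    }
}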

View file

@ -14,6 +14,8 @@ const float _FLW_EPSILON = 1e-5;
const uint _FLW_LOWER_10_BITS = 0x3FFu; const uint _FLW_LOWER_10_BITS = 0x3FFu;
const uint _FLW_UPPER_10_BITS = 0xFFF00000u; const uint _FLW_UPPER_10_BITS = 0xFFF00000u;
const float _FLW_LIGHT_NORMALIZER = 1. / 16.;
uint _flw_indexLut(uint index); uint _flw_indexLut(uint index);
uint _flw_indexLight(uint index); uint _flw_indexLight(uint index);
@ -43,18 +45,18 @@ bool _flw_nextLut(uint base, int coord, out uint next) {
} }
bool _flw_chunkCoordToSectionIndex(ivec3 sectionPos, out uint index) { bool _flw_chunkCoordToSectionIndex(ivec3 sectionPos, out uint index) {
uint y; uint first;
if (_flw_nextLut(0, sectionPos.x, y) || y == 0) { if (_flw_nextLut(0, sectionPos.y, first) || first == 0) {
return true; return true;
} }
uint z; uint second;
if (_flw_nextLut(y, sectionPos.y, z) || z == 0) { if (_flw_nextLut(first, sectionPos.x, second) || second == 0) {
return true; return true;
} }
uint sectionIndex; uint sectionIndex;
if (_flw_nextLut(z, sectionPos.z, sectionIndex) || sectionIndex == 0) { if (_flw_nextLut(second, sectionPos.z, sectionIndex) || sectionIndex == 0) {
return true; return true;
} }
@ -98,7 +100,7 @@ bool flw_lightFetch(ivec3 blockPos, out vec2 lightCoord) {
uvec3 blockInSectionPos = (blockPos & 0xF) + 1; uvec3 blockInSectionPos = (blockPos & 0xF) + 1;
lightCoord = vec2(_flw_lightAt(sectionOffset, blockInSectionPos)) / 15.; lightCoord = vec2(_flw_lightAt(sectionOffset, blockInSectionPos)) * _FLW_LIGHT_NORMALIZER;
return true; return true;
} }
@ -298,7 +300,7 @@ vec3 _flw_lightForDirection(uint[27] lights, vec3 interpolant, uint c00, uint c0
vec3 light = mix(light0, light1, interpolant.y); vec3 light = mix(light0, light1, interpolant.y);
// Normalize the light coords // Normalize the light coords
light.xy *= 1. / 15.; light.xy *= _FLW_LIGHT_NORMALIZER;
// Calculate the AO multiplier from the number of valid blocks // Calculate the AO multiplier from the number of valid blocks
light.z = _flw_validCountToAo(light.z); light.z = _flw_validCountToAo(light.z);
@ -351,7 +353,7 @@ bool flw_light(vec3 worldPos, vec3 normal, out FlwLightAo light) {
vec2 light0 = mix(light00, light01, interpolant.y); vec2 light0 = mix(light00, light01, interpolant.y);
vec2 light1 = mix(light10, light11, interpolant.y); vec2 light1 = mix(light10, light11, interpolant.y);
light.light = mix(light0, light1, interpolant.x) / 15.; light.light = mix(light0, light1, interpolant.x) * _FLW_LIGHT_NORMALIZER;
light.ao = 1.; light.ao = 1.;
// Lighting and AO accurate to chunk baking // Lighting and AO accurate to chunk baking
@ -410,7 +412,7 @@ bool flw_light(vec3 worldPos, vec3 normal, out FlwLightAo light) {
// Entirely flat lighting, the lowest setting and a fallback in case an invalid option is set // Entirely flat lighting, the lowest setting and a fallback in case an invalid option is set
#else #else
light.light = vec2(_flw_lightAt(sectionOffset, blockInSectionPos)) / 15.; light.light = vec2(_flw_lightAt(sectionOffset, blockInSectionPos)) * _FLW_LIGHT_NORMALIZER;
light.ao = 1.; light.ao = 1.;
#endif #endif

View file

@ -11,7 +11,6 @@ import net.minecraft.world.entity.EntityType;
import net.minecraft.world.level.block.entity.BlockEntity; import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.entity.BlockEntityType; import net.minecraft.world.level.block.entity.BlockEntityType;
// TODO: Add freezing
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public final class VisualizerRegistryImpl { public final class VisualizerRegistryImpl {
@Nullable @Nullable

View file

@ -43,7 +43,11 @@ public class ShaderLightVisualStorage {
} }
public void remove(ShaderLightVisual visual) { public void remove(ShaderLightVisual visual) {
trackers.remove(visual); var tracker = trackers.remove(visual);
if (tracker != null) {
markDirty();
}
} }
public void clear() { public void clear() {

View file

@ -7,26 +7,6 @@ import net.minecraft.client.model.geom.ModelLayers;
import net.minecraft.world.entity.EntityType; import net.minecraft.world.entity.EntityType;
import net.minecraft.world.level.block.entity.BlockEntityType; import net.minecraft.world.level.block.entity.BlockEntityType;
/**
* TODO:
* <table>
* <tr><td>{@link BlockEntityType#SIGN}</td><td> {@link net.minecraft.client.renderer.blockentity.SignRenderer SignRenderer}</td></tr>
* <tr><td>{@link BlockEntityType#PISTON}</td><td> {@link net.minecraft.client.renderer.blockentity.PistonHeadRenderer PistonHeadRenderer}</td></tr>
* <tr><td>{@link BlockEntityType#CONDUIT}</td><td> {@link net.minecraft.client.renderer.blockentity.ConduitRenderer ConduitRenderer}</td></tr>
* <tr><td>{@link BlockEntityType#ENCHANTING_TABLE}</td><td> {@link net.minecraft.client.renderer.blockentity.EnchantTableRenderer EnchantTableRenderer}</td></tr>
* <tr><td>{@link BlockEntityType#LECTERN}</td><td> {@link net.minecraft.client.renderer.blockentity.LecternRenderer LecternRenderer}</td></tr>
* <tr><td>{@link BlockEntityType#MOB_SPAWNER}</td><td> {@link net.minecraft.client.renderer.blockentity.SpawnerRenderer SpawnerRenderer}</td></tr>
* <tr><td>{@link BlockEntityType#BED}</td><td> {@link net.minecraft.client.renderer.blockentity.BedRenderer BedRenderer}</td></tr>
* <tr><td>^^ Interesting - Major vv</td></tr>
* <tr><td>{@link BlockEntityType#END_PORTAL}</td><td> {@link net.minecraft.client.renderer.blockentity.TheEndPortalRenderer TheEndPortalRenderer}</td></tr>
* <tr><td>{@link BlockEntityType#END_GATEWAY}</td><td> {@link net.minecraft.client.renderer.blockentity.TheEndGatewayRenderer TheEndGatewayRenderer}</td></tr>
* <tr><td>{@link BlockEntityType#BEACON}</td><td> {@link net.minecraft.client.renderer.blockentity.BeaconRenderer BeaconRenderer}</td></tr>
* <tr><td>{@link BlockEntityType#SKULL}</td><td> {@link net.minecraft.client.renderer.blockentity.SkullBlockRenderer SkullBlockRenderer}</td></tr>
* <tr><td>{@link BlockEntityType#BANNER}</td><td> {@link net.minecraft.client.renderer.blockentity.BannerRenderer BannerRenderer}</td></tr>
* <tr><td>{@link BlockEntityType#STRUCTURE_BLOCK}</td><td> {@link net.minecraft.client.renderer.debug.StructureRenderer StructureRenderer}</td></tr>
* <tr><td>{@link BlockEntityType#CAMPFIRE}</td><td> {@link net.minecraft.client.renderer.blockentity.CampfireRenderer CampfireRenderer}</td></tr>
* </table>
*/
public class VanillaVisuals { public class VanillaVisuals {
public static void init() { public static void init() {
builder(BlockEntityType.CHEST) builder(BlockEntityType.CHEST)

View file

@ -11,6 +11,7 @@ val api = sourceSets.create("api")
val lib = sourceSets.create("lib") val lib = sourceSets.create("lib")
val backend = sourceSets.create("backend") val backend = sourceSets.create("backend")
val main = sourceSets.getByName("main") val main = sourceSets.getByName("main")
val testMod = sourceSets.create("testMod")
transitiveSourceSets { transitiveSourceSets {
compileClasspath = main.compileClasspath compileClasspath = main.compileClasspath
@ -29,6 +30,9 @@ transitiveSourceSets {
sourceSet(main) { sourceSet(main) {
implementation(api, lib, backend) implementation(api, lib, backend)
} }
sourceSet(testMod) {
rootCompile()
}
createCompileConfigurations() createCompileConfigurations()
} }
@ -39,6 +43,7 @@ platform {
setupLoomMod(api, lib, backend, main) setupLoomMod(api, lib, backend, main)
setupLoomRuns() setupLoomRuns()
setupFatJar(api, lib, backend, main) setupFatJar(api, lib, backend, main)
setupTestMod(testMod)
} }
jarSets { jarSets {

View file

@ -0,0 +1,29 @@
package dev.engine_room.flywheel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongepowered.asm.mixin.MixinEnvironment;
import net.fabricmc.api.ClientModInitializer;
import net.fabricmc.fabric.api.client.event.lifecycle.v1.ClientLifecycleEvents;
import net.fabricmc.loader.api.FabricLoader;
public class FlywheelTestModClient implements ClientModInitializer {
public static final String NAME = "Flywheel Test Mod";
private static final Logger LOGGER = LoggerFactory.getLogger(NAME);
@Override
public void onInitializeClient() {
LOGGER.info("Starting {} on EnvType: {}", NAME, FabricLoader.getInstance()
.getEnvironmentType());
ClientLifecycleEvents.CLIENT_STARTED.register(client -> {
LOGGER.info("Running mixin audit");
MixinEnvironment.getCurrentEnvironment()
.audit();
LOGGER.info("Stopping client");
client.stop();
});
}
}

View file

@ -0,0 +1,13 @@
{
"schemaVersion": 1,
"id" : "${mod_id}_testmod",
"name": "${mod_name} Test Mod",
"version": "1.0.0",
"environment": "*",
"license": "${mod_license}",
"entrypoints": {
"client": [
"dev.engine_room.flywheel.FlywheelTestModClient"
]
}
}

View file

@ -11,6 +11,7 @@ val api = sourceSets.create("api")
val lib = sourceSets.create("lib") val lib = sourceSets.create("lib")
val backend = sourceSets.create("backend") val backend = sourceSets.create("backend")
val main = sourceSets.getByName("main") val main = sourceSets.getByName("main")
val testMod = sourceSets.create("testMod")
transitiveSourceSets { transitiveSourceSets {
compileClasspath = main.compileClasspath compileClasspath = main.compileClasspath
@ -29,6 +30,9 @@ transitiveSourceSets {
sourceSet(main) { sourceSet(main) {
compile(api, lib, backend) compile(api, lib, backend)
} }
sourceSet(testMod) {
rootCompile()
}
createCompileConfigurations() createCompileConfigurations()
} }
@ -39,6 +43,7 @@ platform {
setupLoomMod(api, lib, backend, main) setupLoomMod(api, lib, backend, main)
setupLoomRuns() setupLoomRuns()
setupFatJar(api, lib, backend, main) setupFatJar(api, lib, backend, main)
setupTestMod(testMod)
} }
jarSets { jarSets {

View file

@ -37,7 +37,6 @@ mandatory = false
versionRange = "[0.6.0-beta.2,)" versionRange = "[0.6.0-beta.2,)"
side = "CLIENT" side = "CLIENT"
# Simulates a breaks/incompatible dependency
[[dependencies.${mod_id}]] [[dependencies.${mod_id}]]
modId = "embeddium" modId = "embeddium"
type = "incompatible" type = "incompatible"

View file

@ -0,0 +1,33 @@
package dev.engine_room.flywheel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongepowered.asm.mixin.MixinEnvironment;
import net.minecraft.client.Minecraft;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.TickEvent;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.loading.FMLLoader;
@Mod("flywheel_testmod")
public class FlywheelTestModClient {
public static final String NAME = "Flywheel Test Mod";
private static final Logger LOGGER = LoggerFactory.getLogger(NAME);
public FlywheelTestModClient() {
LOGGER.info("Starting {} on Dist: {}", NAME, FMLLoader.getDist());
MinecraftForge.EVENT_BUS.addListener((TickEvent.ClientTickEvent e) -> {
if (e.phase == TickEvent.Phase.END) {
LOGGER.info("Running mixin audit");
MixinEnvironment.getCurrentEnvironment()
.audit();
LOGGER.info("Stopping client");
Minecraft.getInstance()
.stop();
}
});
}
}

View file

@ -0,0 +1,8 @@
modLoader = "javafml"
loaderVersion = "[0,)"
license = "${mod_license}"
[[mods]]
modId = "${mod_id}_testmod"
version = "1.0.0"
displayName = "${mod_name} Test Mod"