From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 From: Riley Park Date: Tue, 16 Jul 2024 14:55:23 -0700 Subject: [PATCH] Bundle spark diff --git a/build.gradle.kts b/build.gradle.kts index 1a734293c9416f13324bb0edf8f950c9029f8bc4..421f6b3dc8890d63d2e7aa774d0bf8f7e15890ab 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -61,6 +61,10 @@ dependencies { implementation("io.papermc:reflection-rewriter-runtime:$reflectionRewriterVersion") implementation("io.papermc:reflection-rewriter-proxy-generator:$reflectionRewriterVersion") // Paper end - Remap reflection + // Paper start - spark + implementation("me.lucko:spark-api:0.1-20240720.200737-2") + implementation("me.lucko:spark-paper:1.10.84-20240720.204128-1") + // Paper end - spark } paperweight { diff --git a/src/main/java/io/papermc/paper/SparksFly.java b/src/main/java/io/papermc/paper/SparksFly.java new file mode 100644 index 0000000000000000000000000000000000000000..19ee43e1ca053574a0151b4c43b01972183657e6 --- /dev/null +++ b/src/main/java/io/papermc/paper/SparksFly.java @@ -0,0 +1,200 @@ +package io.papermc.paper; + +import io.papermc.paper.configuration.GlobalConfiguration; +import io.papermc.paper.plugin.entrypoint.classloader.group.PaperPluginClassLoaderStorage; +import io.papermc.paper.plugin.provider.classloader.ConfiguredPluginClassLoader; +import io.papermc.paper.plugin.provider.classloader.PaperClassLoaderStorage; +import io.papermc.paper.util.MCUtil; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; +import me.lucko.spark.paper.api.Compatibility; +import me.lucko.spark.paper.api.PaperClassLookup; +import me.lucko.spark.paper.api.PaperScheduler; +import me.lucko.spark.paper.api.PaperSparkModule; +import net.kyori.adventure.text.Component; +import net.kyori.adventure.text.format.TextColor; +import net.minecraft.util.ExceptionCollector; +import org.bukkit.Server; +import org.bukkit.command.Command; +import org.bukkit.command.CommandSender; 
+import org.bukkit.craftbukkit.CraftServer; + +// It's like electricity. +public final class SparksFly { + public static final String ID = "spark"; + public static final String COMMAND_NAME = "spark"; + + private static final String PREFER_SPARK_PLUGIN_PROPERTY = "paper.preferSparkPlugin"; + + private static final int SPARK_YELLOW = 0xffc93a; + + private final Logger logger; + private final PaperSparkModule spark; + + private boolean enabled; + private boolean disabledInConfigurationWarningLogged; + + public SparksFly(final Server server) { + this.logger = Logger.getLogger(ID); + this.logger.log(Level.INFO, "This server bundles the spark profiler. For more information please visit https://docs.papermc.io/paper/profiling"); + this.spark = PaperSparkModule.create(Compatibility.VERSION_1_0, server, this.logger, new PaperScheduler() { + @Override + public void executeAsync(final Runnable runnable) { + MCUtil.scheduleAsyncTask(this.catching(runnable, "asynchronous")); + } + + @Override + public void executeSync(final Runnable runnable) { + MCUtil.ensureMain(this.catching(runnable, "synchronous")); + } + + private Runnable catching(final Runnable runnable, final String type) { + return () -> { + try { + runnable.run(); + } catch (final Throwable t) { + SparksFly.this.logger.log(Level.SEVERE, "An exception was encountered while executing a " + type + " spark task", t); + } + }; + } + }, new PaperClassLookup() { + @Override + public Class<?> lookup(final String className) throws Exception { + final ExceptionCollector<Exception> exceptions = new ExceptionCollector<>(); + try { + return Class.forName(className); + } catch (final ClassNotFoundException e) { + exceptions.add(e); + for (final ConfiguredPluginClassLoader loader : ((PaperPluginClassLoaderStorage) PaperClassLoaderStorage.instance()).getGlobalGroup().getClassLoaders()) { + try { + final Class<?> loadedClass = loader.loadClass(className, true, false, true); + if (loadedClass != null) { + return loadedClass; + } + } catch (final 
ClassNotFoundException exception) { + exceptions.add(exception); + } + } + exceptions.throwIfPresent(); + return null; + } + } + }); + } + + public void enableEarlyIfRequested() { + if (!isPluginPreferred() && shouldEnableImmediately()) { + this.enable(); + } + } + + public void enableBeforePlugins() { + if (!isPluginPreferred()) { + this.enable(); + } + } + + public void enableAfterPlugins(final Server server) { + final boolean isPluginPreferred = isPluginPreferred(); + final boolean isPluginEnabled = isPluginEnabled(server); + if (!isPluginPreferred || !isPluginEnabled) { + if (isPluginPreferred && !this.enabled) { + this.logger.log(Level.INFO, "The spark plugin has been preferred but was not loaded. The bundled spark profiler will be enabled instead."); + } + this.enable(); + } + } + + private void enable() { + if (!this.enabled) { + if (GlobalConfiguration.get().spark.enabled) { + this.enabled = true; + this.spark.enable(); + } else { + if (!this.disabledInConfigurationWarningLogged) { + this.logger.log(Level.INFO, "The spark profiler will not be enabled because it is currently disabled in the configuration."); + this.disabledInConfigurationWarningLogged = true; + } + } + } + } + + public void disable() { + if (this.enabled) { + this.spark.disable(); + this.enabled = false; + } + } + + public void registerCommandBeforePlugins(final Server server) { + if (!isPluginPreferred()) { + this.registerCommand(server); + } + } + + public void registerCommandAfterPlugins(final Server server) { + if ((!isPluginPreferred() || !isPluginEnabled(server)) && server.getCommandMap().getCommand(COMMAND_NAME) == null) { + this.registerCommand(server); + } + } + + private void registerCommand(final Server server) { + server.getCommandMap().register(COMMAND_NAME, "paper", new CommandImpl(COMMAND_NAME)); + } + + public void tickStart() { + this.spark.onServerTickStart(); + } + + public void tickEnd(final double duration) { + this.spark.onServerTickEnd(duration); + } + + void 
executeCommand(final CommandSender sender, final String[] args) { + this.spark.executeCommand(sender, args); + } + + List<String> tabComplete(final CommandSender sender, final String[] args) { + return this.spark.tabComplete(sender, args); + } + + public static boolean isPluginPreferred() { + return Boolean.getBoolean(PREFER_SPARK_PLUGIN_PROPERTY); + } + + private static boolean isPluginEnabled(final Server server) { + return server.getPluginManager().isPluginEnabled(ID); + } + + private static boolean shouldEnableImmediately() { + return GlobalConfiguration.get().spark.enableImmediately; + } + + public static final class CommandImpl extends Command { + CommandImpl(final String name) { + super(name); + this.setPermission("spark"); + } + + @Override + public boolean execute(final CommandSender sender, final String commandLabel, final String[] args) { + final SparksFly spark = ((CraftServer) sender.getServer()).spark; + if (spark.enabled) { + spark.executeCommand(sender, args); + } else { + sender.sendMessage(Component.text("The spark profiler is currently disabled.", TextColor.color(SPARK_YELLOW))); + } + return true; + } + + @Override + public List<String> tabComplete(final CommandSender sender, final String alias, final String[] args) throws IllegalArgumentException { + final SparksFly spark = ((CraftServer) sender.getServer()).spark; + if (spark.enabled) { + return spark.tabComplete(sender, args); + } + return List.of(); + } + } +} diff --git a/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java b/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java index 6b8ed8a0baaf4a57d20e57cec3400af5561ddd79..48604e7f96adc9e226e034054c5e2bad0b024eb5 100644 --- a/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java +++ b/src/main/java/io/papermc/paper/plugin/provider/source/FileProviderSource.java @@ -1,6 +1,9 @@ package io.papermc.paper.plugin.provider.source; +import com.mojang.logging.LogUtils; +import 
io.papermc.paper.SparksFly; import io.papermc.paper.plugin.PluginInitializerManager; +import io.papermc.paper.plugin.configuration.PluginMeta; import io.papermc.paper.plugin.entrypoint.EntrypointHandler; import io.papermc.paper.plugin.provider.type.PluginFileType; import org.bukkit.plugin.InvalidPluginException; @@ -17,12 +20,14 @@ import java.nio.file.attribute.BasicFileAttributes; import java.util.Set; import java.util.function.Function; import java.util.jar.JarFile; +import org.slf4j.Logger; /** * Loads a plugin provider at the given plugin jar file path. */ public class FileProviderSource implements ProviderSource<Path, Path> { + private static final Logger LOGGER = LogUtils.getClassLogger(); private final Function<Path, Path> contextChecker; private final boolean applyRemap; @@ -82,6 +87,12 @@ public class FileProviderSource implements ProviderSource<Path, Path> { ); } + final PluginMeta config = type.getConfig(file); + if ((config.getName().equals("spark") && config.getMainClass().equals("me.lucko.spark.bukkit.BukkitSparkPlugin")) && !SparksFly.isPluginPreferred()) { + LOGGER.info("The spark plugin will not be loaded as this server bundles the spark profiler."); + return; + } + type.register(entrypointHandler, file, context); } diff --git a/src/main/java/net/minecraft/server/MinecraftServer.java b/src/main/java/net/minecraft/server/MinecraftServer.java index 8160c35368fc2c52d6f4a42df27adb2ef6eb87f3..9325d6f95165a7cee00d7de736af723681cc16b4 100644 --- a/src/main/java/net/minecraft/server/MinecraftServer.java +++ b/src/main/java/net/minecraft/server/MinecraftServer.java @@ -751,6 +751,8 @@ public abstract class MinecraftServer extends ReentrantBlockableEventLoop<TickTask>