diff --git a/api-test/build.gradle b/api-test/build.gradle new file mode 100644 index 00000000..be4c8efd --- /dev/null +++ b/api-test/build.gradle @@ -0,0 +1,45 @@ +plugins { + id 'net.minecraftforge.gradle' version '5.1.+' +} + +tasks.withType(JavaCompile) { + // override, compile targeting J17 + options.release = 17 +} + +configurations { + library + implementation.extendsFrom library +} + +minecraft.runs.all { + lazyToken('minecraft_classpath') { + configurations.library.copyRecursive().resolve().collect { it.absolutePath }.join(File.pathSeparator) + } +} + +minecraft { + mappings channel: 'official', version: '1.19' + + runs { + client { + workingDirectory project.file('run') + property 'forge.enabledGameTestNamespaces', 'sparktest' + mods { + sparktest { + source sourceSets.main + } + } + } + } +} + +dependencies { + minecraft "net.minecraftforge:forge:${rootProject.forge_version}" + + compileOnly project(':spark-api') + compileOnly project(':spark-proto') + + library project(':spark-common') + runtimeOnly project(':spark-forge') +} \ No newline at end of file diff --git a/api-test/src/main/java/me/lucko/spark/test/SparkTest.java b/api-test/src/main/java/me/lucko/spark/test/SparkTest.java new file mode 100644 index 00000000..0ccefcad --- /dev/null +++ b/api-test/src/main/java/me/lucko/spark/test/SparkTest.java @@ -0,0 +1,171 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.test; + +import com.google.protobuf.CodedInputStream; +import com.mojang.brigadier.Command; +import com.mojang.brigadier.context.CommandContext; +import cpw.mods.modlauncher.api.LamdbaExceptionUtils; +import me.lucko.spark.api.SparkProvider; +import me.lucko.spark.api.profiler.Profiler; +import me.lucko.spark.api.profiler.ProfilerConfiguration; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.report.ProfilerReport; +import me.lucko.spark.api.profiler.report.ReportConfiguration; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import me.lucko.spark.api.util.ErrorHandler; +import me.lucko.spark.api.util.UploadResult; +import me.lucko.spark.proto.SparkSamplerProtos; +import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; +import net.minecraft.commands.CommandRuntimeException; +import net.minecraft.commands.CommandSourceStack; +import net.minecraft.commands.Commands; +import net.minecraft.network.chat.Component; +import net.minecraftforge.event.RegisterCommandsEvent; +import net.minecraftforge.event.server.ServerStoppingEvent; +import net.minecraftforge.eventbus.api.SubscribeEvent; +import net.minecraftforge.fml.common.Mod; +import net.minecraftforge.fml.loading.FMLPaths; +import net.minecraftforge.server.ServerLifecycleHooks; + +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; +import java.time.Duration; + +import static net.minecraft.commands.Commands.literal; + +@Mod("sparktest") +@Mod.EventBusSubscriber +public class SparkTest { + + private static Profiler profiler; + private static Path savePath; + + public SparkTest() { + // Mod loading is parallel, so we're not assured that spark will be loaded before us + // As such, get the profiler once spark loads + SparkProvider.whenLoaded(spark -> profiler = spark.profiler(12) /* Request a profiler capable of managing 12 active samplers */); + + savePath = FMLPaths.GAMEDIR.get().resolve("sparktest"); + } + + @SubscribeEvent + static void serverStop(final ServerStoppingEvent event) { + profiler.stop(); + } + + @SubscribeEvent + static void registerCommand(final RegisterCommandsEvent event) { + event.getDispatcher().register(Commands.literal("sparktest") + .then(literal("test1") + .executes(throwingCommand(SparkTest::test1))) + .then(literal("test2") + .executes(throwingCommand(SparkTest::test2)))); + } + + private static void test1(CommandContext ctx) throws Exception { + final var source = ctx.getSource(); + source.sendFailure(Component.literal("Building sampler... stand by.")); + // Create the sampler + final Profiler.Sampler sampler = profiler.createSamplerThrowing(ProfilerConfiguration.builder() + .dumper(new SpecificThreadDumper(ServerLifecycleHooks.getCurrentServer().getRunningThread())) + .grouper(ThreadGrouper.BY_NAME) + .ignoreSleeping() + .samplingInterval(12) + .forceJavaSampler() + .duration(Duration.ofSeconds(20)) + .build()); + + sampler.start(); // Start the sampler + + source.sendSuccess(Component.literal("Started sampler. Please await the results in the next 20 seconds."), false); + + // Await sampler completion and execute callback once the sampler is completed + sampler.onCompleted(ReportConfiguration.builder() + .separateParentCalls(true).build()) + .whenComplete(LamdbaExceptionUtils.rethrowBiConsumer((report, t) -> { + final SamplerData data = report.data(); + source.sendSuccess(Component.literal("Profiling done. 
Profiled threads: " + data.getThreadsList() + .stream() + .map(SparkSamplerProtos.ThreadNode::getName) + .toList()), false); + final Path path = report.saveToFile(savePath.resolve("test1.sparkprofile")); + try (final var is = Files.newInputStream(path)) { + final SamplerData fromBytes = SparkSamplerProtos.SamplerData.parseFrom(is); + final var isEqual = data.equals(fromBytes); + if (isEqual) { + source.sendSuccess(Component.literal("Results from bytes and from memory are equal!"), false); + } else { + source.sendFailure(Component.literal("Results from bytes and from memory do not match!")); + } + } + })); + } + + private static void test2(final CommandContext context) throws Exception { + final var source = context.getSource(); + source.sendFailure(Component.literal("Building sampler... Please stand by.")); + // Create the sampler + final Profiler.Sampler sampler = profiler.createSamplerThrowing(ProfilerConfiguration.builder() + .dumper(ThreadDumper.ALL) + .grouper(ThreadGrouper.AS_ONE) + .ignoreNative() + .build()); + + sampler.start(); // Start the profiler + source.sendSuccess(Component.literal("Profiler started..."), true); + Thread.sleep(1000 * 5); // Wait 5 seconds + sampler.stop(); // Stop the profiler + + // Dump the report + final ProfilerReport report = sampler.dumpReport(ReportConfiguration.onlySender("My test")); + final Path saveFile = report.saveToFile(savePath.resolve("test2.sparkprofile")); // Save the report + final UploadResult uploadResult = report.upload(); + try (final var localIs = Files.newInputStream(saveFile); + final var onlineIs = URI.create(uploadResult.getBytebinUrl()).toURL().openStream()) { + final SamplerData data = report.data(); + final CodedInputStream localCd = CodedInputStream.newInstance(localIs); + localCd.setRecursionLimit(Integer.MAX_VALUE); + final SamplerData fromLocal = SamplerData.parseFrom(localCd); + final CodedInputStream onlineCd = CodedInputStream.newInstance(onlineIs); + onlineCd.setRecursionLimit(Integer.MAX_VALUE); + final SamplerData fromOnline = SamplerData.parseFrom(onlineCd); + if (data.equals(fromLocal) && fromLocal.equals(fromOnline)) { + source.sendSuccess(Component.literal("Results from local file, memory and Bytebin are equal!"), false); + } else { + source.sendFailure(Component.literal("Results do not match!")); + } + } + } + + private static Command throwingCommand(LamdbaExceptionUtils.Consumer_WithExceptions, Exception> consumer) { + return ctx -> { + try { + consumer.accept(ctx); + return 1; + } catch (Exception e) { + throw new CommandRuntimeException(Component.literal(e.toString())); + } + }; + } +} diff --git a/api-test/src/main/resources/META-INF/mods.toml b/api-test/src/main/resources/META-INF/mods.toml new file mode 100644 index 00000000..ed4d7b61 --- /dev/null +++ b/api-test/src/main/resources/META-INF/mods.toml @@ -0,0 +1,10 @@ +modLoader="javafml" +loaderVersion="[34,)" +authors="Luck" +license="GPLv3" + +[[mods]] +modId="sparktest" +displayName="sparktest" +version="1.0.0" +description="Spark testing" \ No newline at end of file diff --git a/gradle.properties b/gradle.properties index 1c3cd0da..92c9098e 100644 --- a/gradle.properties +++ b/gradle.properties @@ -2,4 +2,8 @@ org.gradle.jvmargs=-Xmx2G org.gradle.parallel=true # thanks, forge -org.gradle.daemon=false \ No newline at end of file +org.gradle.daemon=false + +forge_version=1.19.2-43.0.0 + +api_version=1.0.0 diff --git a/settings.gradle b/settings.gradle index 5dd95828..7eaa0e55 100644 --- a/settings.gradle +++ b/settings.gradle @@ -4,12 +4,17 @@ 
pluginManagement { name = 'Fabric' url = 'https://maven.fabricmc.net/' } + maven { + name = 'Forge' + url = "https://maven.minecraftforge.net" + } gradlePluginPortal() } } rootProject.name = 'spark' include ( + 'spark-proto', 'spark-api', 'spark-common', 'spark-bukkit', @@ -22,5 +27,8 @@ include ( 'spark-fabric', 'spark-nukkit', 'spark-waterdog', - 'spark-minestom' + 'spark-minestom', + + // A Forge project for testing the API + 'api-test' ) diff --git a/spark-api/build.gradle b/spark-api/build.gradle index 0fbe9e1a..4b21a1b1 100644 --- a/spark-api/build.gradle +++ b/spark-api/build.gradle @@ -2,27 +2,47 @@ plugins { id 'maven-publish' } -version = '0.1-SNAPSHOT' +version = api_version +group = 'me.lucko.spark' +archivesBaseName = 'api' dependencies { - compileOnly 'org.checkerframework:checker-qual:3.8.0' - compileOnly 'org.jetbrains:annotations:20.1.0' + api project(':spark-proto') + compileOnly 'org.jetbrains:annotations:23.0.0' + compileOnly 'org.checkerframework:checker-qual:3.22.1' + compileOnly 'com.google.errorprone:error_prone_annotations:2.6.0' } license { header = project.file('HEADER.txt') } +java.withSourcesJar() +java.withJavadocJar() + +jar { + from file('LICENSE.txt') + manifest.attributes([ + "Specification-Title" : 'SparkAPI', + "Specification-Vendor" : 'Lucko', + "Specification-Version" : '1', // We are version 1 of ourselves + "Implementation-Title" : 'SparkAPI', + "Implementation-Version" : api_version, + "Implementation-Vendor" : 'Lucko', + "Implementation-Timestamp": new Date().format("yyyy-MM-dd'T'HH:mm:ssZ") + ]) +} + publishing { - //repositories { - // maven { - // url = 'https://oss.sonatype.org/content/repositories/snapshots' - // credentials { - // username = sonatypeUsername - // password = sonatypePassword - // } - // } - //} + repositories { + maven { + url = 'https://oss.sonatype.org/content/repositories/snapshots' + credentials { + username = findProperty('sonatypeUsername') ?: '' + password = findProperty('sonatypePassword') ?: '' + } + } + } publications { mavenJava(MavenPublication) { from components.java @@ -33,4 +53,4 @@ publishing { } } } -} +} \ No newline at end of file diff --git a/spark-api/src/main/java/me/lucko/spark/api/Spark.java b/spark-api/src/main/java/me/lucko/spark/api/Spark.java index 653eb536..49a061f5 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/Spark.java +++ b/spark-api/src/main/java/me/lucko/spark/api/Spark.java @@ -26,12 +26,19 @@ package me.lucko.spark.api; import me.lucko.spark.api.gc.GarbageCollector; +import me.lucko.spark.api.heap.HeapAnalysis; +import me.lucko.spark.api.ping.PingStatistics; +import me.lucko.spark.api.profiler.Profiler; +import me.lucko.spark.api.profiler.ProfilerConfigurationBuilder; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; import me.lucko.spark.api.statistic.types.DoubleStatistic; import me.lucko.spark.api.statistic.types.GenericStatistic; - +import me.lucko.spark.api.util.StreamSupplier; +import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata.DataAggregator; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; +import org.jetbrains.annotations.ApiStatus; import org.jetbrains.annotations.Unmodifiable; import java.util.Map; @@ -42,9 +49,20 @@ /** * The spark API. + * @see #get() */ public interface Spark { + /** + * Gets the singleton spark API instance. 
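+     * <p>A minimal usage sketch (assumes spark has finished loading on the platform):
+     * <pre>{@code
+     * Spark spark = Spark.get();
+     * Profiler profiler = spark.profiler(2); // a profiler managing at most 2 active samplers
+     * }</pre>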
+ * + * @return the spark API instance + * @see SparkProvider#get() + */ + static @NonNull Spark get() { + return SparkProvider.get(); + } + /** * Gets the CPU usage statistic for the current process. * @@ -84,4 +102,53 @@ public interface Spark { */ @NonNull @Unmodifiable Map gc(); -} + /** + * Creates a thread finder. + * + * @return a thread finder + */ + @NonNull StreamSupplier threadFinder(); + + /** + * Creates a new {@link ProfilerConfigurationBuilder profiler configuration builder}. + * + * @return the builder + */ + @NonNull ProfilerConfigurationBuilder configurationBuilder(); + + /** + * Creates a new {@link Profiler profiler}. + * + * @param maxSamplers the maximum amount of active samplers the profiler can manage + * @return the profiler + * @throws IllegalArgumentException if {@code maxSamplers <= 0} + */ + @NonNull Profiler profiler(int maxSamplers); + + /** + * Gets a {@link HeapAnalysis} instance. + * + * @return the heap analysis instance + */ + @NonNull HeapAnalysis heapAnalysis(); + + /** + * Gets a {@link PingStatistics} instance. + * + * @return the ping statistics instance, or {@code null} if the platform cannot provide that info + */ + @Nullable PingStatistics ping(); + + /** + * Gets the {@link ThreadGrouper} associated with a Proto {@link DataAggregator.ThreadGrouper}. + * + * @param type the Proto type + * @return the grouper + * @see ThreadGrouper#BY_POOL + * @see ThreadGrouper#BY_NAME + * @see ThreadGrouper#AS_ONE + * @throws AssertionError if the type is {@link DataAggregator.ThreadGrouper#UNRECOGNIZED unknown}. + */ + @ApiStatus.Internal + @NonNull ThreadGrouper grouper(DataAggregator.ThreadGrouper type); +} \ No newline at end of file diff --git a/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java b/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java index c8a5520d..91ea648a 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java +++ b/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java @@ -27,11 +27,18 @@ import org.checkerframework.checker.nullness.qual.NonNull; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Consumer; + /** * Singleton provider for {@link Spark}. */ +@SuppressWarnings("unused") public final class SparkProvider { + private static final List> WHEN_LOADED = new CopyOnWriteArrayList<>(); + private static final List WHEN_UNLOADED = new CopyOnWriteArrayList<>(); private static Spark instance; /** @@ -47,8 +54,34 @@ public final class SparkProvider { return instance; } + /** + * Registers a listener called when spark is loaded. + * + * @param listener the listener + */ + public static void whenLoaded(Consumer listener) { + WHEN_LOADED.add(listener); + } + + /** + * Registers a listener called when spark is unloaded. 
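+     * <p>For example (a sketch; {@code cachedSpark} is a hypothetical field held by the caller):
+     * <pre>{@code
+     * SparkProvider.whenUnloaded(() -> cachedSpark = null);
+     * }</pre>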
+ * + * @param listener the listener + */ + public static void whenUnloaded(Runnable listener) { + WHEN_UNLOADED.add(listener); + } + static void set(Spark impl) { SparkProvider.instance = impl; + // If null, we are unregistered + if (impl == null) { + WHEN_UNLOADED.forEach(Runnable::run); + } + // If non-null we are registered + else { + WHEN_LOADED.forEach(cons -> cons.accept(impl)); + } } private SparkProvider() { diff --git a/spark-api/src/main/java/me/lucko/spark/api/heap/HeapAnalysis.java b/spark-api/src/main/java/me/lucko/spark/api/heap/HeapAnalysis.java new file mode 100644 index 00000000..c8f9e92f --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/heap/HeapAnalysis.java @@ -0,0 +1,61 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.heap; + +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import me.lucko.spark.api.Spark; +import me.lucko.spark.api.util.Sender; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.nio.file.Path; + +/** + * Utility interface used for heap analysis. + * + * @see Spark#heapAnalysis() + */ +public interface HeapAnalysis { + + /** + * Creates a summary of the heap. + * + * @param sender the sender of the report + * @return the report + */ + @NotNull + HeapSummaryReport summary(@Nullable Sender sender); + + /** + * Creates a heap dump at the given output path. + * + * @param outputPath the path to write the snapshot to + * @param liveOnly if true dump only live objects i.e. objects that are reachable from others + */ + @NotNull + @CanIgnoreReturnValue + Path dumpHeap(Path outputPath, boolean liveOnly) throws Exception; +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java b/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java new file mode 100644 index 00000000..615b909e --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java @@ -0,0 +1,69 @@ +/* + * This file is part of spark, licensed under the MIT License. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.heap; + +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import me.lucko.spark.api.util.Sender; +import me.lucko.spark.api.util.UploadResult; +import me.lucko.spark.proto.SparkHeapProtos; +import org.jetbrains.annotations.NotNull; + +import java.io.IOException; +import java.nio.file.Path; + +/** + * Represents the result of a heap summary. + * + * @see HeapAnalysis#summary(Sender) + */ +public interface HeapSummaryReport { + /** + * Uploads this report online. + * + * @return the result of the upload + */ + @NotNull + UploadResult upload() throws IOException; + + /** + * Gets the data of this report + * + * @return the data + */ + @NotNull + SparkHeapProtos.HeapData data(); + + /** + * Saves this report to a local file. + * + * @param path the path to save to + * @return the {@code path} + * @throws IOException if an exception occurred + */ + @NotNull + @CanIgnoreReturnValue + Path saveToFile(Path path) throws IOException; +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/ping/PingStatistics.java b/spark-api/src/main/java/me/lucko/spark/api/ping/PingStatistics.java new file mode 100644 index 00000000..8e61e35f --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/ping/PingStatistics.java @@ -0,0 +1,63 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.ping; + +import me.lucko.spark.api.Spark; +import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; +import org.checkerframework.checker.nullness.qual.Nullable; +import org.jetbrains.annotations.NotNull; + +/** + * Utility interface used for ping analysis. + * + * @see Spark#ping() + */ +public interface PingStatistics { + /** + * Queries a summary of current player pings. + * + * @return a summary of current pings + */ + @NotNull + PingSummary getSummary(); + + /** + * Gets the ping average. + * + * @return the average + */ + @NotNull + DoubleAverageInfo getAverage(); + + /** + * Queries the ping of a given player. + * + * @param playerName the name of the player + * @return the ping, if available + */ + @Nullable + PlayerPing query(String playerName); +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java b/spark-api/src/main/java/me/lucko/spark/api/ping/PingSummary.java similarity index 53% rename from spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java rename to spark-api/src/main/java/me/lucko/spark/api/ping/PingSummary.java index 024d27d9..7d3723a4 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java +++ b/spark-api/src/main/java/me/lucko/spark/api/ping/PingSummary.java @@ -1,24 +1,29 @@ /* - * This file is part of spark. + * This file is part of spark, licensed under the MIT License. * * Copyright (c) lucko (Luck) * Copyright (c) contributors * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
*/ -package me.lucko.spark.common.monitor.ping; +package me.lucko.spark.api.ping; import java.util.Arrays; diff --git a/spark-api/src/main/java/me/lucko/spark/api/ping/PlayerPing.java b/spark-api/src/main/java/me/lucko/spark/api/ping/PlayerPing.java new file mode 100644 index 00000000..3c1027d8 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/ping/PlayerPing.java @@ -0,0 +1,67 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.ping; + +import java.util.Objects; + +public final class PlayerPing { + private final String name; + private final int ping; + + public PlayerPing(String name, int ping) { + this.name = name; + this.ping = ping; + } + + public String name() { + return this.name; + } + + public int ping() { + return this.ping; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PlayerPing that = (PlayerPing) o; + return ping == that.ping && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name, ping); + } + + @Override + public String toString() { + return "PlayerPing{" + + "name='" + name + '\'' + + ", ping=" + ping + + '}'; + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java new file mode 100644 index 00000000..14649f4a --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java @@ -0,0 +1,193 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.profiler; + +import me.lucko.spark.api.profiler.report.ProfilerReport; +import me.lucko.spark.api.profiler.report.ReportConfiguration; +import me.lucko.spark.api.util.ErrorHandler; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.jetbrains.annotations.Unmodifiable; + +import java.util.List; +import java.util.concurrent.CompletableFuture; + +/** + * The base interface of profilers.
+ * Profilers monitor the activity of the JVM, using {@link Sampler samplers}. + * + * @see me.lucko.spark.api.Spark#profiler(int) + */ +public interface Profiler { + /** + * Generates a new {@link Sampler}.
+ * Note: the sampler is not started by default, use {@link Sampler#start()}.
+     * This method is thread-safe. <br><br>
+     *
+     * Contracts:
+     * <ul>
+     *     <li>If this method returns {@code null}, then the {@code errorReporter} must have an error
+     *     reported. Moreover, the error will be reported before the method finishes its execution.</li>
+     * </ul>
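+     *
+     * <p>A minimal creation sketch (the error handler here simply rethrows, mirroring
+     * {@link #createSamplerThrowing(ProfilerConfiguration)}):
+     * <pre>{@code
+     * Sampler sampler = profiler.createSampler(
+     *         ProfilerConfiguration.builder().duration(Duration.ofSeconds(30)).build(),
+     *         ErrorHandler.throwing(CreationException::new));
+     * }</pre>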
+     *
+     * @param configuration the configuration to use for the profiler
+     * @param errorReporter a consumer that reports any errors encountered in the creation of the sampler
+     * @return the sampler, or {@code null} if a validation error was caught
+     */
+    @Nullable
+    Sampler createSampler(ProfilerConfiguration configuration, ErrorHandler errorReporter);
+
+    /**
+     * Generates a new {@link Sampler}, throwing a {@link CreationException} if an error occurred creating the sampler.
+     *
+     * @param configuration the configuration to use for the profiler
+     * @return the sampler
+     * @see #createSampler(ProfilerConfiguration, ErrorHandler)
+     */
+    @NotNull
+    @SuppressWarnings("RedundantThrows")
+    default Sampler createSamplerThrowing(ProfilerConfiguration configuration) throws CreationException {
+        // noinspection ConstantConditions Exception will be thrown before method finishes execution
+        return createSampler(configuration, ErrorHandler.throwing(CreationException::new));
+    }
+
+    /**
+     * Gets the active samplers of this profiler.
+     *
+     * @return the active samplers
+     */
+    @Unmodifiable
+    List<Sampler> activeSamplers();
+
+    /**
+     * Gets the maximum number of samplers managed by this profiler.
+     *
+     * @return the maximum number of samplers
+     */
+    int maxSamplers();
+
+    /**
+     * Stops this profiler and any {@link #activeSamplers() active children}. <br>
+     * Note that {@link Sampler#onCompleted() completion callbacks} will not be completed.
+     *
+     * @see Sampler#stop()
+     */
+    void stop();
+
+    /**
+     * Represents a sampler used for profiling.
+     */
+    interface Sampler {
+        /**
+         * The minimum number of seconds a sampler may run for.
+         */
+        int MINIMUM_DURATION = 10;
+
+        /**
+         * Starts the sampler.
+         */
+        void start();
+
+        /**
+         * Stops the sampler. <br>
+         * Note that {@link #onCompleted() completion callbacks} will not be completed.
+         */
+        void stop();
+
+        /**
+         * Gets the time when the sampler started (unix timestamp in millis).
+         *
+         * @return the start time
+         */
+        long getStartTime();
+
+        /**
+         * Gets the time when the sampler should automatically stop (unix timestamp in millis).
+         *
+         * @return the end time, or -1 if undefined
+         */
+        long getAutoEndTime();
+
+        /**
+         * Gets a future that encapsulates the completion of the sampler, containing the report. <br>
+         * Note: this future will not be completed unless this sampler is configured to automatically stop.
+         *
+         * @param configuration the configuration to use for generating the report
+         * @return a future
+         * @see #onCompleted()
+         */
+        CompletableFuture<ProfilerReport> onCompleted(ReportConfiguration configuration);
+
+        /**
+         * Gets a future that encapsulates the completion of the sampler, containing the sampler. <br>
+         * Note: this future will not be completed unless this sampler is configured to automatically stop.
+         *
+         * @return a future
+         * @see #onCompleted(ReportConfiguration)
+         */
+        CompletableFuture<Sampler> onCompleted();
+
+        /**
+         * Dumps the report of the sampler. <br>
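+         * <p>A typical flow (sketch; the sender name is illustrative):
+         * <pre>{@code
+         * sampler.stop();
+         * ProfilerReport report = sampler.dumpReport(ReportConfiguration.onlySender("console"));
+         * }</pre>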
+ * Note: make sure to {@link #stop() stop} the sampler before generating the report. + * + * @param configuration the configuration to use for generating the report + * @return the report of the sampler + */ + ProfilerReport dumpReport(ReportConfiguration configuration); + + /** + * Checks if this sampler is an async sampler. + * + * @return if this sampler is an async sampler + */ + boolean isAsync(); + } + + /** + * Represents an exception that may occur during the creation of a {@link Sampler}. + * @see #createSamplerThrowing(ProfilerConfiguration) + */ + class CreationException extends Exception { + private final ErrorHandler.ErrorType type; + + public CreationException(ErrorHandler.ErrorType type, String message) { + super(message); + this.type = type; + } + + @Override + public String getMessage() { + return type + ": " + super.getMessage(); + } + + /** + * Gets the type of this exception. + * @return the type of this exception + */ + public ErrorHandler.ErrorType getType() { + return type; + } + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java new file mode 100644 index 00000000..b9cba658 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java @@ -0,0 +1,109 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.profiler; + +import me.lucko.spark.api.SparkProvider; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import org.jetbrains.annotations.Nullable; + +import java.time.Duration; + +/** + * Configuration for {@link Profiler profilers}. + */ +public interface ProfilerConfiguration { + static ProfilerConfigurationBuilder builder() { + return SparkProvider.get().configurationBuilder(); + } + + /** + * The default profiler configuration + */ + ProfilerConfiguration DEFAULT = builder().build(); + + /** + * Get the interval (in millis) of when the profiler should take samples. + * + * @return the sample interval + */ + double getInterval(); + + /** + * Get if sleeping threads should be ignored. + * + * @return if sleeping threads are ignored + */ + boolean ignoreSleeping(); + + /** + * Get if native threads should be ignored. 
+     *
+     * @return if native threads are ignored
+     */
+    boolean ignoreNative();
+
+    /**
+     * Get if the native Java sampler should be used.
+     *
+     * @return if the native Java sampler is used
+     */
+    boolean forceJavaSampler();
+
+    /**
+     * Minimum duration (in millis) a tick has to take in order to be recorded.
+     * If this value is below 0, all ticks will be recorded.
+     *
+     * @return the minimum tick duration
+     */
+    int getMinimumTickDuration();
+
+    /**
+     * Get how long the profiler should run. If the duration is null, the profiler runs indefinitely.
+     *
+     * @return the duration of the profile, or null if indefinite
+     */
+    @Nullable
+    Duration getDuration();
+
+    /**
+     * Get the choice of which dumper to use (i.e. ALL, Regex or Specific).
+     * If no dumper is defined, ALL is used.
+     *
+     * @return the thread dumper choice
+     */
+    @Nullable
+    ThreadDumper getDumper();
+
+    /**
+     * Get the choice of which thread grouper ({@link ThreadGrouper#AS_ONE}, {@link ThreadGrouper#BY_NAME},
+     * {@link ThreadGrouper#BY_POOL}) to use for this profiler.
+     * If the grouper is null, BY_POOL is used.
+     *
+     * @return the thread grouper choice
+     */
+    @Nullable
+    ThreadGrouper getGrouper();
+}
diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java
new file mode 100644
index 00000000..ba464eae
--- /dev/null
+++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java
@@ -0,0 +1,115 @@
+/*
+ * This file is part of spark, licensed under the MIT License.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package me.lucko.spark.api.profiler;
+
+import com.google.errorprone.annotations.CanIgnoreReturnValue;
+import com.google.errorprone.annotations.CheckReturnValue;
+import me.lucko.spark.api.Spark;
+import me.lucko.spark.api.profiler.dumper.ThreadDumper;
+import me.lucko.spark.api.profiler.thread.ThreadGrouper;
+import org.jetbrains.annotations.Nullable;
+
+import java.time.Duration;
+
+/**
+ * A builder for {@link ProfilerConfiguration profiler configurations}.
+ *
+ * @see Spark#configurationBuilder()
+ */
+@CanIgnoreReturnValue
+@SuppressWarnings("UnusedReturnValue")
+public interface ProfilerConfigurationBuilder {
+    /**
+     * Set the sampling interval to a given value, or 4 if the value is below 0. <br>
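+     * <p>A builder sketch (values are illustrative):
+     * <pre>{@code
+     * ProfilerConfiguration config = ProfilerConfiguration.builder()
+     *         .samplingInterval(10)
+     *         .duration(Duration.ofSeconds(30))
+     *         .build();
+     * }</pre>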
+     * Note: the interval is in milliseconds.
+     *
+     * @param samplingInterval the interval
+     * @return the builder instance
+     */
+    ProfilerConfigurationBuilder samplingInterval(double samplingInterval);
+
+    /**
+     * Sets the duration of the profiler.
+     *
+     * @param duration the duration
+     * @return the builder instance
+     */
+    ProfilerConfigurationBuilder duration(Duration duration);
+
+    /**
+     * Set the minimum tick duration that will be profiled.
+     * If the minimumTickDuration is lower than 0 (default is -1), all ticks will be recorded.
+     *
+     * @param minimumTickDuration the minimum tick duration
+     * @return the builder instance
+     */
+    ProfilerConfigurationBuilder minimumTickDuration(int minimumTickDuration);
+
+    /**
+     * Set the {@link ThreadGrouper grouper} used to sort the report.
+     *
+     * @param threadGrouper the grouper
+     * @return the builder instance
+     */
+    ProfilerConfigurationBuilder grouper(@Nullable ThreadGrouper threadGrouper);
+
+    /**
+     * Set the {@link ThreadDumper dumper} used to generate the report.
+     *
+     * @param threadDumper the dumper
+     * @return the builder instance
+     */
+    ProfilerConfigurationBuilder dumper(@Nullable ThreadDumper threadDumper);
+
+    /**
+     * Makes the configuration ignore sleeping threads.
+     *
+     * @return the builder instance
+     */
+    ProfilerConfigurationBuilder ignoreSleeping();
+
+    /**
+     * Makes the configuration ignore native threads.
+     *
+     * @return the builder instance
+     */
+    ProfilerConfigurationBuilder ignoreNative();
+
+    /**
+     * Forces the configuration to use a non-async Java sampler.
+     *
+     * @return the builder instance
+     */
+    ProfilerConfigurationBuilder forceJavaSampler();
+
+    /**
+     * Builds the configuration.
+     *
+     * @return the built configuration
+     */
+    @CheckReturnValue
+    ProfilerConfiguration build();
+}
diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java
new file mode 100644
index 00000000..87f474e9
--- /dev/null
+++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java
@@ -0,0 +1,60 @@
+/*
+ * This file is part of spark, licensed under the MIT License.
+ *
+ * Copyright (c) lucko (Luck)
+ * Copyright (c) contributors
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */ + +package me.lucko.spark.api.profiler.dumper; + +import java.util.Objects; +import java.util.function.Supplier; + +/** + * Utility to cache the creation of a {@link ThreadDumper} targeting + * the game (server/client) thread. + */ +public final class GameThreadDumper implements Supplier { + private Supplier threadSupplier; + private SpecificThreadDumper dumper = null; + + public GameThreadDumper() { + + } + + public GameThreadDumper(Supplier threadSupplier) { + this.threadSupplier = threadSupplier; + } + + @Override + public ThreadDumper get() { + if (this.dumper == null) { + setThread(this.threadSupplier.get()); + this.threadSupplier = null; + } + + return Objects.requireNonNull(this.dumper, "dumper"); + } + + public void setThread(Thread thread) { + this.dumper = new SpecificThreadDumper(new long[]{thread.getId()}); + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/RegexThreadDumper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/RegexThreadDumper.java new file mode 100644 index 00000000..05235859 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/RegexThreadDumper.java @@ -0,0 +1,93 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.profiler.dumper; + +import me.lucko.spark.api.SparkProvider; +import me.lucko.spark.api.util.StreamSupplier; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.lang.management.ThreadInfo; +import java.lang.management.ThreadMXBean; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; +import java.util.stream.Collectors; + +/** + * Implementation of {@link ThreadDumper} that generates data for a regex matched set of threads. 
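+ * <p>For example (a sketch; the pattern is illustrative): {@code new RegexThreadDumper(Set.of("netty.*"))}
+ * samples every live thread whose name matches the given pattern (matching is case-insensitive).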
+ */ +public final class RegexThreadDumper implements ThreadDumper { + private final StreamSupplier finder = SparkProvider.get().threadFinder(); + private final Set namePatterns; + private final Map cache = new HashMap<>(); + + public RegexThreadDumper(Set namePatterns) { + this.namePatterns = namePatterns.stream() + .map(regex -> { + try { + return Pattern.compile(regex, Pattern.CASE_INSENSITIVE); + } catch (PatternSyntaxException e) { + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); + } + + @Override + public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) { + return finder.get() + .filter(thread -> { + Boolean result = this.cache.get(thread.getId()); + if (result != null) { + return result; + } + + for (Pattern pattern : this.namePatterns) { + if (pattern.matcher(thread.getName()).matches()) { + this.cache.put(thread.getId(), true); + return true; + } + } + this.cache.put(thread.getId(), false); + return false; + }) + .map(thread -> threadBean.getThreadInfo(thread.getId(), Integer.MAX_VALUE)) + .filter(Objects::nonNull) + .toArray(ThreadInfo[]::new); + } + + @Override + public SparkSamplerProtos.SamplerMetadata.ThreadDumper getMetadata() { + return SparkSamplerProtos.SamplerMetadata.ThreadDumper.newBuilder() + .setType(SparkSamplerProtos.SamplerMetadata.ThreadDumper.Type.REGEX) + .addAllPatterns(this.namePatterns.stream().map(Pattern::pattern).collect(Collectors.toList())) + .build(); + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/SpecificThreadDumper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/SpecificThreadDumper.java new file mode 100644 index 00000000..cba018d9 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/SpecificThreadDumper.java @@ -0,0 +1,92 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.profiler.dumper; + +import me.lucko.spark.api.SparkProvider; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.lang.management.ThreadInfo; +import java.lang.management.ThreadMXBean; +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Implementation of {@link ThreadDumper} that generates data for a specific set of threads. 
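+ * <p>For example (a sketch): {@code new SpecificThreadDumper(serverThread)} restricts sampling to a
+ * single known thread, such as the main server thread.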
+ */ +public final class SpecificThreadDumper implements ThreadDumper { + private final long[] ids; + private Set threads; + private Set threadNamesLowerCase; + + public SpecificThreadDumper(Thread thread) { + this.ids = new long[] {thread.getId()}; + } + + public SpecificThreadDumper(long[] ids) { + this.ids = ids; + } + + public SpecificThreadDumper(Set names) { + this.threadNamesLowerCase = names.stream().map(String::toLowerCase).collect(Collectors.toSet()); + this.ids = SparkProvider.get().threadFinder().get() + .filter(t -> this.threadNamesLowerCase.contains(t.getName().toLowerCase())) + .mapToLong(Thread::getId) + .toArray(); + Arrays.sort(this.ids); + } + + public Set getThreads() { + if (this.threads == null) { + this.threads = SparkProvider.get().threadFinder().get() + .filter(t -> Arrays.binarySearch(this.ids, t.getId()) >= 0) + .collect(Collectors.toSet()); + } + return this.threads; + } + + public Set getThreadNames() { + if (this.threadNamesLowerCase == null) { + this.threadNamesLowerCase = getThreads().stream() + .map(t -> t.getName().toLowerCase()) + .collect(Collectors.toSet()); + } + return this.threadNamesLowerCase; + } + + @Override + public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) { + return threadBean.getThreadInfo(this.ids, Integer.MAX_VALUE); + } + + @Override + public SparkSamplerProtos.SamplerMetadata.ThreadDumper getMetadata() { + return SparkSamplerProtos.SamplerMetadata.ThreadDumper.newBuilder() + .setType(SparkSamplerProtos.SamplerMetadata.ThreadDumper.Type.SPECIFIC) + .addAllIds(Arrays.stream(this.ids).boxed().collect(Collectors.toList())) + .build(); + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/ThreadDumper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/ThreadDumper.java new file mode 100644 index 00000000..5e497aa7 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/ThreadDumper.java @@ -0,0 +1,69 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.profiler.dumper; + +import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; + +import java.lang.management.ThreadInfo; +import java.lang.management.ThreadMXBean; + +/** + * Uses the {@link ThreadMXBean} to generate {@link ThreadInfo} instances for the threads being + * sampled. 
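+ * <p>Built-in implementations include {@link #ALL}, {@link SpecificThreadDumper} and
+ * {@link RegexThreadDumper}, plus the {@link GameThreadDumper} supplier wrapper.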
+ */ +public interface ThreadDumper { + + /** + * Generates {@link ThreadInfo} data for the sampled threads. + * + * @param threadBean the thread bean instance to obtain the data from + * @return an array of generated thread info instances + */ + ThreadInfo[] dumpThreads(ThreadMXBean threadBean); + + /** + * Gets metadata about the thread dumper instance. + */ + SamplerMetadata.ThreadDumper getMetadata(); + + /** + * Implementation of {@link ThreadDumper} that generates data for all threads. + */ + ThreadDumper ALL = new ThreadDumper() { + @Override + public ThreadInfo[] dumpThreads(final ThreadMXBean threadBean) { + return threadBean.dumpAllThreads(false, false); + } + + @Override + public SamplerMetadata.ThreadDumper getMetadata() { + return SamplerMetadata.ThreadDumper.newBuilder() + .setType(SamplerMetadata.ThreadDumper.Type.ALL) + .build(); + } + }; + +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java new file mode 100644 index 00000000..8ce1dce0 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java @@ -0,0 +1,69 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.profiler.report; + +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import me.lucko.spark.api.util.UploadResult; +import me.lucko.spark.proto.SparkSamplerProtos; +import org.jetbrains.annotations.NotNull; + +import java.io.IOException; +import java.nio.file.Path; + +/** + * Represents the result of a profiler. + * + * @see me.lucko.spark.api.profiler.Profiler.Sampler#dumpReport(ReportConfiguration) + * @see me.lucko.spark.api.profiler.Profiler.Sampler#onCompleted(ReportConfiguration) + */ +public interface ProfilerReport { + /** + * Uploads this report online. + * + * @return the result of the upload + */ + @NotNull + UploadResult upload() throws IOException; + + /** + * Gets the data of this report + * + * @return the data + */ + @NotNull + SparkSamplerProtos.SamplerData data(); + + /** + * Saves this report to a local file. 
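+     * <p>For example (a sketch): {@code report.saveToFile(dir.resolve("profile.sparkprofile"))}.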
+ * + * @param path the path to save to + * @return the {@code path} + * @throws IOException if an I/O error occurs + */ + @NotNull + @CanIgnoreReturnValue + Path saveToFile(Path path) throws IOException; +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java new file mode 100644 index 00000000..2ea7938d --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java @@ -0,0 +1,86 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.profiler.report; + +import me.lucko.spark.api.profiler.thread.ThreadNode; +import me.lucko.spark.api.profiler.thread.ThreadOrder; +import me.lucko.spark.api.util.Sender; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.util.Comparator; +import java.util.UUID; + +/** + * Configuration for {@link ProfilerReport reports}. + */ +public interface ReportConfiguration { + static ReportConfigurationBuilder builder() { + return new ReportConfigurationBuilder(); + } + + static ReportConfiguration onlySender(@NotNull Sender sender) { + return builder().sender(sender).build(); + } + static ReportConfiguration onlySender(@NotNull String sender) { + return onlySender(new Sender(sender, null)); + } + static ReportConfiguration onlySender(@NotNull String sender, @Nullable UUID uuid) { + return onlySender(new Sender(sender, uuid)); + } + + /** + * Gets the ordering used by the report. + * + * @return the ordering used by the report + * @see ThreadOrder + */ + Comparator<ThreadNode> getThreadOrder(); + + /** + * Gets the sender of the report. + * + * @return the report's sender, or {@code null} if there is none + */ + @Nullable + Sender getSender(); + + /** + * If the thread viewer should separate parent calls. + * + * @return if the thread viewer should separate parent calls + */ + boolean separateParentCalls(); + + /** + * Gets the comment of the report.
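+ * <p>Comments are set via {@link ReportConfigurationBuilder#comment(String)}, e.g. {@code builder.comment("baseline run")} (the value is illustrative).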
+ * + * @return the report's comment + */ + @Nullable + String getComment(); + +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java new file mode 100644 index 00000000..666ecceb --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java @@ -0,0 +1,100 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.profiler.report; + +import me.lucko.spark.api.profiler.thread.ThreadNode; +import me.lucko.spark.api.profiler.thread.ThreadOrder; +import me.lucko.spark.api.util.Sender; +import org.checkerframework.checker.nullness.qual.NonNull; +import org.jetbrains.annotations.Nullable; + +import java.util.Comparator; +import java.util.UUID; + +public class ReportConfigurationBuilder { + private Comparator<ThreadNode> order = ThreadOrder.BY_NAME; + private Sender sender; + private boolean separateParentCalls; + private String comment; + + /** + * Sets the order used by this builder.
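+ * <p>For example, {@code builder.order(ThreadOrder.BY_TIME)} sorts threads by total time, most expensive first.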
+ * @param order the order + * @return the builder + * @see ThreadOrder + */ + public ReportConfigurationBuilder order(@NonNull Comparator<ThreadNode> order) { + this.order = order; + return this; + } + + public ReportConfigurationBuilder sender(@Nullable Sender sender) { + this.sender = sender; + return this; + } + + public ReportConfigurationBuilder sender(@NonNull String name) { + return sender(new Sender(name, null)); + } + + public ReportConfigurationBuilder sender(@NonNull String name, @Nullable UUID uuid) { + return sender(new Sender(name, uuid)); + } + + public ReportConfigurationBuilder separateParentCalls(boolean separateParentCalls) { + this.separateParentCalls = separateParentCalls; + return this; + } + + public ReportConfigurationBuilder comment(@Nullable String comment) { + this.comment = comment; + return this; + } + + public ReportConfiguration build() { + return new ReportConfiguration() { + @Override + public Comparator<ThreadNode> getThreadOrder() { + return order; + } + + @Override + public @Nullable Sender getSender() { + return sender; + } + + @Override + public boolean separateParentCalls() { + return separateParentCalls; + } + + @Override + public @Nullable String getComment() { + return comment; + } + }; + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java new file mode 100644 index 00000000..bda1ca52 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java @@ -0,0 +1,80 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.profiler.thread; + +import me.lucko.spark.api.SparkProvider; +import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata.DataAggregator; + +/** + * Function for grouping threads together. + */ +public interface ThreadGrouper { + + /** + * Implementation of {@link ThreadGrouper} that just groups by thread name. + */ + ThreadGrouper BY_NAME = SparkProvider.get().grouper(DataAggregator.ThreadGrouper.BY_NAME); + + /** + * Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool + * the thread originated from. + * + *

<p>The regex pattern used to match pools expects a digit at the end of the thread name, + * separated from the pool name with any of one or more of ' ', '-', or '#'.</p>
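+ * <p>For example, threads named {@code "Worker-1"} and {@code "Worker #2"} would both fall under the {@code "Worker"} group.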

+ */ + ThreadGrouper BY_POOL = SparkProvider.get().grouper(DataAggregator.ThreadGrouper.BY_POOL); + + /** + * Implementation of {@link ThreadGrouper} which groups all threads as one, under + * the name "All". + */ + ThreadGrouper AS_ONE = SparkProvider.get().grouper(DataAggregator.ThreadGrouper.AS_ONE); + + /** + * Gets the group for the given thread. + * + * @param threadId the id of the thread + * @param threadName the name of the thread + * @return the group + */ + String getGroup(long threadId, String threadName); + + /** + * Gets the label to use for a given group. + * + * @param group the group + * @return the label + */ + String getLabel(String group); + + /** + * Gets the proto equivalent of this grouper.
+ * If this is a custom grouper, {@link DataAggregator.ThreadGrouper#UNRECOGNIZED} should be returned. + * + * @return the proto equivalent + */ + DataAggregator.ThreadGrouper asProto(); +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadNode.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadNode.java new file mode 100644 index 00000000..e3921af2 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadNode.java @@ -0,0 +1,52 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.profiler.thread; + +/** + * Represents a thread. + */ +public interface ThreadNode { + /** + * Gets the label of this thread. + * + * @return the label + */ + String getLabel(); + + /** + * Gets the group of this thread. + * + * @return the group + */ + String getGroup(); + + /** + * Gets the total sampled time of this thread. + * + * @return the total time + */ + double getTotalTime(); +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadOrder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadOrder.java new file mode 100644 index 00000000..70d085d4 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadOrder.java @@ -0,0 +1,54 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.profiler.thread; + +import java.util.Comparator; + +/** + * Methods of ordering {@link ThreadNode}s in the output data. + */ +public enum ThreadOrder implements Comparator<ThreadNode> { + + /** + * Order by the name of the thread (alphabetically). + */ + BY_NAME { + @Override + public int compare(ThreadNode o1, ThreadNode o2) { + return o1.getLabel().compareTo(o2.getLabel()); + } + }, + + /** + * Order by the time taken by the thread (most time taken first). + */ + BY_TIME { + @Override + public int compare(ThreadNode o1, ThreadNode o2) { + return -Double.compare(o1.getTotalTime(), o2.getTotalTime()); + } + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java b/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java new file mode 100644 index 00000000..ffec10f2 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java @@ -0,0 +1,112 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.util; + +import org.jetbrains.annotations.ApiStatus; +import org.jetbrains.annotations.NotNull; + +import java.util.function.BiFunction; +import java.util.function.Function; + +/** + * Interface used for reporting errors during execution of methods. + */ +@FunctionalInterface +public interface ErrorHandler { + /** + * Accepts and reports an error. + * + * @param error the error to report + * @param message a detailed message of the error + */ + void accept(ErrorType error, String message); + + /** + * Represents the type of an error. + * + * @see #accept(ErrorType, String) + */ + enum ErrorType { + /** + * Indicates that the maximum amount of active samplers the profiler can manage has been reached. + */ + MAX_AMOUNT_REACHED, + /** + * Indicates that the platform does not support tick counting. + */ + TICK_COUNTING_NOT_SUPPORTED, + /** + * Indicates that an invalid duration was supplied for the sampler to run for.
+ * + * @see me.lucko.spark.api.profiler.Profiler.Sampler#MINIMUM_DURATION + */ + INVALID_DURATION, + + /** + * A more general error; indicates that an invalid argument for constructing the sampler has been provided.
+ * The message will include more information. + */ + INVALID_ARGUMENT, + + /** + * Represents an 'unknown' error type.
+ * The message will include more information. + */ + UNKNOWN + } + + /** + * Creates an {@link ErrorHandler} that throws exceptions. + * + * @param supplier a factory to use for creating the exceptions + * @param <T> the type of the exception + * @return the handler + */ + @NotNull + static <T extends Throwable> ErrorHandler throwing(@NotNull BiFunction<ErrorType, String, T> supplier) { + return (e, msg) -> throwAsUnchecked(supplier.apply(e, msg)); + } + + /** + * Creates an {@link ErrorHandler} that throws exceptions.
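+ * <p>For instance, {@code ErrorHandler.throwingConcat(IllegalArgumentException::new)} yields a handler that rethrows every reported error as an {@code IllegalArgumentException}.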
+ * <p>Note: the message passed to the {@code supplier} is obtained in the following way: + * {@code errorType + ": " + message} + * + * @param supplier a factory to use for creating the exceptions + * @param <T> the type of the exception + * @return the handler + */ + @NotNull + static <T extends Throwable> ErrorHandler throwingConcat(@NotNull Function<String, T> supplier) { + return throwing((e, msg) -> supplier.apply(e + ": " + msg)); + } + + @ApiStatus.Internal + @SuppressWarnings("unchecked") + static <E extends Throwable> void throwAsUnchecked(Throwable exception) throws E { + throw (E) exception; + } +} \ No newline at end of file diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/Sender.java b/spark-api/src/main/java/me/lucko/spark/api/util/Sender.java new file mode 100644 index 00000000..8b9dc7b8 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/util/Sender.java @@ -0,0 +1,72 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.util; + +import me.lucko.spark.proto.SparkProtos; +import org.jetbrains.annotations.Nullable; + +import java.util.UUID; + +/** + * Represents a sender used for online uploading of data. + */ +public class Sender { + public final String name; + /** + * The UUID of the sender. May be {@code null} if the sender is not a player. + */ + @Nullable + public final UUID uuid; + + public Sender(String name, @Nullable UUID uuid) { + this.name = name; + this.uuid = uuid; + } + public Sender(String name) { + this(name, null); + } + + /** + * Checks if this sender is a player. + * + * @return if this sender is a player + */ + public boolean isPlayer() { + return uuid != null; + } + + public SparkProtos.CommandSenderMetadata toProto() { + SparkProtos.CommandSenderMetadata.Builder proto = SparkProtos.CommandSenderMetadata.newBuilder() + .setType(isPlayer() ?
SparkProtos.CommandSenderMetadata.Type.PLAYER : SparkProtos.CommandSenderMetadata.Type.OTHER) + .setName(this.name); + + if (this.uuid != null) { + proto.setUniqueId(this.uuid.toString()); + } + + return proto.build(); + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/StreamSupplier.java b/spark-api/src/main/java/me/lucko/spark/api/util/StreamSupplier.java new file mode 100644 index 00000000..f654dcac --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/util/StreamSupplier.java @@ -0,0 +1,38 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.util; + +import java.util.stream.Stream; + +/** + * A {@link java.util.function.Supplier supplier} returning a stream of the type {@code T}. 
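+ * <p>For example, spark's {@code threadFinder()} exposes the server's live threads as a {@code StreamSupplier<Thread>}, producing a fresh {@code Stream} on each {@code get()} call.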
+ * + * @param <T> the type of the stream + */ +@FunctionalInterface +public interface StreamSupplier<T> { + Stream<T> get(); +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/UploadResult.java b/spark-api/src/main/java/me/lucko/spark/api/util/UploadResult.java new file mode 100644 index 00000000..34572629 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/util/UploadResult.java @@ -0,0 +1,43 @@ +package me.lucko.spark.api.util; + +import java.util.Objects; + +public final class UploadResult { + private final String viewerUrl, bytebinUrl; + + public UploadResult(String viewerUrl, String bytebinUrl) { + this.viewerUrl = viewerUrl; + this.bytebinUrl = bytebinUrl; + } + + /** Gets the viewer URL */ + public String getViewerUrl() { + return viewerUrl; + } + + /** Gets the Bytebin URL */ + public String getBytebinUrl() { + return bytebinUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UploadResult that = (UploadResult) o; + return Objects.equals(viewerUrl, that.viewerUrl) && Objects.equals(bytebinUrl, that.bytebinUrl); + } + + @Override + public int hashCode() { + return Objects.hash(viewerUrl, bytebinUrl); + } + + @Override + public String toString() { + return "UploadResult{" + + "viewerUrl='" + viewerUrl + '\'' + + ", bytebinUrl='" + bytebinUrl + '\'' + + '}'; + } +} diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java index 5737d3dc..b172e736 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java @@ -21,6 +21,7 @@ package me.lucko.spark.bukkit; import me.lucko.spark.api.Spark; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; import me.lucko.spark.bukkit.placeholder.SparkMVdWPlaceholders; import me.lucko.spark.bukkit.placeholder.SparkPlaceholderApi; import me.lucko.spark.common.SparkPlatform; @@ -29,7 +30,7 @@ import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.common.util.ClassSourceLookup; @@ -59,7 +60,7 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin { @Override public void onEnable() { this.audienceFactory = BukkitAudiences.create(this); - this.gameThreadDumper = new ThreadDumper.Specific(Thread.currentThread()); + this.gameThreadDumper = new SpecificThreadDumper(Thread.currentThread()); this.platform = new SparkPlatform(this); this.platform.enable(); diff --git a/spark-common/build.gradle b/spark-common/build.gradle index ce09d51e..3d417f4d 100644 --- a/spark-common/build.gradle +++ b/spark-common/build.gradle @@ -1,7 +1,3 @@ -plugins { - id 'com.google.protobuf' version '0.8.16' -} - license { exclude '**/sampler/async/jfr/**' } @@ -10,7 +6,6 @@ dependencies { api project(':spark-api') implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.1' implementation 'org.ow2.asm:asm:9.1' - implementation 'com.google.protobuf:protobuf-javalite:3.15.6' implementation 'net.bytebuddy:byte-buddy-agent:1.11.0' api('net.kyori:adventure-api:4.11.0') { exclude(module:
'adventure-bom') @@ -32,23 +27,5 @@ dependencies { compileOnly 'com.google.code.gson:gson:2.7' compileOnly 'com.google.guava:guava:19.0' compileOnly 'org.checkerframework:checker-qual:3.8.0' -} - -protobuf { - protoc { - if (System.getProperty("os.name") == "Mac OS X" && System.getProperty("os.arch") == "aarch64") { - path = '/opt/homebrew/bin/protoc' - } else { - artifact = 'com.google.protobuf:protoc:3.15.6' - } - } - generateProtoTasks { - all().each { task -> - task.builtins { - java { - option 'lite' - } - } - } - } + compileOnly 'org.jetbrains:annotations:23.0.0' } diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index f92abf36..8732ac23 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -91,6 +91,7 @@ public class SparkPlatform { private final SparkPlugin plugin; private final Configuration configuration; private final String viewerUrl; + private final String bytebinUrl; private final BytebinClient bytebinClient; private final boolean disableResponseBroadcast; private final List commandModules; @@ -112,13 +113,13 @@ public SparkPlatform(SparkPlugin plugin) { this.configuration = new Configuration(this.plugin.getPluginDirectory().resolve("config.json")); this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/"); - String bytebinUrl = this.configuration.getString("bytebinUrl", "https://bytebin.lucko.me/"); + this.bytebinUrl = this.configuration.getString("bytebinUrl", "https://bytebin.lucko.me/"); this.bytebinClient = new BytebinClient(bytebinUrl, "spark-plugin"); this.disableResponseBroadcast = this.configuration.getBoolean("disableResponseBroadcast", false); this.commandModules = ImmutableList.of( - new SamplerModule(), + new SamplerModule(this), new HealthModule(), new TickMonitoringModule(), new GcMonitoringModule(), @@ -207,6 +208,10 @@ public String getViewerUrl() { return this.viewerUrl; } + public String getBytebinUrl() { + return this.bytebinUrl; + } + public BytebinClient getBytebinClient() { return this.bytebinClient; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java index 1116b04c..5296f179 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java @@ -26,7 +26,7 @@ import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.common.util.ClassSourceLookup; diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java index 5b1ec2b9..4f382840 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java @@ -21,24 +21,38 @@ package me.lucko.spark.common.api; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.api.Spark; import me.lucko.spark.api.SparkProvider; import me.lucko.spark.api.gc.GarbageCollector; +import 
me.lucko.spark.api.heap.HeapAnalysis; +import me.lucko.spark.api.ping.PingStatistics; +import me.lucko.spark.api.profiler.Profiler; +import me.lucko.spark.api.profiler.ProfilerConfigurationBuilder; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; import me.lucko.spark.api.statistic.types.DoubleStatistic; import me.lucko.spark.api.statistic.types.GenericStatistic; +import me.lucko.spark.api.util.StreamSupplier; import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.heapdump.HeapAnalysisProvider; import me.lucko.spark.common.monitor.cpu.CpuMonitor; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; - +import me.lucko.spark.common.sampler.SamplerBuilder; +import me.lucko.spark.common.sampler.ProfilerService; +import me.lucko.spark.common.util.ThreadFinder; +import me.lucko.spark.proto.SparkSamplerProtos; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; import java.lang.reflect.Method; +import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import static me.lucko.spark.api.statistic.StatisticWindow.CpuUsage; import static me.lucko.spark.api.statistic.StatisticWindow.MillisPerTick; @@ -57,9 +71,11 @@ public class SparkApi implements Spark { } private final SparkPlatform platform; + private final HeapAnalysis heapAnalysis; public SparkApi(SparkPlatform platform) { this.platform = platform; + heapAnalysis = new HeapAnalysisProvider(platform); } @Override @@ -172,6 +188,115 @@ public DoubleAverageInfo poll(@NonNull MillisPerTick window) { return ImmutableMap.copyOf(map); } + @Override + public @NonNull StreamSupplier threadFinder() { + final ThreadFinder finder = new ThreadFinder(); + return finder::getThreads; + } + + @Override + public @NonNull ProfilerConfigurationBuilder configurationBuilder() { + return new SamplerBuilder(); + } + + @Override + public @NonNull Profiler profiler(int maxSamplers) { + return new ProfilerService(platform, maxSamplers); + } + + @Override + public @NonNull ThreadGrouper grouper(SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper type) { + switch (type) { + case AS_ONE: return new ThreadGrouper() { + private final Set seen = ConcurrentHashMap.newKeySet(); + + @Override + public String getGroup(long threadId, String threadName) { + this.seen.add(threadId); + return "root"; + } + + @Override + public String getLabel(String group) { + return "All (x" + this.seen.size() + ")"; + } + + @Override + public SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper asProto() { + return SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper.AS_ONE; + } + }; + case BY_NAME: return new ThreadGrouper() { + @Override + public String getGroup(long threadId, String threadName) { + return threadName; + } + + @Override + public String getLabel(String group) { + return group; + } + + @Override + public SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper asProto() { + return SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper.BY_NAME; + } + }; + case BY_POOL: //noinspection EnumSwitchStatementWhichMissesCases + return new ThreadGrouper() { + private /* static */ final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$"); + + // thread id -> 
group + private final Map<Long, String> cache = new ConcurrentHashMap<>(); + // group -> thread ids + private final Map<String, Set<Long>> seen = new ConcurrentHashMap<>(); + + @Override + public String getGroup(long threadId, String threadName) { + String cached = this.cache.get(threadId); + if (cached != null) { + return cached; + } + + Matcher matcher = this.pattern.matcher(threadName); + if (!matcher.matches()) { + return threadName; + } + + String group = matcher.group(1).trim(); + this.cache.put(threadId, group); + this.seen.computeIfAbsent(group, g -> ConcurrentHashMap.newKeySet()).add(threadId); + return group; + } + + @Override + public String getLabel(String group) { + int count = this.seen.getOrDefault(group, Collections.emptySet()).size(); + if (count == 0) { + return group; + } + return group + " (x" + count + ")"; + } + + @Override + public SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper asProto() { + return SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper.BY_POOL; + } + }; + default: throw new AssertionError("Unknown thread grouper!"); + } + } + + @Override + public @NonNull HeapAnalysis heapAnalysis() { + return heapAnalysis; + } + + @Override + public @Nullable PingStatistics ping() { + return platform.getPingStatistics(); + } + public static void register(Spark spark) { try { SINGLETON_SET_METHOD.invoke(null, spark); diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java index 16eadc8a..a34d3398 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java @@ -20,6 +20,7 @@ package me.lucko.spark.common.command.modules; +import me.lucko.spark.api.ping.PlayerPing; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.Arguments; import me.lucko.spark.common.command.Command; @@ -33,7 +34,7 @@ import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages; import me.lucko.spark.common.monitor.net.NetworkMonitor; import me.lucko.spark.common.monitor.ping.PingStatistics; -import me.lucko.spark.common.monitor.ping.PingSummary; +import me.lucko.spark.api.ping.PingSummary; import me.lucko.spark.common.monitor.tick.TickStatistics; import me.lucko.spark.common.util.FormatUtil; import me.lucko.spark.common.util.RollingAverage; @@ -150,7 +151,7 @@ private static void ping(SparkPlatform platform, CommandSender sender, CommandRe Set<String> players = arguments.stringFlag("player"); if (!players.isEmpty()) { for (String player : players) { - PingStatistics.PlayerPing playerPing = pingStatistics.query(player); + PlayerPing playerPing = pingStatistics.query(player); if (playerPing == null) { resp.replyPrefixed(text("Ping data is not available for '" + player + "'.")); } else { diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java index 5bd62a89..b70feb12 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java @@ -20,6 +20,7 @@ package me.lucko.spark.common.command.modules; +import me.lucko.spark.api.util.UploadResult; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.activitylog.Activity; import
me.lucko.spark.common.command.Arguments; @@ -73,6 +74,11 @@ public void registerCommands(Consumer consumer) { ); } + public static UploadResult upload(SparkPlatform platform, SparkHeapProtos.HeapData output) throws IOException { + final String key = platform.getBytebinClient().postContent(output, SPARK_HEAP_MEDIA_TYPE).key(); + return new UploadResult(FormatUtil.getBaseDomainUrl(platform.getViewerUrl()) + key, FormatUtil.getBaseDomainUrl(platform.getBytebinUrl()) + key); + } + private static void heapSummary(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { if (arguments.boolFlag("run-gc-before")) { resp.broadcastPrefixed(text("Running garbage collector...")); @@ -90,17 +96,16 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co return; } - SparkHeapProtos.HeapData output = heapDump.toProto(platform, sender); + SparkHeapProtos.HeapData output = heapDump.toProto(platform, sender.asSender()); boolean saveToFile = false; if (arguments.boolFlag("save-to-file")) { saveToFile = true; } else { try { - String key = platform.getBytebinClient().postContent(output, SPARK_HEAP_MEDIA_TYPE).key(); - String url = platform.getViewerUrl() + key; + final String url = upload(platform, output).getViewerUrl(); - resp.broadcastPrefixed(text("Heap dump summmary output:", GOLD)); + resp.broadcastPrefixed(text("Heap dump summary output:", GOLD)); resp.broadcast(text() .content(url) .color(GRAY) diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 0a80c317..4c404220 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -21,31 +21,31 @@ package me.lucko.spark.common.command.modules; import com.google.common.collect.Iterables; - +import me.lucko.spark.api.SparkProvider; +import me.lucko.spark.api.profiler.Profiler; +import me.lucko.spark.api.profiler.dumper.RegexThreadDumper; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.report.ProfilerReport; +import me.lucko.spark.api.profiler.report.ReportConfiguration; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import me.lucko.spark.api.profiler.thread.ThreadOrder; +import me.lucko.spark.api.util.UploadResult; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.activitylog.Activity; import me.lucko.spark.common.command.Arguments; import me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandModule; import me.lucko.spark.common.command.CommandResponseHandler; -import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.command.tabcomplete.CompletionSupplier; import me.lucko.spark.common.command.tabcomplete.TabCompleter; -import me.lucko.spark.common.sampler.Sampler; +import me.lucko.spark.common.sampler.ProfilerService; import me.lucko.spark.common.sampler.SamplerBuilder; -import me.lucko.spark.common.sampler.ThreadDumper; -import me.lucko.spark.common.sampler.ThreadGrouper; -import me.lucko.spark.common.sampler.ThreadNodeOrder; -import me.lucko.spark.common.sampler.async.AsyncSampler; -import me.lucko.spark.common.sampler.node.MergeMode; -import me.lucko.spark.common.tick.TickHook; -import 
me.lucko.spark.common.util.MethodDisambiguator; +import me.lucko.spark.common.util.FormatUtil; import me.lucko.spark.proto.SparkSamplerProtos; - import net.kyori.adventure.text.event.ClickEvent; import java.io.IOException; -import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; @@ -66,15 +66,15 @@ public class SamplerModule implements CommandModule { private static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler"; - /** The sampler instance currently running, if any */ - private Sampler activeSampler = null; + private final Profiler profiler; + + public SamplerModule(SparkPlatform platform) { + profiler = new ProfilerService(platform, 1); + } @Override public void close() { - if (this.activeSampler != null) { - this.activeSampler.stop(); - this.activeSampler = null; - } + profiler.stop(); } @Override @@ -95,7 +95,7 @@ public void registerCommands(Consumer consumer) { .argumentUsage("stop --comment", "comment") .argumentUsage("stop --order-by-time", null) .argumentUsage("stop --save-to-file", null) - .executor(this::profiler) + .executor((platform, sender, resp, args) -> profiler(platform, resp, args)) .tabCompleter((platform, sender, arguments) -> { if (arguments.contains("--info") || arguments.contains("--cancel")) { return Collections.emptyList(); @@ -119,7 +119,7 @@ public void registerCommands(Consumer consumer) { ); } - private void profiler(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { + private void profiler(SparkPlatform platform, CommandResponseHandler resp, Arguments arguments) { if (arguments.boolFlag("info")) { profilerInfo(resp); return; @@ -131,26 +131,17 @@ private void profiler(SparkPlatform platform, CommandSender sender, CommandRespo } if (arguments.boolFlag("stop") || arguments.boolFlag("upload")) { - profilerStop(platform, sender, resp, arguments); + profilerStop(platform, resp, arguments); return; } - profilerStart(platform, sender, resp, arguments); + profilerStart(platform, resp, arguments); } - private void profilerStart(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { + private void profilerStart(SparkPlatform platform, CommandResponseHandler resp, Arguments arguments) { + resp.broadcastPrefixed(text("Initializing a new profiler, please wait...")); + int timeoutSeconds = arguments.intFlag("timeout"); - if (timeoutSeconds != -1 && timeoutSeconds <= 10) { - resp.replyPrefixed(text("The specified timeout is not long enough for accurate results to be formed. " + - "Please choose a value greater than 10.", RED)); - return; - } - - if (timeoutSeconds != -1 && timeoutSeconds < 30) { - resp.replyPrefixed(text("The accuracy of the output will significantly improve when the profiler is able to run for longer periods. 
" + - "Consider setting a timeout value over 30 seconds.")); - } - double intervalMillis = arguments.doubleFlag("interval"); if (intervalMillis <= 0) { intervalMillis = 4; @@ -169,10 +160,10 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command threadDumper = ThreadDumper.ALL; } else { if (arguments.boolFlag("regex")) { - threadDumper = new ThreadDumper.Regex(threads); + threadDumper = new RegexThreadDumper(threads); } else { // specific matches - threadDumper = new ThreadDumper.Specific(threads); + threadDumper = new SpecificThreadDumper(threads); } } @@ -186,25 +177,10 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command } int ticksOver = arguments.intFlag("only-ticks-over"); - TickHook tickHook = null; - if (ticksOver != -1) { - tickHook = platform.getTickHook(); - if (tickHook == null) { - resp.replyPrefixed(text("Tick counting is not supported!", RED)); - return; - } - } - - if (this.activeSampler != null) { - resp.replyPrefixed(text("An active profiler is already running.")); - return; - } - - resp.broadcastPrefixed(text("Initializing a new profiler, please wait...")); SamplerBuilder builder = new SamplerBuilder(); - builder.threadDumper(threadDumper); - builder.threadGrouper(threadGrouper); + builder.dumper(threadDumper); + builder.grouper(threadGrouper); if (timeoutSeconds != -1) { builder.completeAfter(timeoutSeconds, TimeUnit.SECONDS); } @@ -213,14 +189,18 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command builder.ignoreNative(ignoreNative); builder.forceJavaSampler(forceJavaSampler); if (ticksOver != -1) { - builder.ticksOver(ticksOver, tickHook); + builder.minimumTickDuration(ticksOver); } - Sampler sampler = this.activeSampler = builder.start(platform); + final Profiler.Sampler sampler = profiler.createSampler(builder.build(), (e, msg) -> resp.replyPrefixed(text(e.toString() + ": " + msg, RED))); + if (sampler == null) // Feedback is handled in the consumer + return; + + sampler.start(); resp.broadcastPrefixed(text() .append(text("Profiler now active!", GOLD)) .append(space()) - .append(text("(" + (sampler instanceof AsyncSampler ? "async" : "built-in java") + ")", DARK_GRAY)) + .append(text("(" + (sampler.isAsync() ? "async" : "built-in java") + ")", DARK_GRAY)) .build() ); if (timeoutSeconds == -1) { @@ -229,42 +209,35 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.")); } - CompletableFuture future = this.activeSampler.getFuture(); + final CompletableFuture future = sampler.onCompleted(); // send message if profiling fails future.whenCompleteAsync((s, throwable) -> { if (throwable != null) { - resp.broadcastPrefixed(text("Profiler operation failed unexpectedly. Error: " + throwable.toString(), RED)); + resp.broadcastPrefixed(text("Profiler operation failed unexpectedly. Error: " + throwable, RED)); throwable.printStackTrace(); } }); - // set activeSampler to null when complete. - future.whenCompleteAsync((s, throwable) -> { - if (sampler == this.activeSampler) { - this.activeSampler = null; - } - }); - // await the result if (timeoutSeconds != -1) { - ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME; + ThreadOrder threadOrder = arguments.boolFlag("order-by-time") ? 
ThreadOrder.BY_TIME : ThreadOrder.BY_NAME; String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); - MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); - MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); + boolean sepPar = arguments.boolFlag("separate-parent-calls"); boolean saveToFile = arguments.boolFlag("save-to-file"); - future.thenAcceptAsync(s -> { + sampler.onCompleted(configuration(resp, comment, sepPar, threadOrder)).thenAcceptAsync(report -> { resp.broadcastPrefixed(text("The active profiler has completed! Uploading results...")); - handleUpload(platform, resp, s, threadOrder, comment, mergeMode, saveToFile); + handleUpload(platform, resp, report, saveToFile); }); } } private void profilerInfo(CommandResponseHandler resp) { - if (this.activeSampler == null) { + final Profiler.Sampler active = activeSampler(); + if (active == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { - long timeout = this.activeSampler.getAutoEndTime(); + long timeout = active.getAutoEndTime(); if (timeout == -1) { resp.replyPrefixed(text("There is an active profiler currently running, with no defined timeout.")); } else { @@ -272,13 +245,13 @@ private void profilerInfo(CommandResponseHandler resp) { resp.replyPrefixed(text("There is an active profiler currently running, due to timeout in " + timeoutDiff + " seconds.")); } - long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; + long runningTime = (System.currentTimeMillis() - active.getStartTime()) / 1000L; resp.replyPrefixed(text("It has been profiling for " + runningTime + " seconds so far.")); } } private void profilerCancel(CommandResponseHandler resp) { - if (this.activeSampler == null) { + if (activeSampler() == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { close(); @@ -286,32 +259,47 @@ private void profilerCancel(CommandResponseHandler resp) { } } - private void profilerStop(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { - if (this.activeSampler == null) { + private void profilerStop(SparkPlatform platform, CommandResponseHandler resp, Arguments arguments) { + final Profiler.Sampler sampler = activeSampler(); + if (sampler == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { - this.activeSampler.stop(); + sampler.stop(); resp.broadcastPrefixed(text("The active profiler has been stopped! Uploading results...")); - ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME; + final ThreadOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadOrder.BY_TIME : ThreadOrder.BY_NAME; String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); - MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); - MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? 
MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); + boolean sepParentCalls = arguments.boolFlag("separate-parent-calls"); boolean saveToFile = arguments.boolFlag("save-to-file"); - handleUpload(platform, resp, this.activeSampler, threadOrder, comment, mergeMode, saveToFile); - this.activeSampler = null; + handleUpload(platform, resp, sampler.dumpReport(configuration(resp, comment, sepParentCalls, threadOrder)), saveToFile); } } - private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) { - SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup()); + private Profiler.Sampler activeSampler() { + if (profiler.activeSamplers().isEmpty()) return null; + return profiler.activeSamplers().get(0); + } + + public static UploadResult postData(SparkPlatform platform, SparkSamplerProtos.SamplerData output) throws IOException { + final String key = platform.getBytebinClient().postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key(); + return new UploadResult(FormatUtil.getBaseDomainUrl(platform.getViewerUrl()) + key, FormatUtil.getBaseDomainUrl(platform.getBytebinUrl()) + key); + } + + private ReportConfiguration configuration(CommandResponseHandler resp, String comment, boolean separateParentCalls, ThreadOrder order) { + return ReportConfiguration.builder() + .order(order) + .comment(comment) + .separateParentCalls(separateParentCalls) + .sender(resp.sender().asSender()) + .build(); + } + private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, ProfilerReport report, boolean saveToFileFlag) { boolean saveToFile = false; if (saveToFileFlag) { saveToFile = true; } else { try { - String key = platform.getBytebinClient().postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key(); - String url = platform.getViewerUrl() + key; + final String url = report.upload().getViewerUrl(); resp.broadcastPrefixed(text("Profiler results:", GOLD)); resp.broadcast(text() @@ -332,7 +320,7 @@ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, S if (saveToFile) { Path file = platform.resolveSaveFile("profile", "sparkprofile"); try { - Files.write(file, output.toByteArray()); + report.saveToFile(file); resp.broadcastPrefixed(text() .content("Profile written to: ") diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java b/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java index bae5ddfa..476fd827 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java @@ -24,6 +24,7 @@ import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; +import me.lucko.spark.api.util.Sender; import me.lucko.spark.proto.SparkProtos.CommandSenderMetadata; import net.kyori.adventure.text.Component; @@ -44,6 +45,10 @@ default Data toData() { return new Data(getName(), getUniqueId()); } + default Sender asSender() { + return new Sender(getName(), getUniqueId()); + } + final class Data { private final String name; private final UUID uniqueId; diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapAnalysisProvider.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapAnalysisProvider.java new file mode 100644 index 
00000000..5f02b59b --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapAnalysisProvider.java @@ -0,0 +1,76 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.heapdump; + +import me.lucko.spark.api.heap.HeapAnalysis; +import me.lucko.spark.api.heap.HeapSummaryReport; +import me.lucko.spark.api.util.Sender; +import me.lucko.spark.api.util.UploadResult; +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.command.modules.HeapAnalysisModule; +import me.lucko.spark.proto.SparkHeapProtos; +import org.checkerframework.checker.nullness.qual.NonNull; +import org.jetbrains.annotations.NotNull; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +public class HeapAnalysisProvider implements HeapAnalysis { + private final SparkPlatform platform; + + public HeapAnalysisProvider(SparkPlatform platform) { + this.platform = platform; + } + + @Override + public @NotNull HeapSummaryReport summary(Sender sender) { + final SparkHeapProtos.HeapData data = HeapDumpSummary.createNew().toProto(platform, sender); + return new HeapSummaryReport() { + UploadResult uploadResult; + + @Override + @NonNull + public UploadResult upload() throws IOException { + if (uploadResult == null) + uploadResult = HeapAnalysisModule.upload(platform, data); + return uploadResult; + } + + @NotNull + @Override + public SparkHeapProtos.HeapData data() { + return data; + } + + @Override + public @NotNull Path saveToFile(Path path) throws IOException { + return Files.write(path, data.toByteArray()); + } + }; + } + + @Override + public @NotNull Path dumpHeap(Path outputPath, boolean liveOnly) throws Exception { + HeapDump.dumpHeap(outputPath, liveOnly); + return outputPath; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java index c0980e79..97d3fd7d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java +++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java @@ -20,12 +20,14 @@ package me.lucko.spark.common.heapdump; +import me.lucko.spark.api.util.Sender; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.proto.SparkHeapProtos.HeapData; import me.lucko.spark.proto.SparkHeapProtos.HeapEntry; import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata; +import org.jetbrains.annotations.Nullable; import org.objectweb.asm.Type; import java.lang.management.ManagementFactory; @@ -125,10 +127,13 @@ private HeapDumpSummary(List entries) { this.entries = entries; } - public HeapData toProto(SparkPlatform platform, CommandSender creator) { + public HeapData toProto(SparkPlatform platform, 
@Nullable Sender creator) { HeapMetadata.Builder metadata = HeapMetadata.newBuilder() - .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()) - .setCreator(creator.toData().toProto()); + .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()); + if (creator != null) { + metadata.setCreator(creator.toProto()); + } + try { metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null)); } catch (Exception e) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java index 49fcbe1b..2138804d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java @@ -20,10 +20,14 @@ package me.lucko.spark.common.monitor.ping; +import me.lucko.spark.api.ping.PingSummary; +import me.lucko.spark.api.ping.PlayerPing; +import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; import me.lucko.spark.common.monitor.MonitoringExecutor; import me.lucko.spark.common.util.RollingAverage; import org.checkerframework.checker.nullness.qual.Nullable; +import org.jetbrains.annotations.NotNull; import java.math.BigDecimal; import java.util.Map; @@ -33,7 +37,7 @@ /** * Provides statistics for player ping RTT to the server. */ -public final class PingStatistics implements Runnable, AutoCloseable { +public final class PingStatistics implements Runnable, AutoCloseable, me.lucko.spark.api.ping.PingStatistics { private static final int QUERY_RATE_SECONDS = 10; private static final int WINDOW_SIZE_SECONDS = (int) TimeUnit.MINUTES.toSeconds(15); // 900 private static final int WINDOW_SIZE = WINDOW_SIZE_SECONDS / QUERY_RATE_SECONDS; // 90 @@ -100,6 +104,16 @@ public PingSummary currentSummary() { : new PingSummary(values); } + @Override + public @NotNull PingSummary getSummary() { + return currentSummary(); + } + + @Override + public @NotNull DoubleAverageInfo getAverage() { + return getPingAverage(); + } + /** * Queries the ping of a given player. 
* @@ -128,22 +142,4 @@ public PingSummary currentSummary() { return null; } - public static final class PlayerPing { - private final String name; - private final int ping; - - PlayerPing(String name, int ping) { - this.name = name; - this.ping = ping; - } - - public String name() { - return this.name; - } - - public int ping() { - return this.ping; - } - } - } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 1c217dbb..b9ae7709 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -20,18 +20,33 @@ package me.lucko.spark.common.sampler; +import me.lucko.spark.api.profiler.Profiler; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.report.ProfilerReport; +import me.lucko.spark.api.profiler.report.ReportConfiguration; +import me.lucko.spark.api.util.Sender; +import me.lucko.spark.api.util.UploadResult; import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.command.sender.CommandSender; +import me.lucko.spark.common.command.modules.SamplerModule; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.sampler.aggregator.DataAggregator; +import me.lucko.spark.common.sampler.async.AsyncSampler; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.util.MethodDisambiguator; +import me.lucko.spark.proto.SparkSamplerProtos; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; +import org.checkerframework.checker.nullness.qual.NonNull; +import org.checkerframework.checker.nullness.qual.Nullable; +import org.jetbrains.annotations.NotNull; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.Comparator; import java.util.List; import java.util.Map; @@ -42,6 +57,9 @@ */ public abstract class AbstractSampler implements Sampler { + /** The manager associated with this sampler */ + protected final SamplerManager manager; + /** The spark platform instance */ protected final SparkPlatform platform; @@ -61,12 +79,13 @@ public abstract class AbstractSampler implements Sampler { protected final long autoEndTime; // -1 for nothing /** A future to encapsulate the completion of this sampler instance */ - protected final CompletableFuture future = new CompletableFuture<>(); + protected final CompletableFuture future = new CompletableFuture<>(); /** The garbage collector statistics when profiling started */ protected Map initialGcStats; - protected AbstractSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) { + protected AbstractSampler(SamplerManager manager, SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) { + this.manager = manager; this.platform = platform; this.interval = interval; this.threadDumper = threadDumper; @@ -86,11 +105,6 @@ public long getAutoEndTime() { return this.autoEndTime; } - @Override - public CompletableFuture getFuture() { - return this.future; - } - protected void recordInitialGcStats() { 
this.initialGcStats = GarbageCollectorStatistics.pollStats(); } @@ -99,8 +113,14 @@ protected Map getInitialGcStats() { return this.initialGcStats; } + @Override + public void stop() { + manager.markStopped(this); + } + @Override public void start() { + manager.markStarted(this); this.startTime = System.currentTimeMillis(); TickHook tickHook = this.platform.getTickHook(); @@ -109,16 +129,67 @@ public void start() { } } - protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) { + @Override + public ProfilerReport dumpReport(ReportConfiguration configuration) { + return createReport(configuration); + } + + private ProfilerReport createReport(ReportConfiguration configuration) { + final MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); + return new ProfilerReport() { + final SparkSamplerProtos.SamplerData data = toProto(platform, configuration.getSender(), configuration.getThreadOrder()::compare, configuration.getComment(), configuration.separateParentCalls() ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator), platform.createClassSourceLookup()); + + UploadResult uploadResult; + + @Override + @NonNull + public UploadResult upload() throws IOException { + if (uploadResult == null) + uploadResult = SamplerModule.postData(platform, data); + return uploadResult; + } + + @Override + @NotNull + public SparkSamplerProtos.SamplerData data() { + return data; + } + + @Override + @NotNull + public Path saveToFile(Path path) throws IOException { + if (path.getParent() != null) + Files.createDirectories(path.getParent()); + Files.deleteIfExists(path); + return Files.write(path, data.toByteArray()); + } + }; + } + + @Override + public CompletableFuture onCompleted(ReportConfiguration configuration) { + return onCompleted().thenApply(samp -> createReport(configuration)); + } + + @NonNull + @Override + public CompletableFuture onCompleted() { + return future; + } + + protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, @org.jetbrains.annotations.Nullable Sender creator, @Nullable String comment, DataAggregator dataAggregator) { SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder() .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()) - .setCreator(creator.toData().toProto()) .setStartTime(this.startTime) .setEndTime(System.currentTimeMillis()) .setInterval(this.interval) .setThreadDumper(this.threadDumper.getMetadata()) .setDataAggregator(dataAggregator.getMetadata()); + if (creator != null) { + metadata.setCreator(creator.toProto()); + } + if (comment != null) { metadata.setComment(comment); } @@ -168,4 +239,9 @@ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAg proto.putAllClassSources(classSourceVisitor.getMapping()); } } + + @Override + public boolean isAsync() { + return this instanceof AsyncSampler; + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java new file mode 100644 index 00000000..65d982b6 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java @@ -0,0 +1,141 @@ +/* + * This file is part of spark. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.sampler; + +import com.google.common.collect.Lists; +import me.lucko.spark.api.profiler.Profiler; +import me.lucko.spark.api.profiler.ProfilerConfiguration; +import me.lucko.spark.api.profiler.dumper.RegexThreadDumper; +import me.lucko.spark.api.util.ErrorHandler; +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.sampler.async.AsyncProfilerAccess; +import me.lucko.spark.common.sampler.async.AsyncSampler; +import me.lucko.spark.common.sampler.java.JavaSampler; +import me.lucko.spark.common.tick.TickHook; +import org.checkerframework.checker.nullness.qual.Nullable; + +import java.time.Duration; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; + +import static me.lucko.spark.api.profiler.Profiler.Sampler.MINIMUM_DURATION; + +public class ProfilerService implements Profiler, SamplerManager { + private final SparkPlatform platform; + + private final int maxSamplers; + private final List active; + private final List activeView; + + public ProfilerService(SparkPlatform platform, int samplerAmount) { + if (samplerAmount <= 0) + throw new IllegalArgumentException("samplerAmount <= 0"); + + this.platform = platform; + this.maxSamplers = samplerAmount; + this.active = new CopyOnWriteArrayList<>(); + this.activeView = Collections.unmodifiableList(active); + } + + @Override + public Sampler createSampler(ProfilerConfiguration configuration, ErrorHandler err) { + if (active.size() >= maxSamplers) { + if (maxSamplers == 1) { + err.accept(ErrorHandler.ErrorType.MAX_AMOUNT_REACHED, "A profiling sampler is already running!"); + } else { + err.accept(ErrorHandler.ErrorType.MAX_AMOUNT_REACHED, String.format("Maximum amount of %s profiling samplers are already running!", active.size())); + } + return null; + } + + Duration duration = configuration.getDuration(); + if (duration != null && duration.getSeconds() < MINIMUM_DURATION) { + err.accept(ErrorHandler.ErrorType.INVALID_DURATION, "A profiler needs to run for at least " + MINIMUM_DURATION + " seconds!"); + return null; + } + + double interval = configuration.getInterval(); + if (interval <= 0) { + err.accept(ErrorHandler.ErrorType.INVALID_ARGUMENT, "Cannot run profiler with negative interval."); + return null; + } + + TickHook hook = null; + int minimum = configuration.getMinimumTickDuration(); + if (minimum >= 0) { + hook = platform.getTickHook(); + if (hook == null) { + err.accept(ErrorHandler.ErrorType.TICK_COUNTING_NOT_SUPPORTED, "Tick counting is not supported!"); + return null; + } + } + + final int intervalMicros = (int) (interval * 1000d); + final long timeout = computeTimeout(duration); + + final me.lucko.spark.common.sampler.Sampler sampler; + if (minimum >= 1) { + sampler = new JavaSampler(this, platform, intervalMicros, 
configuration.getDumper(), configuration.getGrouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative(), hook, configuration.getMinimumTickDuration()); + } else if (!configuration.forceJavaSampler() && !(configuration.getDumper() instanceof RegexThreadDumper) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) { + sampler = new AsyncSampler(this, platform, intervalMicros, configuration.getDumper(), configuration.getGrouper(), timeout); + } else { + sampler = new JavaSampler(this, platform, intervalMicros, configuration.getDumper(), configuration.getGrouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative()); + } + + return sampler; + } + + @Override + public List activeSamplers() { + return activeView; + } + + @Override + public int maxSamplers() { + return maxSamplers; + } + + @Override + public void stop() { + // Copy the list of active samplers before stopping them, so we make sure we stop all of them + final List copy = Lists.newArrayList(active); + copy.forEach(Sampler::stop); + } + + private static long computeTimeout(@Nullable Duration duration) { + if (duration == null) + return -1; + return System.currentTimeMillis() + duration.toMillis(); + } + + @Override + public void markStopped(Sampler sampler) { + active.remove(sampler); + } + + @Override + public void markStarted(Sampler sampler) { + if (active.size() >= maxSamplers) + throw new ArrayIndexOutOfBoundsException("Maximum amount of active samplers has been reached!"); + active.add(sampler); + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 84f2da1c..7a8e4f17 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -20,53 +20,22 @@ package me.lucko.spark.common.sampler; +import me.lucko.spark.api.profiler.Profiler; +import me.lucko.spark.api.util.Sender; import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.util.ClassSourceLookup; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import java.util.Comparator; -import java.util.concurrent.CompletableFuture; /** * Abstract superinterface for all sampler implementations. */ -public interface Sampler { - - /** - * Starts the sampler. - */ - void start(); - - /** - * Stops the sampler. - */ - void stop(); - - /** - * Gets the time when the sampler started (unix timestamp in millis) - * - * @return the start time - */ - long getStartTime(); - - /** - * Gets the time when the sampler should automatically stop (unix timestamp in millis) - * - * @return the end time, or -1 if undefined - */ - long getAutoEndTime(); - - /** - * Gets a future to encapsulate the completion of the sampler - * - * @return a future - */ - CompletableFuture getFuture(); +public interface Sampler extends Profiler.Sampler { // Methods used to export the sampler data to the web viewer. 
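// With the report API added in AbstractSampler, callers no longer need to invoke toProto
// directly. A minimal sketch, assuming an active sampler and an output directory 'dir':
//   ProfilerReport report = sampler.dumpReport(ReportConfiguration.builder().build());
//   SamplerData data = report.data();                                 // raw protobuf payload
//   Path saved = report.saveToFile(dir.resolve("profile.sparkdata")); // file name is illustrative
//   UploadResult result = report.upload();                            // cached; repeat calls reuse the result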
- SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup); + SamplerData toProto(SparkPlatform platform, Sender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index 88b9d919..7ef61359 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -20,36 +20,37 @@ package me.lucko.spark.common.sampler; -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.sampler.async.AsyncProfilerAccess; -import me.lucko.spark.common.sampler.async.AsyncSampler; -import me.lucko.spark.common.sampler.java.JavaSampler; -import me.lucko.spark.common.tick.TickHook; - +import me.lucko.spark.api.profiler.ProfilerConfiguration; +import me.lucko.spark.api.profiler.ProfilerConfigurationBuilder; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import org.checkerframework.checker.nullness.qual.Nullable; + +import java.time.Duration; +import java.time.temporal.ChronoUnit; import java.util.concurrent.TimeUnit; /** * Builds {@link Sampler} instances. */ @SuppressWarnings("UnusedReturnValue") -public class SamplerBuilder { +public class SamplerBuilder implements ProfilerConfigurationBuilder { private double samplingInterval = 4; // milliseconds private boolean ignoreSleeping = false; private boolean ignoreNative = false; private boolean useAsyncProfiler = true; - private long timeout = -1; + private Duration duration; private ThreadDumper threadDumper = ThreadDumper.ALL; private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME; - private int ticksOver = -1; - private TickHook tickHook = null; + private int minimumTickDuration = -1; public SamplerBuilder() { } public SamplerBuilder samplingInterval(double samplingInterval) { - this.samplingInterval = samplingInterval; + this.samplingInterval = samplingInterval <= 0 ? 
4 : samplingInterval; return this; } @@ -57,23 +58,41 @@ public SamplerBuilder completeAfter(long timeout, TimeUnit unit) { if (timeout <= 0) { throw new IllegalArgumentException("timeout > 0"); } - this.timeout = System.currentTimeMillis() + unit.toMillis(timeout); + this.duration = Duration.of(timeout, toChronoUnit(unit)); return this; } - public SamplerBuilder threadDumper(ThreadDumper threadDumper) { + private static ChronoUnit toChronoUnit(TimeUnit unit) { + switch (unit) { + case NANOSECONDS: return ChronoUnit.NANOS; + case MICROSECONDS: return ChronoUnit.MICROS; + case MILLISECONDS: return ChronoUnit.MILLIS; + case SECONDS: return ChronoUnit.SECONDS; + case MINUTES: return ChronoUnit.MINUTES; + case HOURS: return ChronoUnit.HOURS; + case DAYS: return ChronoUnit.DAYS; + default: throw new AssertionError(); + } + } + + @Override + public SamplerBuilder duration(Duration duration) { + return completeAfter(duration.toMillis(), TimeUnit.MILLISECONDS); + } + + public SamplerBuilder dumper(ThreadDumper threadDumper) { this.threadDumper = threadDumper; return this; } - public SamplerBuilder threadGrouper(ThreadGrouper threadGrouper) { + public SamplerBuilder grouper(ThreadGrouper threadGrouper) { this.threadGrouper = threadGrouper; return this; } - public SamplerBuilder ticksOver(int ticksOver, TickHook tickHook) { - this.ticksOver = ticksOver; - this.tickHook = tickHook; + @Override + public SamplerBuilder minimumTickDuration(int duration) { + this.minimumTickDuration = duration; return this; } @@ -82,30 +101,72 @@ public SamplerBuilder ignoreSleeping(boolean ignoreSleeping) { return this; } + @Override + public SamplerBuilder ignoreSleeping() { + return ignoreSleeping(true); + } + public SamplerBuilder ignoreNative(boolean ignoreNative) { this.ignoreNative = ignoreNative; return this; } + @Override + public SamplerBuilder ignoreNative() { + return ignoreNative(true); + } public SamplerBuilder forceJavaSampler(boolean forceJavaSampler) { this.useAsyncProfiler = !forceJavaSampler; return this; } + @Override + public SamplerBuilder forceJavaSampler() { + return forceJavaSampler(true); + } - public Sampler start(SparkPlatform platform) { - int intervalMicros = (int) (this.samplingInterval * 1000d); - - Sampler sampler; - if (this.ticksOver != -1 && this.tickHook != null) { - sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver); - } else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) { - sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout); - } else { - sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); - } - - sampler.start(); - return sampler; + @Override + public ProfilerConfiguration build() { + return new ProfilerConfiguration() { + @Override + public double getInterval() { + return samplingInterval; + } + + @Override + public boolean ignoreSleeping() { + return ignoreSleeping; + } + + @Override + public boolean ignoreNative() { + return ignoreNative; + } + + @Override + public boolean forceJavaSampler() { + return !useAsyncProfiler; + } + + @Override + public int getMinimumTickDuration() { + return minimumTickDuration; + } + + @Override + public @Nullable Duration getDuration() { + return duration; + } + + @Override + public 
@Nullable ThreadDumper getDumper() { + return threadDumper; + } + + @Override + public @Nullable ThreadGrouper getGrouper() { + return threadGrouper; + } + }; } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerManager.java similarity index 52% rename from spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java rename to spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerManager.java index adcedcdb..c2175c44 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerManager.java @@ -20,33 +20,9 @@ package me.lucko.spark.common.sampler; -import me.lucko.spark.common.sampler.node.ThreadNode; - -import java.util.Comparator; - -/** - * Methods of ordering {@link ThreadNode}s in the output data. - */ -public enum ThreadNodeOrder implements Comparator { - - /** - * Order by the name of the thread (alphabetically) - */ - BY_NAME { - @Override - public int compare(ThreadNode o1, ThreadNode o2) { - return o1.getThreadLabel().compareTo(o2.getThreadLabel()); - } - }, - - /** - * Order by the time taken by the thread (most time taken first) - */ - BY_TIME { - @Override - public int compare(ThreadNode o1, ThreadNode o2) { - return -Double.compare(o1.getTotalTime(), o2.getTotalTime()); - } - } +import me.lucko.spark.api.profiler.Profiler; +public interface SamplerManager { + void markStopped(Profiler.Sampler sampler); + void markStarted(Profiler.Sampler sampler); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java deleted file mode 100644 index fd0c4133..00000000 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java +++ /dev/null @@ -1,215 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.common.sampler; - -import me.lucko.spark.common.util.ThreadFinder; -import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; - -import java.lang.management.ThreadInfo; -import java.lang.management.ThreadMXBean; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.function.Supplier; -import java.util.regex.Pattern; -import java.util.regex.PatternSyntaxException; -import java.util.stream.Collectors; - -/** - * Uses the {@link ThreadMXBean} to generate {@link ThreadInfo} instances for the threads being - * sampled. - */ -public interface ThreadDumper { - - /** - * Generates {@link ThreadInfo} data for the sampled threads. 
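// SamplerBuilder now doubles as the API's ProfilerConfigurationBuilder; a minimal sketch of
// producing a configuration with it (the chosen values are illustrative):
//   ProfilerConfiguration config = new SamplerBuilder()
//           .samplingInterval(4)          // milliseconds; non-positive values fall back to 4
//           .dumper(ThreadDumper.ALL)
//           .grouper(ThreadGrouper.BY_NAME)
//           .ignoreSleeping()
//           .duration(Duration.ofSeconds(30))
//           .build();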
- * - * @param threadBean the thread bean instance to obtain the data from - * @return an array of generated thread info instances - */ - ThreadInfo[] dumpThreads(ThreadMXBean threadBean); - - /** - * Gets metadata about the thread dumper instance. - */ - SamplerMetadata.ThreadDumper getMetadata(); - - /** - * Implementation of {@link ThreadDumper} that generates data for all threads. - */ - ThreadDumper ALL = new ThreadDumper() { - @Override - public ThreadInfo[] dumpThreads(final ThreadMXBean threadBean) { - return threadBean.dumpAllThreads(false, false); - } - - @Override - public SamplerMetadata.ThreadDumper getMetadata() { - return SamplerMetadata.ThreadDumper.newBuilder() - .setType(SamplerMetadata.ThreadDumper.Type.ALL) - .build(); - } - }; - - /** - * Utility to cache the creation of a {@link ThreadDumper} targeting - * the game (server/client) thread. - */ - final class GameThread implements Supplier { - private Supplier threadSupplier; - private Specific dumper = null; - - public GameThread() { - - } - - public GameThread(Supplier threadSupplier) { - this.threadSupplier = threadSupplier; - } - - @Override - public ThreadDumper get() { - if (this.dumper == null) { - setThread(this.threadSupplier.get()); - this.threadSupplier = null; - } - - return Objects.requireNonNull(this.dumper, "dumper"); - } - - public void setThread(Thread thread) { - this.dumper = new Specific(new long[]{thread.getId()}); - } - } - - /** - * Implementation of {@link ThreadDumper} that generates data for a specific set of threads. - */ - final class Specific implements ThreadDumper { - private final long[] ids; - private Set threads; - private Set threadNamesLowerCase; - - public Specific(Thread thread) { - this.ids = new long[]{thread.getId()}; - } - - public Specific(long[] ids) { - this.ids = ids; - } - - public Specific(Set names) { - this.threadNamesLowerCase = names.stream().map(String::toLowerCase).collect(Collectors.toSet()); - this.ids = new ThreadFinder().getThreads() - .filter(t -> this.threadNamesLowerCase.contains(t.getName().toLowerCase())) - .mapToLong(Thread::getId) - .toArray(); - Arrays.sort(this.ids); - } - - public Set getThreads() { - if (this.threads == null) { - this.threads = new ThreadFinder().getThreads() - .filter(t -> Arrays.binarySearch(this.ids, t.getId()) >= 0) - .collect(Collectors.toSet()); - } - return this.threads; - } - - public Set getThreadNames() { - if (this.threadNamesLowerCase == null) { - this.threadNamesLowerCase = getThreads().stream() - .map(t -> t.getName().toLowerCase()) - .collect(Collectors.toSet()); - } - return this.threadNamesLowerCase; - } - - @Override - public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) { - return threadBean.getThreadInfo(this.ids, Integer.MAX_VALUE); - } - - @Override - public SamplerMetadata.ThreadDumper getMetadata() { - return SamplerMetadata.ThreadDumper.newBuilder() - .setType(SamplerMetadata.ThreadDumper.Type.SPECIFIC) - .addAllIds(Arrays.stream(this.ids).boxed().collect(Collectors.toList())) - .build(); - } - } - - /** - * Implementation of {@link ThreadDumper} that generates data for a regex matched set of threads. 
- */ - final class Regex implements ThreadDumper { - private final ThreadFinder threadFinder = new ThreadFinder(); - private final Set namePatterns; - private final Map cache = new HashMap<>(); - - public Regex(Set namePatterns) { - this.namePatterns = namePatterns.stream() - .map(regex -> { - try { - return Pattern.compile(regex, Pattern.CASE_INSENSITIVE); - } catch (PatternSyntaxException e) { - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toSet()); - } - - @Override - public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) { - return this.threadFinder.getThreads() - .filter(thread -> { - Boolean result = this.cache.get(thread.getId()); - if (result != null) { - return result; - } - - for (Pattern pattern : this.namePatterns) { - if (pattern.matcher(thread.getName()).matches()) { - this.cache.put(thread.getId(), true); - return true; - } - } - this.cache.put(thread.getId(), false); - return false; - }) - .map(thread -> threadBean.getThreadInfo(thread.getId(), Integer.MAX_VALUE)) - .filter(Objects::nonNull) - .toArray(ThreadInfo[]::new); - } - - @Override - public SamplerMetadata.ThreadDumper getMetadata() { - return SamplerMetadata.ThreadDumper.newBuilder() - .setType(SamplerMetadata.ThreadDumper.Type.REGEX) - .addAllPatterns(this.namePatterns.stream().map(Pattern::pattern).collect(Collectors.toList())) - .build(); - } - } - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java deleted file mode 100644 index 9ad84df3..00000000 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.common.sampler; - -import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; - -import java.util.Collections; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Function for grouping threads together - */ -public interface ThreadGrouper { - - /** - * Implementation of {@link ThreadGrouper} that just groups by thread name. - */ - ThreadGrouper BY_NAME = new ThreadGrouper() { - @Override - public String getGroup(long threadId, String threadName) { - return threadName; - } - - @Override - public String getLabel(String group) { - return group; - } - - @Override - public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { - return SamplerMetadata.DataAggregator.ThreadGrouper.BY_NAME; - } - }; - - /** - * Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool - * the thread originated from. - * - *
<p>The regex pattern used to match pools expects a digit at the end of the thread name, - * separated from the pool name with any of one or more of ' ', '-', or '#'.</p>
- */ - ThreadGrouper BY_POOL = new ThreadGrouper() { - private /* static */ final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$"); - - // thread id -> group - private final Map cache = new ConcurrentHashMap<>(); - // group -> thread ids - private final Map> seen = new ConcurrentHashMap<>(); - - @Override - public String getGroup(long threadId, String threadName) { - String cached = this.cache.get(threadId); - if (cached != null) { - return cached; - } - - Matcher matcher = this.pattern.matcher(threadName); - if (!matcher.matches()) { - return threadName; - } - - String group = matcher.group(1).trim(); - this.cache.put(threadId, group); - this.seen.computeIfAbsent(group, g -> ConcurrentHashMap.newKeySet()).add(threadId); - return group; - } - - @Override - public String getLabel(String group) { - int count = this.seen.getOrDefault(group, Collections.emptySet()).size(); - if (count == 0) { - return group; - } - return group + " (x" + count + ")"; - } - - @Override - public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { - return SamplerMetadata.DataAggregator.ThreadGrouper.BY_POOL; - } - }; - - /** - * Implementation of {@link ThreadGrouper} which groups all threads as one, under - * the name "All". - */ - ThreadGrouper AS_ONE = new ThreadGrouper() { - private final Set seen = ConcurrentHashMap.newKeySet(); - - @Override - public String getGroup(long threadId, String threadName) { - this.seen.add(threadId); - return "root"; - } - - @Override - public String getLabel(String group) { - return "All (x" + this.seen.size() + ")"; - } - - @Override - public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { - return SamplerMetadata.DataAggregator.ThreadGrouper.AS_ONE; - } - }; - - /** - * Gets the group for the given thread. - * - * @param threadId the id of the thread - * @param threadName the name of the thread - * @return the group - */ - String getGroup(long threadId, String threadName); - - /** - * Gets the label to use for a given group. 
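// Worked example for BY_POOL (the interface now lives in the API as
// me.lucko.spark.api.profiler.thread.ThreadGrouper, with the same behaviour):
//   a thread named "Worker-#3" matches ^(.*?)[-# ]+\d+$, so getGroup(...) returns "Worker";
//   once three such threads have been seen, getLabel("Worker") returns "Worker (x3)".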
- * - * @param group the group - * @return the label - */ - String getLabel(String group); - - SamplerMetadata.DataAggregator.ThreadGrouper asProto(); - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java index ad9dee4b..28607f8c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java @@ -20,7 +20,7 @@ package me.lucko.spark.common.sampler.aggregator; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.sampler.node.ThreadNode; import java.util.ArrayList; @@ -55,7 +55,7 @@ protected ThreadNode getNode(String group) { public List exportData() { List data = new ArrayList<>(this.threadData.values()); for (ThreadNode node : data) { - node.setThreadLabel(this.threadGrouper.getLabel(node.getThreadGroup())); + node.setThreadLabel(this.threadGrouper.getLabel(node.getGroup())); } return data; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java index 3de39432..6072664c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java @@ -20,7 +20,7 @@ package me.lucko.spark.common.sampler.async; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.sampler.aggregator.AbstractDataAggregator; import me.lucko.spark.common.sampler.node.StackTraceNode; import me.lucko.spark.common.sampler.node.ThreadNode; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index d8288da7..ba76f4a6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -21,19 +21,19 @@ package me.lucko.spark.common.sampler.async; import com.google.common.util.concurrent.ThreadFactoryBuilder; - +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import me.lucko.spark.api.util.Sender; import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.AbstractSampler; -import me.lucko.spark.common.sampler.ThreadDumper; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.common.sampler.SamplerManager; import me.lucko.spark.common.sampler.async.jfr.JfrReader; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.util.ClassSourceLookup; import me.lucko.spark.common.util.TemporaryFiles; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; - import one.profiler.AsyncProfiler; import java.io.IOException; @@ -65,8 +65,8 @@ public class AsyncSampler extends AbstractSampler { /** The executor used for timeouts */ private ScheduledExecutorService 
timeoutExecutor; - public AsyncSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { - super(platform, interval, threadDumper, endTime); + public AsyncSampler(SamplerManager manager, SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { + super(manager, platform, interval, threadDumper, endTime); this.profiler = AsyncProfilerAccess.INSTANCE.getProfiler(); this.dataAggregator = new AsyncDataAggregator(threadGrouper); } @@ -98,8 +98,8 @@ public void start() { throw new RuntimeException("Unable to create temporary output file", e); } - String command = "start,event=" + AsyncProfilerAccess.INSTANCE.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString(); - if (this.threadDumper instanceof ThreadDumper.Specific) { + String command = "start,event=" + AsyncProfilerAccess.INSTANCE.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile; + if (this.threadDumper instanceof SpecificThreadDumper) { command += ",filter"; } @@ -108,8 +108,8 @@ public void start() { throw new RuntimeException("Unexpected response: " + resp); } - if (this.threadDumper instanceof ThreadDumper.Specific) { - ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper; + if (this.threadDumper instanceof SpecificThreadDumper) { + SpecificThreadDumper threadDumper = (SpecificThreadDumper) this.threadDumper; for (Thread thread : threadDumper.getThreads()) { this.profiler.addThread(thread); } @@ -144,6 +144,7 @@ private void scheduleTimeout() { */ @Override public void stop() { + super.stop(); try { this.profiler.stop(); } catch (IllegalStateException e) { @@ -160,7 +161,7 @@ public void stop() { } @Override - public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + public SamplerData toProto(SparkPlatform platform, Sender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { SamplerData.Builder proto = SamplerData.newBuilder(); writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator); aggregateOutput(); @@ -175,8 +176,8 @@ private void aggregateOutput() { this.outputComplete = true; Predicate threadFilter; - if (this.threadDumper instanceof ThreadDumper.Specific) { - ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper; + if (this.threadDumper instanceof SpecificThreadDumper) { + SpecificThreadDumper threadDumper = (SpecificThreadDumper) this.threadDumper; threadFilter = n -> threadDumper.getThreadNames().contains(n.toLowerCase()); } else { threadFilter = n -> true; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java index cc530d6a..b274723c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java @@ -20,7 +20,7 @@ package me.lucko.spark.common.sampler.java; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.sampler.aggregator.AbstractDataAggregator; import me.lucko.spark.common.sampler.aggregator.DataAggregator; import 
me.lucko.spark.common.sampler.node.StackTraceNode; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 913faee0..8b235674 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -21,12 +21,12 @@ package me.lucko.spark.common.sampler.java; import com.google.common.util.concurrent.ThreadFactoryBuilder; - +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import me.lucko.spark.api.util.Sender; import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.AbstractSampler; -import me.lucko.spark.common.sampler.ThreadDumper; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.common.sampler.SamplerManager; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.tick.TickHook; @@ -63,13 +63,13 @@ public class JavaSampler extends AbstractSampler implements Runnable { /** Responsible for aggregating and then outputting collected sampling data */ private final JavaDataAggregator dataAggregator; - public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { - super(platform, interval, threadDumper, endTime); + public JavaSampler(SamplerManager manager, SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { + super(manager, platform, interval, threadDumper, endTime); this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative); } - public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { - super(platform, interval, threadDumper, endTime); + public JavaSampler(SamplerManager manager, SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { + super(manager, platform, interval, threadDumper, endTime); this.dataAggregator = new TickedDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold); } @@ -81,6 +81,7 @@ public void start() { @Override public void stop() { + super.stop(); this.task.cancel(false); } @@ -124,7 +125,7 @@ public void run() { } @Override - public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + public SamplerData toProto(SparkPlatform platform, Sender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { SamplerData.Builder proto = SamplerData.newBuilder(); writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator); writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup); diff --git 
a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java index 39e21aaa..58660f68 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java @@ -20,7 +20,7 @@ package me.lucko.spark.common.sampler.java; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.sampler.aggregator.DataAggregator; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java index e062f31a..c25688a3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java @@ -20,7 +20,7 @@ package me.lucko.spark.common.sampler.java; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.sampler.aggregator.DataAggregator; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.tick.TickHook; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java index ed97443e..e1209e51 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java @@ -25,7 +25,7 @@ /** * The root of a sampling stack for a given thread / thread group. */ -public final class ThreadNode extends AbstractNode { +public final class ThreadNode extends AbstractNode implements me.lucko.spark.api.profiler.thread.ThreadNode { /** * The name of this thread / thread group @@ -41,12 +41,14 @@ public ThreadNode(String name) { this.name = name; } - public String getThreadLabel() { + @Override + public String getLabel() { return this.label != null ? 
this.label : this.name; } - public String getThreadGroup() { - return this.name; + @Override + public String getGroup() { + return name; } public void setThreadLabel(String label) { @@ -55,7 +57,7 @@ public void setThreadLabel(String label) { public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode) { SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder() - .setName(getThreadLabel()) + .setName(getLabel()) .setTime(getTotalTime()); for (StackTraceNode child : exportChildren(mergeMode)) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java index c4a3d666..9f258ea7 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java @@ -62,4 +62,9 @@ public static Component formatBytes(long bytes, TextColor color, String suffix) .append(Component.text(unit)) .build(); } + + public static String getBaseDomainUrl(String input) { + if (input.endsWith("/")) return input; + return input + "/"; + } } diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java index 0ef6620d..d56df078 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java @@ -28,9 +28,10 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; +import me.lucko.spark.api.profiler.dumper.GameThreadDumper; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.fabric.FabricCommandSender; @@ -59,12 +60,12 @@ public static void register(FabricSparkMod mod, MinecraftClient client) { } private final MinecraftClient minecraft; - private final ThreadDumper.GameThread gameThreadDumper; + private final GameThreadDumper gameThreadDumper; public FabricClientSparkPlugin(FabricSparkMod mod, MinecraftClient minecraft) { super(mod); this.minecraft = minecraft; - this.gameThreadDumper = new ThreadDumper.GameThread(() -> ((MinecraftClientAccessor) minecraft).getThread()); + this.gameThreadDumper = new GameThreadDumper(() -> ((MinecraftClientAccessor) minecraft).getThread()); } @Override diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java index f840f5e8..4cf360ac 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java @@ -29,11 +29,12 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder; import me.lucko.fabric.api.permissions.v0.Permissions; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import 
me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.fabric.FabricCommandSender; @@ -69,7 +70,7 @@ public static FabricServerSparkPlugin register(FabricSparkMod mod, MinecraftServ public FabricServerSparkPlugin(FabricSparkMod mod, MinecraftServer server) { super(mod); this.server = server; - this.gameThreadDumper = new ThreadDumper.Specific(server.getThread()); + this.gameThreadDumper = new SpecificThreadDumper(server.getThread()); } @Override diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle index 5d6f2dce..8b6e325b 100644 --- a/spark-forge/build.gradle +++ b/spark-forge/build.gradle @@ -1,19 +1,8 @@ -buildscript { - repositories { - maven { url = "https://maven.minecraftforge.net" } - mavenCentral() - } - dependencies { - classpath group: 'net.minecraftforge.gradle', name: 'ForgeGradle', version: '5.1.+', changing: true - } -} - plugins { id 'com.github.johnrengelman.shadow' version '7.0.0' + id 'net.minecraftforge.gradle' version '5.1.+' } -apply plugin: 'net.minecraftforge.gradle' - tasks.withType(JavaCompile) { // override, compile targeting J17 options.release = 17 @@ -30,7 +19,7 @@ configurations { } dependencies { - minecraft 'net.minecraftforge:forge:1.19.2-43.0.0' + minecraft "net.minecraftforge:forge:${rootProject.forge_version}" shade project(':spark-common') } diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java index a4c6bd14..ad5e9fec 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java @@ -27,9 +27,10 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.forge.ForgeCommandSender; @@ -64,7 +65,7 @@ public static void register(ForgeSparkMod mod, FMLClientSetupEvent event) { public ForgeClientSparkPlugin(ForgeSparkMod mod, Minecraft minecraft) { super(mod); this.minecraft = minecraft; - this.gameThreadDumper = new ThreadDumper.Specific(minecraft.gameThread); + this.gameThreadDumper = new SpecificThreadDumper(minecraft.gameThread); } @Override diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java index 1aeb2b1c..f2b038ff 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java @@ -29,11 +29,12 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; 
import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.forge.ForgeCommandSender; @@ -82,7 +83,7 @@ public static void register(ForgeSparkMod mod, ServerAboutToStartEvent event) { public ForgeServerSparkPlugin(ForgeSparkMod mod, MinecraftServer server) { super(mod); this.server = server; - this.gameThreadDumper = new ThreadDumper.Specific(server.getRunningThread()); + this.gameThreadDumper = new SpecificThreadDumper(server.getRunningThread()); } @Override diff --git a/spark-forge/src/main/resources/META-INF/mods.toml b/spark-forge/src/main/resources/META-INF/mods.toml index e892e24f..f610d335 100644 --- a/spark-forge/src/main/resources/META-INF/mods.toml +++ b/spark-forge/src/main/resources/META-INF/mods.toml @@ -15,3 +15,9 @@ description="${pluginDescription}" versionRange="[34,)" ordering="NONE" side="BOTH" +[[dependencies.spark]] + modId="minecraft" + mandatory=true + versionRange="[1.19,)" + ordering="NONE" + side="BOTH" \ No newline at end of file diff --git a/spark-proto/build.gradle b/spark-proto/build.gradle new file mode 100644 index 00000000..6707e7f1 --- /dev/null +++ b/spark-proto/build.gradle @@ -0,0 +1,105 @@ +plugins { + id 'maven-publish' + id 'com.google.protobuf' version '0.8.16' + id 'com.github.johnrengelman.shadow' version '7.0.0' +} + +version = api_version +group = 'me.lucko.spark' +archivesBaseName = 'proto' + +configurations { + shade + api.extendsFrom shade +} + +dependencies { + shade 'com.google.protobuf:protobuf-javalite:3.15.6' +} + +jar { + classifier 'lite' +} + +shadowJar { + configurations = [project.configurations.shade] + relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' + exclude 'module-info.class' + exclude 'META-INF/maven/**' + exclude 'META-INF/proguard/**' + + classifier '' +} + +license { + header = project.file('HEADER.txt') +} + +java.withSourcesJar() +java.withJavadocJar() + +components.java.withVariantsFromConfiguration(configurations.runtimeElements) { + skip() +} +components.java.withVariantsFromConfiguration(configurations.apiElements) { + skip() +} + +publishing { + repositories { + maven { + url = 'https://oss.sonatype.org/content/repositories/snapshots' + credentials { + username = findProperty('sonatypeUsername') ?: '' + password = findProperty('sonatypePassword') ?: '' + } + } + } + publications { + mavenJava(MavenPublication) { + artifacts = [ + shadowJar, javadocJar, sourcesJar + ] + pom { + name = 'spark' + description = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.' 
+ url = 'https://spark.lucko.me/' + } + } + } +} + +jar { + manifest.attributes([ + "Specification-Title" : 'SparkProto', + "Specification-Vendor" : 'Lucko', + "Specification-Version" : '1', // We are version 1 of ourselves + "Implementation-Title" : 'SparkProto', + "Implementation-Version" : api_version, + "Implementation-Vendor" : 'Lucko', + "Implementation-Timestamp": new Date().format("yyyy-MM-dd'T'HH:mm:ssZ") + ]) +} + +protobuf { + protoc { + if (System.getProperty("os.name") == "Mac OS X" && System.getProperty("os.arch") == "aarch64") { + path = '/opt/homebrew/bin/protoc' + } else { + artifact = 'com.google.protobuf:protoc:3.15.6' + } + } + generateProtoTasks { + all().each { task -> + task.builtins { + java { + option 'lite' + } + } + } + } +} + +afterEvaluate { + tasks.generateProto.group = 'proto' +} \ No newline at end of file diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-proto/src/main/proto/spark/spark.proto similarity index 100% rename from spark-common/src/main/proto/spark/spark.proto rename to spark-proto/src/main/proto/spark/spark.proto diff --git a/spark-common/src/main/proto/spark/spark_heap.proto b/spark-proto/src/main/proto/spark/spark_heap.proto similarity index 100% rename from spark-common/src/main/proto/spark/spark_heap.proto rename to spark-proto/src/main/proto/spark/spark_heap.proto diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-proto/src/main/proto/spark/spark_sampler.proto similarity index 100% rename from spark-common/src/main/proto/spark/spark_sampler.proto rename to spark-proto/src/main/proto/spark/spark_sampler.proto diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java index e6c9a041..126486c6 100644 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java +++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java @@ -23,12 +23,13 @@ import com.google.inject.Inject; import me.lucko.spark.api.Spark; +import me.lucko.spark.api.profiler.dumper.GameThreadDumper; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.ClassSourceLookup; @@ -73,7 +74,7 @@ public class Sponge7SparkPlugin implements SparkPlugin { private final Path configDirectory; private final SpongeExecutorService asyncExecutor; private final SpongeExecutorService syncExecutor; - private final ThreadDumper.GameThread gameThreadDumper = new ThreadDumper.GameThread(); + private final GameThreadDumper gameThreadDumper = new GameThreadDumper(); private SparkPlatform platform; diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java index 83b2ec2b..fe74869a 100644 --- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java +++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java @@ -23,13 +23,14 @@ import com.google.common.base.Suppliers; import com.google.inject.Inject; +import me.lucko.spark.api.profiler.dumper.GameThreadDumper; import 
me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.ClassSourceLookup; @@ -71,7 +72,7 @@ public class Sponge8SparkPlugin implements SparkPlugin { private final Path configDirectory; private final ExecutorService asyncExecutor; private final Supplier syncExecutor; - private final ThreadDumper.GameThread gameThreadDumper = new ThreadDumper.GameThread(); + private final GameThreadDumper gameThreadDumper = new GameThreadDumper(); private SparkPlatform platform;
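For reference, a minimal sketch of driving the new HeapAnalysis interface from a consumer's perspective. How the HeapAnalysis instance is obtained and the output file names are assumptions, not part of this diff:

import me.lucko.spark.api.heap.HeapAnalysis;
import me.lucko.spark.api.heap.HeapSummaryReport;
import me.lucko.spark.api.util.UploadResult;

import java.nio.file.Path;
import java.nio.file.Paths;

public class HeapAnalysisExample {

    // 'heap' would be obtained from the Spark API; it is a parameter here
    // to keep the sketch self-contained
    static void runHeapAnalysis(HeapAnalysis heap) throws Exception {
        // Build an in-memory summary. Passing a null creator appears acceptable
        // since HeapDumpSummary#toProto now takes a @Nullable Sender.
        HeapSummaryReport report = heap.summary(null);

        // upload() caches its result, so calling it twice will not re-upload
        UploadResult uploaded = report.upload();

        // Persist the raw protobuf alongside the upload (file name is illustrative)
        Path saved = report.saveToFile(Paths.get("heap-summary.bin"));

        // Or take a full .hprof dump, restricted to live objects only
        Path hprof = heap.dumpHeap(Paths.get("heap.hprof"), true);
    }
}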