From 2b2e084901910102ec6d3b6c0a0bd33afd60fabb Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 17 Jul 2022 18:56:48 +0300 Subject: [PATCH 01/29] Initial attempt at API --- gradle.properties | 4 +- settings.gradle | 1 + spark-api/build.gradle | 30 +-- .../main/java/me/lucko/spark/api/Spark.java | 35 ++- .../me/lucko/spark/api/profiler/Profiler.java | 55 +++++ .../api/profiler/ProfilerConfiguration.java | 79 +++++++ .../ProfilerConfigurationBuilder.java | 88 ++++++++ .../api/profiler/dumper/GameThreadDumper.java | 21 ++ .../profiler/dumper/RegexThreadDumper.java | 68 ++++++ .../profiler/dumper/SpecificThreadDumper.java | 67 ++++++ .../api/profiler/dumper/ThreadDumper.java | 64 ++++++ .../api/profiler/report/ProfilerReport.java | 31 +++ .../profiler/report/ReportConfiguration.java | 58 +++++ .../report/ReportConfigurationBuilder.java | 70 ++++++ .../api/profiler/thread/ThreadGrouper.java | 75 +++++++ .../spark/api/profiler/thread/ThreadNode.java | 27 +++ .../api/profiler/thread/ThreadOrder.java | 29 +++ .../lucko/spark/api/util/StreamSupplier.java | 13 ++ .../lucko/spark/bukkit/BukkitSparkPlugin.java | 5 +- spark-common/build.gradle | 24 --- .../me/lucko/spark/common/SparkPlatform.java | 2 +- .../me/lucko/spark/common/SparkPlugin.java | 2 +- .../me/lucko/spark/common/api/SparkApi.java | 114 +++++++++- .../common/command/modules/SamplerModule.java | 133 +++++------- .../spark/common/sampler/AbstractSampler.java | 76 ++++++- .../spark/common/sampler/ProfilerService.java | 96 +++++++++ .../lucko/spark/common/sampler/Sampler.java | 13 +- .../spark/common/sampler/SamplerBuilder.java | 123 ++++++++--- .../spark/common/sampler/ThreadDumper.java | 201 ------------------ .../spark/common/sampler/ThreadGrouper.java | 148 ------------- .../spark/common/sampler/ThreadNodeOrder.java | 52 ----- .../aggregator/AbstractDataAggregator.java | 2 +- .../sampler/async/AsyncDataAggregator.java | 2 +- .../common/sampler/async/AsyncSampler.java | 15 +- 
.../sampler/java/JavaDataAggregator.java | 2 +- .../common/sampler/java/JavaSampler.java | 4 +- .../sampler/java/SimpleDataAggregator.java | 2 +- .../sampler/java/TickedDataAggregator.java | 2 +- .../spark/common/sampler/node/ThreadNode.java | 12 +- .../plugin/FabricClientSparkPlugin.java | 5 +- .../plugin/FabricServerSparkPlugin.java | 5 +- .../forge/plugin/ForgeClientSparkPlugin.java | 5 +- .../forge/plugin/ForgeServerSparkPlugin.java | 5 +- spark-proto/build.gradle | 61 ++++++ .../src/main/proto/spark/spark.proto | 0 .../src/main/proto/spark/spark_heap.proto | 0 .../src/main/proto/spark/spark_sampler.proto | 0 .../spark/sponge/Sponge7SparkPlugin.java | 5 +- .../spark/sponge/Sponge8SparkPlugin.java | 5 +- 49 files changed, 1340 insertions(+), 596 deletions(-) create mode 100644 spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/RegexThreadDumper.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/SpecificThreadDumper.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/ThreadDumper.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadNode.java create mode 100644 
spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadOrder.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/util/StreamSupplier.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java delete mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java create mode 100644 spark-proto/build.gradle rename {spark-common => spark-proto}/src/main/proto/spark/spark.proto (100%) rename {spark-common => spark-proto}/src/main/proto/spark/spark_heap.proto (100%) rename {spark-common => spark-proto}/src/main/proto/spark/spark_sampler.proto (100%) diff --git a/gradle.properties b/gradle.properties index 1c3cd0da..76ae5353 100644 --- a/gradle.properties +++ b/gradle.properties @@ -2,4 +2,6 @@ org.gradle.jvmargs=-Xmx2G org.gradle.parallel=true # thanks, forge -org.gradle.daemon=false \ No newline at end of file +org.gradle.daemon=false + +api_version=0.1-SNAPSHOT \ No newline at end of file diff --git a/settings.gradle b/settings.gradle index 5dd95828..a7863270 100644 --- a/settings.gradle +++ b/settings.gradle @@ -10,6 +10,7 @@ pluginManagement { rootProject.name = 'spark' include ( + 'spark-proto', 'spark-api', 'spark-common', 'spark-bukkit', diff --git a/spark-api/build.gradle b/spark-api/build.gradle index 0fbe9e1a..5e9aca66 100644 --- a/spark-api/build.gradle +++ b/spark-api/build.gradle @@ -2,11 +2,15 @@ plugins { id 'maven-publish' } -version = '0.1-SNAPSHOT' +version = api_version +group = 'me.lucko.spark' +archivesBaseName = 'api' dependencies { - compileOnly 'org.checkerframework:checker-qual:3.8.0' - compileOnly 'org.jetbrains:annotations:20.1.0' + api project(':spark-proto') + compileOnly 'org.jetbrains:annotations:23.0.0' + compileOnly 'org.checkerframework:checker-qual:3.22.1' + 
compileOnly 'com.google.errorprone:error_prone_annotations:2.6.0' } license { @@ -14,15 +18,15 @@ license { } publishing { - //repositories { - // maven { - // url = 'https://oss.sonatype.org/content/repositories/snapshots' - // credentials { - // username = sonatypeUsername - // password = sonatypePassword - // } - // } - //} + repositories { + maven { + url = 'https://oss.sonatype.org/content/repositories/snapshots' + credentials { + username = findProperty('sonatypeUsername') ?: '' + password = findProperty('sonatypePassword') ?: '' + } + } + } publications { mavenJava(MavenPublication) { from components.java @@ -33,4 +37,4 @@ publishing { } } } -} +} \ No newline at end of file diff --git a/spark-api/src/main/java/me/lucko/spark/api/Spark.java b/spark-api/src/main/java/me/lucko/spark/api/Spark.java index 653eb536..266d8538 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/Spark.java +++ b/spark-api/src/main/java/me/lucko/spark/api/Spark.java @@ -26,10 +26,14 @@ package me.lucko.spark.api; import me.lucko.spark.api.gc.GarbageCollector; +import me.lucko.spark.api.profiler.Profiler; +import me.lucko.spark.api.profiler.ProfilerConfigurationBuilder; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; import me.lucko.spark.api.statistic.types.DoubleStatistic; import me.lucko.spark.api.statistic.types.GenericStatistic; - +import me.lucko.spark.api.util.StreamSupplier; +import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata.DataAggregator; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; import org.jetbrains.annotations.Unmodifiable; @@ -84,4 +88,33 @@ public interface Spark { */ @NonNull @Unmodifiable Map gc(); + /** + * Creates a thread finder. + * + * @return a thread finder + */ + @NonNull StreamSupplier threadFinder(); + + /** + * Creates a new {@link ProfilerConfigurationBuilder profiler configuration builder}. 
+ * + * @return the builder + */ + @NonNull ProfilerConfigurationBuilder configurationBuilder(); + + /** + * Creates a new {@link Profiler profiler}. + * + * @return the profiler + */ + @NonNull Profiler profiler(); + + /** + * Gets the {@link ThreadGrouper} associated with a Proto {@link DataAggregator.ThreadGrouper}. + * + * @param type the Proto type + * @return the grouper + * @throws AssertionError if the type is {@link DataAggregator.ThreadGrouper#UNRECOGNIZED unknown}. + */ + @NonNull ThreadGrouper getGrouper(DataAggregator.ThreadGrouper type); } diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java new file mode 100644 index 00000000..8d2aa04d --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java @@ -0,0 +1,55 @@ +package me.lucko.spark.api.profiler; + +import me.lucko.spark.api.profiler.report.ProfilerReport; +import me.lucko.spark.api.profiler.report.ReportConfiguration; +import org.jetbrains.annotations.Nullable; + +import java.util.concurrent.CompletableFuture; +import java.util.function.Consumer; + +/** + * A profiler used for sampling. + */ +public interface Profiler { + /** + * Generates a new {@link Sampler}.
+ * Note: the sampler is not started by default, use {@link Sampler#start()} + * + * @param configuration the configuration to use for the profiler + * @param errorReporter a consumer that reports any errors encountered in the creation of the sampler + * @return the sampler, or if a validation error was caught, {@code null} + */ + @Nullable + Sampler create(ProfilerConfiguration configuration, Consumer errorReporter); + + /** + * Represents a sampler used for profiling. + */ + interface Sampler { + /** + * Gets a future to encapsulate the completion of the sampler, containing the report. + * + * @param configuration the configuration to use for generating the report + * @return a future + */ + CompletableFuture whenDone(ReportConfiguration configuration); + + /** + * Stops the sampler. + */ + void stop(); + + /** + * Dumps the report of the sampler. + * + * @param configuration the configuration to use for generating the report + * @return the report of the sampler + */ + ProfilerReport dumpReport(ReportConfiguration configuration); + + /** + * Starts the sampler. + */ + void start(); + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java new file mode 100644 index 00000000..2e374d1e --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java @@ -0,0 +1,79 @@ +package me.lucko.spark.api.profiler; + +import me.lucko.spark.api.SparkProvider; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import org.jetbrains.annotations.Nullable; + +import java.time.Duration; + +/** + * Configuration for {@link Profiler profilers}. 
+ */ +public interface ProfilerConfiguration { + static ProfilerConfigurationBuilder builder() { + return SparkProvider.get().configurationBuilder(); + } + + /** + * Get the interval (in millis) of when the profiler should take samples. + * + * @return the sample interval + */ + double interval(); + + /** + * Get if sleeping threads should be ignored. + * + * @return if sleeping threads are ignored + */ + boolean ignoreSleeping(); + + /** + * Get if native threads should be ignored. + * + * @return if native threads are ignored + */ + boolean ignoreNative(); + + /** + * Get if the native Java sampler should be used. + * + * @return if the native Java sampler is used + */ + boolean forceJavaSampler(); + + /** + * Minimum duration (in millis) a tick has to take in order to be recorded. + * If this value is below 0, all ticks will be recorded. + * + * @return the minimum tick duration + */ + int minimumTickDuration(); + + /** + * Get how long the profiler should run; if the duration is null, the profiler runs indefinitely. + * + * @return duration of the profile or null if indefinite + */ + @Nullable + Duration duration(); + + /** + * Get the choice of which dumper to use (i.e. ALL, Regex or Specific). + * If no dumper is defined, ALL is used. + * + * @return the thread dumper choice + */ + @Nullable + ThreadDumper dumper(); + + /** + * Get the choice of which thread grouper (AS_ONE, BY_NAME, BY_POOL) to use for this profiler. + * If the grouper is null, BY_POOL is used. 
+ * + * @return the thread grouper choice + */ + @Nullable + ThreadGrouper grouper(); +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java new file mode 100644 index 00000000..0462bbc6 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java @@ -0,0 +1,88 @@ +package me.lucko.spark.api.profiler; + +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import me.lucko.spark.api.Spark; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import org.jetbrains.annotations.Nullable; + +import java.time.Duration; + +/** + * A builder for {@link ProfilerConfiguration profiler configurations}. + * + * @see Spark#configurationBuilder() + */ +@CanIgnoreReturnValue +@SuppressWarnings("UnusedReturnValue") +public interface ProfilerConfigurationBuilder { + /** + * Set the sampling interval to a given value or 4 if value is below 0.
+ * Note: the interval is in milliseconds + * + * @param samplingInterval the interval + * @return the builder instance + */ + ProfilerConfigurationBuilder samplingInterval(double samplingInterval); + + /** + * Sets the duration of the profiler. + * + * @param duration the duration + * @return the builder instance + */ + ProfilerConfigurationBuilder duration(Duration duration); + + /** + * Set the minimum tick duration that will be profiled. + * If the minimumTickDuration is lower than 0 (default is -1), all ticks will be recorded. + * + * @param minimumTickDuration the minimum tick duration + * @return the builder instance + */ + ProfilerConfigurationBuilder minimumTickDuration(int minimumTickDuration); + + /** + * Set the {@link ThreadGrouper grouper} used to sort the report. + * + * @param threadGrouper the grouper + * @return the builder instance + */ + ProfilerConfigurationBuilder grouper(@Nullable ThreadGrouper threadGrouper); + + /** + * Set the {@link ThreadDumper dumper} used to generate the report. + * + * @param threadDumper the dumper + * @return the builder instance + */ + ProfilerConfigurationBuilder dumper(@Nullable ThreadDumper threadDumper); + + /** + * Makes the configuration ignore sleeping threads. + * + * @return the builder instance + */ + ProfilerConfigurationBuilder ignoreSleeping(); + + /** + * Makes the configuration ignore native threads. + * + * @return the builder instance + */ + ProfilerConfigurationBuilder ignoreNative(); + + /** + * Forces the configuration to use a non-async java sampler. + * + * @return the builder instance + */ + ProfilerConfigurationBuilder forceJavaSampler(); + + /** + * Builds the configuration. 
+ * + * @return the built configuration + */ + ProfilerConfiguration build(); +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java new file mode 100644 index 00000000..649666cc --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java @@ -0,0 +1,21 @@ +package me.lucko.spark.api.profiler.dumper; + +import java.util.Objects; +import java.util.function.Supplier; + +/** + * Utility to cache the creation of a {@link ThreadDumper} targeting + * the game (server/client) thread. + */ +public final class GameThreadDumper implements Supplier { + private SpecificThreadDumper dumper = null; + + @Override + public ThreadDumper get() { + return Objects.requireNonNull(this.dumper, "dumper"); + } + + public void setThread(Thread thread) { + this.dumper = new SpecificThreadDumper(new long[] {thread.getId()}); + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/RegexThreadDumper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/RegexThreadDumper.java new file mode 100644 index 00000000..8f3bf154 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/RegexThreadDumper.java @@ -0,0 +1,68 @@ +package me.lucko.spark.api.profiler.dumper; + +import me.lucko.spark.api.SparkProvider; +import me.lucko.spark.api.util.StreamSupplier; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.lang.management.ThreadInfo; +import java.lang.management.ThreadMXBean; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; +import java.util.stream.Collectors; + +/** + * Implementation of {@link ThreadDumper} that generates data for a regex matched set of threads. 
+ */ +public final class RegexThreadDumper implements ThreadDumper { + private final StreamSupplier finder = SparkProvider.get().threadFinder(); + private final Set namePatterns; + private final Map cache = new HashMap<>(); + + public RegexThreadDumper(Set namePatterns) { + this.namePatterns = namePatterns.stream() + .map(regex -> { + try { + return Pattern.compile(regex, Pattern.CASE_INSENSITIVE); + } catch (PatternSyntaxException e) { + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); + } + + @Override + public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) { + return finder.get() + .filter(thread -> { + Boolean result = this.cache.get(thread.getId()); + if (result != null) { + return result; + } + + for (Pattern pattern : this.namePatterns) { + if (pattern.matcher(thread.getName()).matches()) { + this.cache.put(thread.getId(), true); + return true; + } + } + this.cache.put(thread.getId(), false); + return false; + }) + .map(thread -> threadBean.getThreadInfo(thread.getId(), Integer.MAX_VALUE)) + .filter(Objects::nonNull) + .toArray(ThreadInfo[]::new); + } + + @Override + public SparkSamplerProtos.SamplerMetadata.ThreadDumper getMetadata() { + return SparkSamplerProtos.SamplerMetadata.ThreadDumper.newBuilder() + .setType(SparkSamplerProtos.SamplerMetadata.ThreadDumper.Type.REGEX) + .addAllPatterns(this.namePatterns.stream().map(Pattern::pattern).collect(Collectors.toList())) + .build(); + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/SpecificThreadDumper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/SpecificThreadDumper.java new file mode 100644 index 00000000..bb49dfbe --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/SpecificThreadDumper.java @@ -0,0 +1,67 @@ +package me.lucko.spark.api.profiler.dumper; + +import me.lucko.spark.api.SparkProvider; +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.lang.management.ThreadInfo; +import 
java.lang.management.ThreadMXBean; +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Implementation of {@link ThreadDumper} that generates data for a specific set of threads. + */ +public final class SpecificThreadDumper implements ThreadDumper { + private final long[] ids; + private Set threads; + private Set threadNamesLowerCase; + + public SpecificThreadDumper(Thread thread) { + this.ids = new long[] {thread.getId()}; + } + + public SpecificThreadDumper(long[] ids) { + this.ids = ids; + } + + public SpecificThreadDumper(Set names) { + this.threadNamesLowerCase = names.stream().map(String::toLowerCase).collect(Collectors.toSet()); + this.ids = SparkProvider.get().threadFinder().get() + .filter(t -> this.threadNamesLowerCase.contains(t.getName().toLowerCase())) + .mapToLong(Thread::getId) + .toArray(); + Arrays.sort(this.ids); + } + + public Set getThreads() { + if (this.threads == null) { + this.threads = SparkProvider.get().threadFinder().get() + .filter(t -> Arrays.binarySearch(this.ids, t.getId()) >= 0) + .collect(Collectors.toSet()); + } + return this.threads; + } + + public Set getThreadNames() { + if (this.threadNamesLowerCase == null) { + this.threadNamesLowerCase = getThreads().stream() + .map(t -> t.getName().toLowerCase()) + .collect(Collectors.toSet()); + } + return this.threadNamesLowerCase; + } + + @Override + public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) { + return threadBean.getThreadInfo(this.ids, Integer.MAX_VALUE); + } + + @Override + public SparkSamplerProtos.SamplerMetadata.ThreadDumper getMetadata() { + return SparkSamplerProtos.SamplerMetadata.ThreadDumper.newBuilder() + .setType(SparkSamplerProtos.SamplerMetadata.ThreadDumper.Type.SPECIFIC) + .addAllIds(Arrays.stream(this.ids).boxed().collect(Collectors.toList())) + .build(); + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/ThreadDumper.java 
b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/ThreadDumper.java new file mode 100644 index 00000000..6e8f0bec --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/ThreadDumper.java @@ -0,0 +1,64 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.api.profiler.dumper; + +import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; + +import java.lang.management.ThreadInfo; +import java.lang.management.ThreadMXBean; + +/** + * Uses the {@link ThreadMXBean} to generate {@link ThreadInfo} instances for the threads being + * sampled. + */ +public interface ThreadDumper { + + /** + * Generates {@link ThreadInfo} data for the sampled threads. + * + * @param threadBean the thread bean instance to obtain the data from + * @return an array of generated thread info instances + */ + ThreadInfo[] dumpThreads(ThreadMXBean threadBean); + + /** + * Gets metadata about the thread dumper instance. + */ + SamplerMetadata.ThreadDumper getMetadata(); + + /** + * Implementation of {@link ThreadDumper} that generates data for all threads. 
+ */ + ThreadDumper ALL = new ThreadDumper() { + @Override + public ThreadInfo[] dumpThreads(final ThreadMXBean threadBean) { + return threadBean.dumpAllThreads(false, false); + } + + @Override + public SamplerMetadata.ThreadDumper getMetadata() { + return SamplerMetadata.ThreadDumper.newBuilder() + .setType(SamplerMetadata.ThreadDumper.Type.ALL) + .build(); + } + }; + +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java new file mode 100644 index 00000000..6d7717bc --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java @@ -0,0 +1,31 @@ +package me.lucko.spark.api.profiler.report; + +import me.lucko.spark.proto.SparkSamplerProtos; + +import java.io.IOException; +import java.nio.file.Path; + +/** + * Represents the result of a profiler. + */ +public interface ProfilerReport { + /** + * Uploads this report online. + * @return the URL of the uploaded report + */ + String upload() throws IOException; + + /** + * Gets the data of this report + * @return the data + */ + SparkSamplerProtos.SamplerData data(); + + /** + * Saves this report to a local file. 
+ * @param path the path to save to + * @return the {@code path} + * @throws IOException if an exception occurred + */ + Path saveToFile(Path path) throws IOException; +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java new file mode 100644 index 00000000..98d360f4 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java @@ -0,0 +1,58 @@ +package me.lucko.spark.api.profiler.report; + +import me.lucko.spark.api.profiler.thread.ThreadNode; +import me.lucko.spark.api.profiler.thread.ThreadOrder; +import org.jetbrains.annotations.Nullable; + +import java.util.Comparator; +import java.util.UUID; + +/** + * Configuration for {@link ProfilerReport reports}. + */ +public interface ReportConfiguration { + static ReportConfigurationBuilder builder() { + return new ReportConfigurationBuilder(); + } + + /** + * Gets the ordering used by the report. + * + * @return the ordering used by the report + * @see ThreadOrder + */ + Comparator threadOrder(); + + /** + * Gets the sender of the report + * + * @return the report's sender, or else {@code null} + */ + @Nullable + Sender sender(); + + /** + * If the thread viewer should separate parent calls. + * + * @return if the thread viewer should separate parent calls + */ + boolean separateParentCalls(); + + /** + * Gets the comment of the report. 
+ * + * @return the report's comment + */ + @Nullable + String comment(); + + class Sender { + public final String name; + public final UUID uuid; + + public Sender(String name, UUID uuid) { + this.name = name; + this.uuid = uuid; + } + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java new file mode 100644 index 00000000..9d94e2b5 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java @@ -0,0 +1,70 @@ +package me.lucko.spark.api.profiler.report; + +import me.lucko.spark.api.profiler.thread.ThreadNode; +import me.lucko.spark.api.profiler.thread.ThreadOrder; +import org.checkerframework.checker.nullness.qual.NonNull; +import org.jetbrains.annotations.Nullable; + +import java.util.Comparator; +import java.util.UUID; + +public class ReportConfigurationBuilder { + private Comparator order = ThreadOrder.BY_NAME; + private ReportConfiguration.Sender sender; + private boolean separateParentCalls; + private String comment; + + /** + * Sets the order used by this builder. 
+ * @param order the order + * @return the builder + * @see ThreadOrder + */ + public ReportConfigurationBuilder order(@NonNull Comparator order) { + this.order = order; + return this; + } + + public ReportConfigurationBuilder sender(@Nullable ReportConfiguration.Sender sender) { + this.sender = sender; + return this; + } + + public ReportConfigurationBuilder sender(@NonNull String name, @NonNull UUID uuid) { + return sender(new ReportConfiguration.Sender(name, uuid)); + } + + public ReportConfigurationBuilder separateParentCalls(boolean separateParentCalls) { + this.separateParentCalls = separateParentCalls; + return this; + } + + public ReportConfigurationBuilder comment(@Nullable String comment) { + this.comment = comment; + return this; + } + + public ReportConfiguration build() { + return new ReportConfiguration() { + @Override + public Comparator threadOrder() { + return order; + } + + @Override + public @Nullable Sender sender() { + return sender; + } + + @Override + public boolean separateParentCalls() { + return separateParentCalls; + } + + @Override + public @Nullable String comment() { + return comment; + } + }; + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java new file mode 100644 index 00000000..f16b6f9b --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java @@ -0,0 +1,75 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.api.profiler.thread; + +import me.lucko.spark.api.SparkProvider; +import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata.DataAggregator; + +/** + * Function for grouping threads together + */ +public interface ThreadGrouper { + + /** + * Implementation of {@link ThreadGrouper} that just groups by thread name. + */ + ThreadGrouper BY_NAME = SparkProvider.get().getGrouper(DataAggregator.ThreadGrouper.BY_NAME); + + /** + * Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool + * the thread originated from. + * + *

The regex pattern used to match pools expects a digit at the end of the thread name, + * separated from the pool name with any of one or more of ' ', '-', or '#'.

+ */ + ThreadGrouper BY_POOL = SparkProvider.get().getGrouper(DataAggregator.ThreadGrouper.BY_POOL); + + /** + * Implementation of {@link ThreadGrouper} which groups all threads as one, under + * the name "All". + */ + ThreadGrouper AS_ONE = SparkProvider.get().getGrouper(DataAggregator.ThreadGrouper.AS_ONE); + + /** + * Gets the group for the given thread. + * + * @param threadId the id of the thread + * @param threadName the name of the thread + * @return the group + */ + String getGroup(long threadId, String threadName); + + /** + * Gets the label to use for a given group. + * + * @param group the group + * @return the label + */ + String getLabel(String group); + + /** + * Gets the proto equivalent of this grouper.
+ * If this is a custom grouper, use {@link DataAggregator.ThreadGrouper#UNRECOGNIZED} + * + * @return the proto equivalent + */ + DataAggregator.ThreadGrouper asProto(); +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadNode.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadNode.java new file mode 100644 index 00000000..b79ab640 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadNode.java @@ -0,0 +1,27 @@ +package me.lucko.spark.api.profiler.thread; + +/** + * Represents a thread + */ +public interface ThreadNode { + /** + * Gets the label of this thread. + * + * @return the label + */ + String getLabel(); + + /** + * Gets the group of this thread. + * + * @return the group + */ + String getGroup(); + + /** + * Gets the total lifetime of this thread. + * + * @return the lifetime + */ + double getTotalTime(); +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadOrder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadOrder.java new file mode 100644 index 00000000..1de25c8b --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadOrder.java @@ -0,0 +1,29 @@ +package me.lucko.spark.api.profiler.thread; + +import java.util.Comparator; + +/** + * Methods of ordering {@link ThreadNode}s in the output data. 
+ */ +public enum ThreadOrder implements Comparator { + + /** + * Order by the name of the thread (alphabetically) + */ + BY_NAME { + @Override + public int compare(ThreadNode o1, ThreadNode o2) { + return o1.getLabel().compareTo(o2.getLabel()); + } + }, + + /** + * Order by the time taken by the thread (most time taken first) + */ + BY_TIME { + @Override + public int compare(ThreadNode o1, ThreadNode o2) { + return -Double.compare(o1.getTotalTime(), o2.getTotalTime()); + } + } +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/StreamSupplier.java b/spark-api/src/main/java/me/lucko/spark/api/util/StreamSupplier.java new file mode 100644 index 00000000..6bf5de18 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/util/StreamSupplier.java @@ -0,0 +1,13 @@ +package me.lucko.spark.api.util; + +import java.util.stream.Stream; + +/** + * A {@link java.util.function.Supplier supplier} returning a stream of the type {@code T}. + * + * @param the type of the stream + */ +@FunctionalInterface +public interface StreamSupplier { + Stream get(); +} diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java index 5737d3dc..b172e736 100644 --- a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java @@ -21,6 +21,7 @@ package me.lucko.spark.bukkit; import me.lucko.spark.api.Spark; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; import me.lucko.spark.bukkit.placeholder.SparkMVdWPlaceholders; import me.lucko.spark.bukkit.placeholder.SparkPlaceholderApi; import me.lucko.spark.common.SparkPlatform; @@ -29,7 +30,7 @@ import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; 
+import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.common.util.ClassSourceLookup; @@ -59,7 +60,7 @@ public class BukkitSparkPlugin extends JavaPlugin implements SparkPlugin { @Override public void onEnable() { this.audienceFactory = BukkitAudiences.create(this); - this.gameThreadDumper = new ThreadDumper.Specific(Thread.currentThread()); + this.gameThreadDumper = new SpecificThreadDumper(Thread.currentThread()); this.platform = new SparkPlatform(this); this.platform.enable(); diff --git a/spark-common/build.gradle b/spark-common/build.gradle index fbd0db26..434096ba 100644 --- a/spark-common/build.gradle +++ b/spark-common/build.gradle @@ -1,7 +1,3 @@ -plugins { - id 'com.google.protobuf' version '0.8.16' -} - license { exclude '**/sampler/async/jfr/**' } @@ -10,7 +6,6 @@ dependencies { api project(':spark-api') implementation 'com.github.jvm-profiling-tools:async-profiler:v2.8.1' implementation 'org.ow2.asm:asm:9.1' - implementation 'com.google.protobuf:protobuf-javalite:3.15.6' implementation 'com.squareup.okhttp3:okhttp:3.14.1' implementation 'com.squareup.okio:okio:1.17.3' implementation 'net.bytebuddy:byte-buddy-agent:1.11.0' @@ -36,22 +31,3 @@ dependencies { compileOnly 'com.google.guava:guava:19.0' compileOnly 'org.checkerframework:checker-qual:3.8.0' } - -protobuf { - protoc { - if (System.getProperty("os.name") == "Mac OS X" && System.getProperty("os.arch") == "aarch64") { - path = '/opt/homebrew/bin/protoc' - } else { - artifact = 'com.google.protobuf:protoc:3.15.6' - } - } - generateProtoTasks { - all().each { task -> - task.builtins { - java { - option 'lite' - } - } - } - } -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 0ef45563..a09c80a2 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ 
b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -123,7 +123,7 @@ public SparkPlatform(SparkPlugin plugin) { this.disableResponseBroadcast = this.configuration.getBoolean("disableResponseBroadcast", false); this.commandModules = ImmutableList.of( - new SamplerModule(), + new SamplerModule(this), new HealthModule(), new TickMonitoringModule(), new GcMonitoringModule(), diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java index 1116b04c..5296f179 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java @@ -26,7 +26,7 @@ import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.common.util.ClassSourceLookup; diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java index 5b1ec2b9..a5d1253b 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java @@ -21,24 +21,35 @@ package me.lucko.spark.common.api; import com.google.common.collect.ImmutableMap; - import me.lucko.spark.api.Spark; import me.lucko.spark.api.SparkProvider; import me.lucko.spark.api.gc.GarbageCollector; +import me.lucko.spark.api.profiler.Profiler; +import me.lucko.spark.api.profiler.ProfilerConfigurationBuilder; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; import 
me.lucko.spark.api.statistic.types.DoubleStatistic; import me.lucko.spark.api.statistic.types.GenericStatistic; +import me.lucko.spark.api.util.StreamSupplier; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.monitor.cpu.CpuMonitor; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; - +import me.lucko.spark.common.sampler.SamplerBuilder; +import me.lucko.spark.common.sampler.ProfilerService; +import me.lucko.spark.common.util.ThreadFinder; +import me.lucko.spark.proto.SparkSamplerProtos; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; import java.lang.reflect.Method; +import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import static me.lucko.spark.api.statistic.StatisticWindow.CpuUsage; import static me.lucko.spark.api.statistic.StatisticWindow.MillisPerTick; @@ -187,4 +198,103 @@ public static void unregister() { e.printStackTrace(); } } + + @Override + public @NonNull StreamSupplier threadFinder() { + final ThreadFinder finder = new ThreadFinder(); + return finder::getThreads; + } + + @Override + public @NonNull ProfilerConfigurationBuilder configurationBuilder() { + return new SamplerBuilder(); + } + + @Override + public @NonNull Profiler profiler() { + return new ProfilerService(platform); + } + + @Override + public @NonNull ThreadGrouper getGrouper(SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper type) { + switch (type) { + case AS_ONE: return new ThreadGrouper() { + private final Set seen = ConcurrentHashMap.newKeySet(); + + @Override + public String getGroup(long threadId, String threadName) { + this.seen.add(threadId); + return "root"; + } + + @Override + public String getLabel(String group) { + return "All (x" 
+ this.seen.size() + ")"; + } + + @Override + public SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper asProto() { + return SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper.AS_ONE; + } + }; + case BY_NAME: return new ThreadGrouper() { + @Override + public String getGroup(long threadId, String threadName) { + return threadName; + } + + @Override + public String getLabel(String group) { + return group; + } + + @Override + public SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper asProto() { + return SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper.BY_NAME; + } + }; + case BY_POOL: //noinspection EnumSwitchStatementWhichMissesCases + return new ThreadGrouper() { + private /* static */ final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$"); + + // thread id -> group + private final Map cache = new ConcurrentHashMap<>(); + // group -> thread ids + private final Map> seen = new ConcurrentHashMap<>(); + + @Override + public String getGroup(long threadId, String threadName) { + String cached = this.cache.get(threadId); + if (cached != null) { + return cached; + } + + Matcher matcher = this.pattern.matcher(threadName); + if (!matcher.matches()) { + return threadName; + } + + String group = matcher.group(1).trim(); + this.cache.put(threadId, group); + this.seen.computeIfAbsent(group, g -> ConcurrentHashMap.newKeySet()).add(threadId); + return group; + } + + @Override + public String getLabel(String group) { + int count = this.seen.getOrDefault(group, Collections.emptySet()).size(); + if (count == 0) { + return group; + } + return group + " (x" + count + ")"; + } + + @Override + public SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper asProto() { + return SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper.BY_POOL; + } + }; + default: throw new AssertionError("Unknown thread grouper!"); + } + } } diff --git 
a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index fd5cd67e..d8d9ba74 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -21,7 +21,13 @@ package me.lucko.spark.common.command.modules; import com.google.common.collect.Iterables; - +import me.lucko.spark.api.profiler.dumper.RegexThreadDumper; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.report.ProfilerReport; +import me.lucko.spark.api.profiler.report.ReportConfiguration; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import me.lucko.spark.api.profiler.thread.ThreadOrder; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.activitylog.Activity; import me.lucko.spark.common.command.Arguments; @@ -31,23 +37,16 @@ import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.command.tabcomplete.CompletionSupplier; import me.lucko.spark.common.command.tabcomplete.TabCompleter; +import me.lucko.spark.common.sampler.ProfilerService; import me.lucko.spark.common.sampler.Sampler; import me.lucko.spark.common.sampler.SamplerBuilder; -import me.lucko.spark.common.sampler.ThreadDumper; -import me.lucko.spark.common.sampler.ThreadGrouper; -import me.lucko.spark.common.sampler.ThreadNodeOrder; import me.lucko.spark.common.sampler.async.AsyncSampler; -import me.lucko.spark.common.sampler.node.MergeMode; -import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.proto.SparkSamplerProtos; - import net.kyori.adventure.text.event.ClickEvent; - import okhttp3.MediaType; import java.io.IOException; -import java.nio.file.Files; import 
java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; @@ -68,15 +67,15 @@ public class SamplerModule implements CommandModule { private static final MediaType SPARK_SAMPLER_MEDIA_TYPE = MediaType.parse("application/x-spark-sampler"); - /** The sampler instance currently running, if any */ - private Sampler activeSampler = null; + private final ProfilerService service; + + public SamplerModule(SparkPlatform platform) { + service = new ProfilerService(platform); + } @Override public void close() { - if (this.activeSampler != null) { - this.activeSampler.stop(); - this.activeSampler = null; - } + service.clearAndStop(); } @Override @@ -141,18 +140,9 @@ private void profiler(SparkPlatform platform, CommandSender sender, CommandRespo } private void profilerStart(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { + resp.broadcastPrefixed(text("Initializing a new profiler, please wait...")); + int timeoutSeconds = arguments.intFlag("timeout"); - if (timeoutSeconds != -1 && timeoutSeconds <= 10) { - resp.replyPrefixed(text("The specified timeout is not long enough for accurate results to be formed. " + - "Please choose a value greater than 10.", RED)); - return; - } - - if (timeoutSeconds != -1 && timeoutSeconds < 30) { - resp.replyPrefixed(text("The accuracy of the output will significantly improve when the profiler is able to run for longer periods. 
" + - "Consider setting a timeout value over 30 seconds.")); - } - double intervalMillis = arguments.doubleFlag("interval"); if (intervalMillis <= 0) { intervalMillis = 4; @@ -171,10 +161,10 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command threadDumper = ThreadDumper.ALL; } else { if (arguments.boolFlag("regex")) { - threadDumper = new ThreadDumper.Regex(threads); + threadDumper = new RegexThreadDumper(threads); } else { // specific matches - threadDumper = new ThreadDumper.Specific(threads); + threadDumper = new SpecificThreadDumper(threads); } } @@ -188,25 +178,10 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command } int ticksOver = arguments.intFlag("only-ticks-over"); - TickHook tickHook = null; - if (ticksOver != -1) { - tickHook = platform.getTickHook(); - if (tickHook == null) { - resp.replyPrefixed(text("Tick counting is not supported!", RED)); - return; - } - } - - if (this.activeSampler != null) { - resp.replyPrefixed(text("An active profiler is already running.")); - return; - } - - resp.broadcastPrefixed(text("Initializing a new profiler, please wait...")); SamplerBuilder builder = new SamplerBuilder(); - builder.threadDumper(threadDumper); - builder.threadGrouper(threadGrouper); + builder.dumper(threadDumper); + builder.grouper(threadGrouper); if (timeoutSeconds != -1) { builder.completeAfter(timeoutSeconds, TimeUnit.SECONDS); } @@ -215,9 +190,14 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command builder.ignoreNative(ignoreNative); builder.forceJavaSampler(forceJavaSampler); if (ticksOver != -1) { - builder.ticksOver(ticksOver, tickHook); + builder.minimumTickDuration(ticksOver); } - Sampler sampler = this.activeSampler = builder.start(platform); + final ProfilerService service = new ProfilerService(platform); + final Sampler sampler = service.create(builder.build(), e -> resp.replyPrefixed(text(e, RED))); + if (sampler == null) // Feedback is handled in 
the consumer + return; + + sampler.start(); resp.broadcastPrefixed(text() .append(text("Profiler now active!", GOLD)) @@ -231,42 +211,36 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.")); } - CompletableFuture future = this.activeSampler.getFuture(); + final CompletableFuture future = sampler.getFuture(); // send message if profiling fails future.whenCompleteAsync((s, throwable) -> { if (throwable != null) { - resp.broadcastPrefixed(text("Profiler operation failed unexpectedly. Error: " + throwable.toString(), RED)); + resp.broadcastPrefixed(text("Profiler operation failed unexpectedly. Error: " + throwable, RED)); throwable.printStackTrace(); } }); - // set activeSampler to null when complete. - future.whenCompleteAsync((s, throwable) -> { - if (sampler == this.activeSampler) { - this.activeSampler = null; - } - }); - // await the result if (timeoutSeconds != -1) { - ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME; + ThreadOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadOrder.BY_TIME : ThreadOrder.BY_NAME; String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); - MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); + boolean sepPar = arguments.boolFlag("separate-parent-calls"); boolean saveToFile = arguments.boolFlag("save-to-file"); future.thenAcceptAsync(s -> { resp.broadcastPrefixed(text("The active profiler has completed! 
Uploading results...")); - handleUpload(platform, resp, s, threadOrder, comment, mergeMode, saveToFile); + handleUpload(platform, resp, s, threadOrder, comment, sepPar, saveToFile); }); } } private void profilerInfo(CommandResponseHandler resp) { - if (this.activeSampler == null) { + final Sampler active = service.active(); + if (active == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { - long timeout = this.activeSampler.getAutoEndTime(); + long timeout = active.getAutoEndTime(); if (timeout == -1) { resp.replyPrefixed(text("There is an active profiler currently running, with no defined timeout.")); } else { @@ -274,13 +248,13 @@ private void profilerInfo(CommandResponseHandler resp) { resp.replyPrefixed(text("There is an active profiler currently running, due to timeout in " + timeoutDiff + " seconds.")); } - long runningTime = (System.currentTimeMillis() - this.activeSampler.getStartTime()) / 1000L; + long runningTime = (System.currentTimeMillis() - active.getStartTime()) / 1000L; resp.replyPrefixed(text("It has been profiling for " + runningTime + " seconds so far.")); } } private void profilerCancel(CommandResponseHandler resp) { - if (this.activeSampler == null) { + if (service.active() == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { close(); @@ -289,31 +263,40 @@ private void profilerCancel(CommandResponseHandler resp) { } private void profilerStop(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { - if (this.activeSampler == null) { + final Sampler sampler = service.active(); + if (sampler == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { - this.activeSampler.stop(); + sampler.stop(); resp.broadcastPrefixed(text("The active profiler has been stopped! Uploading results...")); - ThreadNodeOrder threadOrder = arguments.boolFlag("order-by-time") ? 
ThreadNodeOrder.BY_TIME : ThreadNodeOrder.BY_NAME; + final ThreadOrder threadOrder = arguments.boolFlag("order-by-time") ? ThreadOrder.BY_TIME : ThreadOrder.BY_NAME; String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); - MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); - MergeMode mergeMode = arguments.boolFlag("separate-parent-calls") ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator); + boolean sepParentCalls = arguments.boolFlag("separate-parent-calls"); boolean saveToFile = arguments.boolFlag("save-to-file"); - handleUpload(platform, resp, this.activeSampler, threadOrder, comment, mergeMode, saveToFile); - this.activeSampler = null; + handleUpload(platform, resp, sampler, threadOrder, comment, sepParentCalls, saveToFile); + service.clear(); } } - private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) { - SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup()); + public static String postData(SparkPlatform platform, SparkSamplerProtos.SamplerData output) throws IOException { + String key = platform.getBytebinClient().postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key(); + return platform.getViewerUrl() + key; + } + + private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadOrder threadOrder, String comment, boolean separateParentCalls, boolean saveToFileFlag) { + final ProfilerReport report = sampler.dumpReport(ReportConfiguration.builder() + .order(threadOrder) + .comment(comment) + .separateParentCalls(separateParentCalls) + .sender(resp.sender().getName(), resp.sender().getUniqueId()) + .build()); boolean saveToFile = false; if (saveToFileFlag) { saveToFile = true; } else { try { - String key = 
platform.getBytebinClient().postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key(); - String url = platform.getViewerUrl() + key; + final String url = report.upload(); resp.broadcastPrefixed(text("Profiler results:", GOLD)); resp.broadcast(text() @@ -334,7 +317,7 @@ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, S if (saveToFile) { Path file = platform.resolveSaveFile("profile", "sparkprofile"); try { - Files.write(file, output.toByteArray()); + report.saveToFile(file); resp.broadcastPrefixed(text() .content("Profile written to: ") diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 1c217dbb..411a510b 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -20,7 +20,11 @@ package me.lucko.spark.common.sampler; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.report.ProfilerReport; +import me.lucko.spark.api.profiler.report.ReportConfiguration; import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.command.modules.SamplerModule; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; @@ -29,12 +33,20 @@ import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.ClassSourceLookup; +import me.lucko.spark.common.util.MethodDisambiguator; +import me.lucko.spark.proto.SparkSamplerProtos; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; +import net.kyori.adventure.text.Component; +import org.checkerframework.checker.nullness.qual.Nullable; +import 
java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.Comparator; import java.util.List; import java.util.Map; +import java.util.UUID; import java.util.concurrent.CompletableFuture; /** @@ -109,16 +121,76 @@ public void start() { } } - protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, CommandSender creator, String comment, DataAggregator dataAggregator) { + @Override + public ProfilerReport dumpReport(ReportConfiguration configuration) { + return createReport(configuration); + } + + private ProfilerReport createReport(ReportConfiguration configuration) { + final MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); + final ReportConfiguration.Sender rSender = configuration.sender(); + final CommandSender sender = rSender == null ? null : new CommandSender() { + @Override + public String getName() { + return rSender.name; + } + + @Override + public UUID getUniqueId() { + return rSender.uuid; + } + + @Override + public void sendMessage(Component message) { + + } + + @Override + public boolean hasPermission(String permission) { + return true; + } + }; + return new ProfilerReport() { + final SparkSamplerProtos.SamplerData data = toProto(platform, sender, configuration.threadOrder()::compare, configuration.comment(), configuration.separateParentCalls() ? 
MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator), platform.createClassSourceLookup()); + + String uploadedUrl; + + @Override + public String upload() throws IOException { + if (uploadedUrl == null) + uploadedUrl = SamplerModule.postData(platform, data); + return uploadedUrl; + } + + @Override + public SparkSamplerProtos.SamplerData data() { + return data; + } + + @Override + public Path saveToFile(Path path) throws IOException { + return Files.write(path, data.toByteArray()); + } + }; + } + + @Override + public CompletableFuture whenDone(ReportConfiguration configuration) { + return getFuture().thenApply(samp -> createReport(configuration)); + } + + protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, @Nullable CommandSender creator, @Nullable String comment, DataAggregator dataAggregator) { SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder() .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()) - .setCreator(creator.toData().toProto()) .setStartTime(this.startTime) .setEndTime(System.currentTimeMillis()) .setInterval(this.interval) .setThreadDumper(this.threadDumper.getMetadata()) .setDataAggregator(dataAggregator.getMetadata()); + if (creator != null) + metadata.setCreator(creator.toData().toProto()); + if (comment != null) { metadata.setComment(comment); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java new file mode 100644 index 00000000..7f99ea0e --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java @@ -0,0 +1,96 @@ +package me.lucko.spark.common.sampler; + +import me.lucko.spark.api.profiler.Profiler; +import me.lucko.spark.api.profiler.ProfilerConfiguration; +import me.lucko.spark.api.profiler.dumper.RegexThreadDumper; +import me.lucko.spark.common.SparkPlatform; +import 
me.lucko.spark.common.sampler.async.AsyncProfilerAccess; +import me.lucko.spark.common.sampler.async.AsyncSampler; +import me.lucko.spark.common.sampler.java.JavaSampler; +import me.lucko.spark.common.tick.TickHook; + +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.function.Consumer; + +public class ProfilerService implements Profiler { + private final SparkPlatform platform; + + public static final int MINIMUM_DURATION = 10; + + private me.lucko.spark.common.sampler.Sampler active; + + public ProfilerService(SparkPlatform platform) { + this.platform = platform; + } + + @Override + public me.lucko.spark.common.sampler.Sampler create(ProfilerConfiguration configuration, Consumer err) { + if (active != null) { + err.accept("A profiler is already running!"); + return null; + } + + Duration duration = configuration.duration(); + if (duration == null) + duration = Duration.of(MINIMUM_DURATION, ChronoUnit.SECONDS); + if (duration.getSeconds() <= MINIMUM_DURATION) { + err.accept("A profiler needs to run for at least " + MINIMUM_DURATION + " seconds!"); + return null; + } + + double interval = configuration.interval(); + if (interval <= 0) { + err.accept("Cannot run profiler with negative interval."); + return null; + } + + TickHook hook = null; + int minimum = configuration.minimumTickDuration(); + if (minimum >= 0) { + hook = platform.getTickHook(); + if (hook == null) { + err.accept("Tick counting is not supported!"); + return null; + } + } + + final int intervalMicros = (int) (interval * 1000d); + final long timeout = computeTimeout(duration); + me.lucko.spark.common.sampler.Sampler sampler; + if (minimum >= 1) { + sampler = new JavaSampler(platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative(), hook, configuration.minimumTickDuration()); + } else if (!configuration.forceJavaSampler() && !(configuration.dumper() instanceof RegexThreadDumper) && 
AsyncProfilerAccess.INSTANCE.checkSupported(platform)) { + sampler = new AsyncSampler(platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout); + } else { + sampler = new JavaSampler(platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative()); + } + // set activeSampler to null when complete. + sampler.getFuture().whenCompleteAsync((s, throwable) -> { + if (sampler == this.active) { + this.active = null; + } + }); + + return active = sampler; + } + + public me.lucko.spark.common.sampler.Sampler active() { + return active; + } + public void clear() { + if (active != null) { + active = null; + } + } + public void clearAndStop() { + if (active != null) { + active.stop(); + active = null; + } + } + + private static long computeTimeout(Duration duration) { + return System.currentTimeMillis() + duration.toMillis(); + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 84f2da1c..6b7867d7 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -20,6 +20,7 @@ package me.lucko.spark.common.sampler; +import me.lucko.spark.api.profiler.Profiler; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.node.MergeMode; @@ -33,17 +34,7 @@ /** * Abstract superinterface for all sampler implementations. */ -public interface Sampler { - - /** - * Starts the sampler. - */ - void start(); - - /** - * Stops the sampler. 
- */ - void stop(); +public interface Sampler extends Profiler.Sampler { /** * Gets the time when the sampler started (unix timestamp in millis) diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index 88b9d919..1fd1d96c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -20,36 +20,37 @@ package me.lucko.spark.common.sampler; -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.sampler.async.AsyncProfilerAccess; -import me.lucko.spark.common.sampler.async.AsyncSampler; -import me.lucko.spark.common.sampler.java.JavaSampler; -import me.lucko.spark.common.tick.TickHook; - +import me.lucko.spark.api.profiler.ProfilerConfiguration; +import me.lucko.spark.api.profiler.ProfilerConfigurationBuilder; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import org.checkerframework.checker.nullness.qual.Nullable; + +import java.time.Duration; +import java.time.temporal.ChronoUnit; import java.util.concurrent.TimeUnit; /** * Builds {@link Sampler} instances. 
*/ @SuppressWarnings("UnusedReturnValue") -public class SamplerBuilder { +public class SamplerBuilder implements ProfilerConfigurationBuilder { private double samplingInterval = 4; // milliseconds private boolean ignoreSleeping = false; private boolean ignoreNative = false; private boolean useAsyncProfiler = true; - private long timeout = -1; + private Duration duration; private ThreadDumper threadDumper = ThreadDumper.ALL; private ThreadGrouper threadGrouper = ThreadGrouper.BY_NAME; - private int ticksOver = -1; - private TickHook tickHook = null; + private int minimumTickDuration = -1; public SamplerBuilder() { } public SamplerBuilder samplingInterval(double samplingInterval) { - this.samplingInterval = samplingInterval; + this.samplingInterval = samplingInterval <= 0 ? 4 : samplingInterval; return this; } @@ -57,23 +58,41 @@ public SamplerBuilder completeAfter(long timeout, TimeUnit unit) { if (timeout <= 0) { throw new IllegalArgumentException("timeout > 0"); } - this.timeout = System.currentTimeMillis() + unit.toMillis(timeout); + this.duration = Duration.of(timeout, toChronoUnit(unit)); return this; } - public SamplerBuilder threadDumper(ThreadDumper threadDumper) { + private static ChronoUnit toChronoUnit(TimeUnit unit) { + switch (unit) { + case NANOSECONDS: return ChronoUnit.NANOS; + case MICROSECONDS: return ChronoUnit.MICROS; + case MILLISECONDS: return ChronoUnit.MILLIS; + case SECONDS: return ChronoUnit.SECONDS; + case MINUTES: return ChronoUnit.MINUTES; + case HOURS: return ChronoUnit.HOURS; + case DAYS: return ChronoUnit.DAYS; + default: throw new AssertionError(); + } + } + + @Override + public SamplerBuilder duration(Duration duration) { + return completeAfter(duration.toMillis(), TimeUnit.MILLISECONDS); + } + + public SamplerBuilder dumper(ThreadDumper threadDumper) { this.threadDumper = threadDumper; return this; } - public SamplerBuilder threadGrouper(ThreadGrouper threadGrouper) { + public SamplerBuilder grouper(ThreadGrouper threadGrouper) { 
this.threadGrouper = threadGrouper; return this; } - public SamplerBuilder ticksOver(int ticksOver, TickHook tickHook) { - this.ticksOver = ticksOver; - this.tickHook = tickHook; + @Override + public SamplerBuilder minimumTickDuration(int duration) { + this.minimumTickDuration = duration; return this; } @@ -82,30 +101,72 @@ public SamplerBuilder ignoreSleeping(boolean ignoreSleeping) { return this; } + @Override + public SamplerBuilder ignoreSleeping() { + return ignoreSleeping(true); + } + public SamplerBuilder ignoreNative(boolean ignoreNative) { this.ignoreNative = ignoreNative; return this; } + @Override + public SamplerBuilder ignoreNative() { + return ignoreNative(true); + } public SamplerBuilder forceJavaSampler(boolean forceJavaSampler) { this.useAsyncProfiler = !forceJavaSampler; return this; } + @Override + public SamplerBuilder forceJavaSampler() { + return forceJavaSampler(true); + } - public Sampler start(SparkPlatform platform) { - int intervalMicros = (int) (this.samplingInterval * 1000d); - - Sampler sampler; - if (this.ticksOver != -1 && this.tickHook != null) { - sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative, this.tickHook, this.ticksOver); - } else if (this.useAsyncProfiler && !(this.threadDumper instanceof ThreadDumper.Regex) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) { - sampler = new AsyncSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout); - } else { - sampler = new JavaSampler(platform, intervalMicros, this.threadDumper, this.threadGrouper, this.timeout, this.ignoreSleeping, this.ignoreNative); - } - - sampler.start(); - return sampler; + @Override + public ProfilerConfiguration build() { + return new ProfilerConfiguration() { + @Override + public double interval() { + return samplingInterval; + } + + @Override + public boolean ignoreSleeping() { + return ignoreSleeping; + } + + @Override + 
public boolean ignoreNative() { + return ignoreNative; + } + + @Override + public boolean forceJavaSampler() { + return !useAsyncProfiler; + } + + @Override + public int minimumTickDuration() { + return minimumTickDuration; + } + + @Override + public @Nullable Duration duration() { + return duration; + } + + @Override + public @Nullable ThreadDumper dumper() { + return threadDumper; + } + + @Override + public @Nullable ThreadGrouper grouper() { + return threadGrouper; + } + }; } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java deleted file mode 100644 index fe3a6a73..00000000 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadDumper.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -package me.lucko.spark.common.sampler; - -import me.lucko.spark.common.util.ThreadFinder; -import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; - -import java.lang.management.ThreadInfo; -import java.lang.management.ThreadMXBean; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.function.Supplier; -import java.util.regex.Pattern; -import java.util.regex.PatternSyntaxException; -import java.util.stream.Collectors; - -/** - * Uses the {@link ThreadMXBean} to generate {@link ThreadInfo} instances for the threads being - * sampled. - */ -public interface ThreadDumper { - - /** - * Generates {@link ThreadInfo} data for the sampled threads. - * - * @param threadBean the thread bean instance to obtain the data from - * @return an array of generated thread info instances - */ - ThreadInfo[] dumpThreads(ThreadMXBean threadBean); - - /** - * Gets metadata about the thread dumper instance. - */ - SamplerMetadata.ThreadDumper getMetadata(); - - /** - * Implementation of {@link ThreadDumper} that generates data for all threads. - */ - ThreadDumper ALL = new ThreadDumper() { - @Override - public ThreadInfo[] dumpThreads(final ThreadMXBean threadBean) { - return threadBean.dumpAllThreads(false, false); - } - - @Override - public SamplerMetadata.ThreadDumper getMetadata() { - return SamplerMetadata.ThreadDumper.newBuilder() - .setType(SamplerMetadata.ThreadDumper.Type.ALL) - .build(); - } - }; - - /** - * Utility to cache the creation of a {@link ThreadDumper} targeting - * the game (server/client) thread. 
- */ - final class GameThread implements Supplier { - private Specific dumper = null; - - @Override - public ThreadDumper get() { - return Objects.requireNonNull(this.dumper, "dumper"); - } - - public void setThread(Thread thread) { - this.dumper = new Specific(new long[]{thread.getId()}); - } - } - - /** - * Implementation of {@link ThreadDumper} that generates data for a specific set of threads. - */ - final class Specific implements ThreadDumper { - private final long[] ids; - private Set threads; - private Set threadNamesLowerCase; - - public Specific(Thread thread) { - this.ids = new long[]{thread.getId()}; - } - - public Specific(long[] ids) { - this.ids = ids; - } - - public Specific(Set names) { - this.threadNamesLowerCase = names.stream().map(String::toLowerCase).collect(Collectors.toSet()); - this.ids = new ThreadFinder().getThreads() - .filter(t -> this.threadNamesLowerCase.contains(t.getName().toLowerCase())) - .mapToLong(Thread::getId) - .toArray(); - Arrays.sort(this.ids); - } - - public Set getThreads() { - if (this.threads == null) { - this.threads = new ThreadFinder().getThreads() - .filter(t -> Arrays.binarySearch(this.ids, t.getId()) >= 0) - .collect(Collectors.toSet()); - } - return this.threads; - } - - public Set getThreadNames() { - if (this.threadNamesLowerCase == null) { - this.threadNamesLowerCase = getThreads().stream() - .map(t -> t.getName().toLowerCase()) - .collect(Collectors.toSet()); - } - return this.threadNamesLowerCase; - } - - @Override - public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) { - return threadBean.getThreadInfo(this.ids, Integer.MAX_VALUE); - } - - @Override - public SamplerMetadata.ThreadDumper getMetadata() { - return SamplerMetadata.ThreadDumper.newBuilder() - .setType(SamplerMetadata.ThreadDumper.Type.SPECIFIC) - .addAllIds(Arrays.stream(this.ids).boxed().collect(Collectors.toList())) - .build(); - } - } - - /** - * Implementation of {@link ThreadDumper} that generates data for a regex matched set of 
threads. - */ - final class Regex implements ThreadDumper { - private final ThreadFinder threadFinder = new ThreadFinder(); - private final Set namePatterns; - private final Map cache = new HashMap<>(); - - public Regex(Set namePatterns) { - this.namePatterns = namePatterns.stream() - .map(regex -> { - try { - return Pattern.compile(regex, Pattern.CASE_INSENSITIVE); - } catch (PatternSyntaxException e) { - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toSet()); - } - - @Override - public ThreadInfo[] dumpThreads(ThreadMXBean threadBean) { - return this.threadFinder.getThreads() - .filter(thread -> { - Boolean result = this.cache.get(thread.getId()); - if (result != null) { - return result; - } - - for (Pattern pattern : this.namePatterns) { - if (pattern.matcher(thread.getName()).matches()) { - this.cache.put(thread.getId(), true); - return true; - } - } - this.cache.put(thread.getId(), false); - return false; - }) - .map(thread -> threadBean.getThreadInfo(thread.getId(), Integer.MAX_VALUE)) - .filter(Objects::nonNull) - .toArray(ThreadInfo[]::new); - } - - @Override - public SamplerMetadata.ThreadDumper getMetadata() { - return SamplerMetadata.ThreadDumper.newBuilder() - .setType(SamplerMetadata.ThreadDumper.Type.REGEX) - .addAllPatterns(this.namePatterns.stream().map(Pattern::pattern).collect(Collectors.toList())) - .build(); - } - } - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java deleted file mode 100644 index 9ad84df3..00000000 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadGrouper.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * This file is part of spark. 
- * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.common.sampler; - -import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; - -import java.util.Collections; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Function for grouping threads together - */ -public interface ThreadGrouper { - - /** - * Implementation of {@link ThreadGrouper} that just groups by thread name. - */ - ThreadGrouper BY_NAME = new ThreadGrouper() { - @Override - public String getGroup(long threadId, String threadName) { - return threadName; - } - - @Override - public String getLabel(String group) { - return group; - } - - @Override - public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { - return SamplerMetadata.DataAggregator.ThreadGrouper.BY_NAME; - } - }; - - /** - * Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool - * the thread originated from. - * - *

The regex pattern used to match pools expects a digit at the end of the thread name, - * separated from the pool name with any of one or more of ' ', '-', or '#'.

- */ - ThreadGrouper BY_POOL = new ThreadGrouper() { - private /* static */ final Pattern pattern = Pattern.compile("^(.*?)[-# ]+\\d+$"); - - // thread id -> group - private final Map cache = new ConcurrentHashMap<>(); - // group -> thread ids - private final Map> seen = new ConcurrentHashMap<>(); - - @Override - public String getGroup(long threadId, String threadName) { - String cached = this.cache.get(threadId); - if (cached != null) { - return cached; - } - - Matcher matcher = this.pattern.matcher(threadName); - if (!matcher.matches()) { - return threadName; - } - - String group = matcher.group(1).trim(); - this.cache.put(threadId, group); - this.seen.computeIfAbsent(group, g -> ConcurrentHashMap.newKeySet()).add(threadId); - return group; - } - - @Override - public String getLabel(String group) { - int count = this.seen.getOrDefault(group, Collections.emptySet()).size(); - if (count == 0) { - return group; - } - return group + " (x" + count + ")"; - } - - @Override - public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { - return SamplerMetadata.DataAggregator.ThreadGrouper.BY_POOL; - } - }; - - /** - * Implementation of {@link ThreadGrouper} which groups all threads as one, under - * the name "All". - */ - ThreadGrouper AS_ONE = new ThreadGrouper() { - private final Set seen = ConcurrentHashMap.newKeySet(); - - @Override - public String getGroup(long threadId, String threadName) { - this.seen.add(threadId); - return "root"; - } - - @Override - public String getLabel(String group) { - return "All (x" + this.seen.size() + ")"; - } - - @Override - public SamplerMetadata.DataAggregator.ThreadGrouper asProto() { - return SamplerMetadata.DataAggregator.ThreadGrouper.AS_ONE; - } - }; - - /** - * Gets the group for the given thread. - * - * @param threadId the id of the thread - * @param threadName the name of the thread - * @return the group - */ - String getGroup(long threadId, String threadName); - - /** - * Gets the label to use for a given group. 
- * - * @param group the group - * @return the label - */ - String getLabel(String group); - - SamplerMetadata.DataAggregator.ThreadGrouper asProto(); - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java deleted file mode 100644 index adcedcdb..00000000 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ThreadNodeOrder.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * This file is part of spark. - * - * Copyright (c) lucko (Luck) - * Copyright (c) contributors - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -package me.lucko.spark.common.sampler; - -import me.lucko.spark.common.sampler.node.ThreadNode; - -import java.util.Comparator; - -/** - * Methods of ordering {@link ThreadNode}s in the output data. 
- */ -public enum ThreadNodeOrder implements Comparator { - - /** - * Order by the name of the thread (alphabetically) - */ - BY_NAME { - @Override - public int compare(ThreadNode o1, ThreadNode o2) { - return o1.getThreadLabel().compareTo(o2.getThreadLabel()); - } - }, - - /** - * Order by the time taken by the thread (most time taken first) - */ - BY_TIME { - @Override - public int compare(ThreadNode o1, ThreadNode o2) { - return -Double.compare(o1.getTotalTime(), o2.getTotalTime()); - } - } - -} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java index ad9dee4b..47a9e738 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java @@ -20,7 +20,7 @@ package me.lucko.spark.common.sampler.aggregator; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.sampler.node.ThreadNode; import java.util.ArrayList; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java index 3de39432..6072664c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncDataAggregator.java @@ -20,7 +20,7 @@ package me.lucko.spark.common.sampler.async; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.sampler.aggregator.AbstractDataAggregator; import me.lucko.spark.common.sampler.node.StackTraceNode; import me.lucko.spark.common.sampler.node.ThreadNode; diff --git 
a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index d8288da7..a7aae2d2 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -22,11 +22,12 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.AbstractSampler; -import me.lucko.spark.common.sampler.ThreadDumper; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.sampler.async.jfr.JfrReader; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; @@ -99,7 +100,7 @@ public void start() { } String command = "start,event=" + AsyncProfilerAccess.INSTANCE.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile.toString(); - if (this.threadDumper instanceof ThreadDumper.Specific) { + if (this.threadDumper instanceof SpecificThreadDumper) { command += ",filter"; } @@ -108,8 +109,8 @@ public void start() { throw new RuntimeException("Unexpected response: " + resp); } - if (this.threadDumper instanceof ThreadDumper.Specific) { - ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper; + if (this.threadDumper instanceof SpecificThreadDumper) { + SpecificThreadDumper threadDumper = (SpecificThreadDumper) this.threadDumper; for (Thread thread : threadDumper.getThreads()) { this.profiler.addThread(thread); } @@ -175,8 +176,8 @@ private void aggregateOutput() { this.outputComplete = true; Predicate threadFilter; - if 
(this.threadDumper instanceof ThreadDumper.Specific) { - ThreadDumper.Specific threadDumper = (ThreadDumper.Specific) this.threadDumper; + if (this.threadDumper instanceof SpecificThreadDumper) { + SpecificThreadDumper threadDumper = (SpecificThreadDumper) this.threadDumper; threadFilter = n -> threadDumper.getThreadNames().contains(n.toLowerCase()); } else { threadFilter = n -> true; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java index cc530d6a..b274723c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaDataAggregator.java @@ -20,7 +20,7 @@ package me.lucko.spark.common.sampler.java; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.sampler.aggregator.AbstractDataAggregator; import me.lucko.spark.common.sampler.aggregator.DataAggregator; import me.lucko.spark.common.sampler.node.StackTraceNode; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 913faee0..ccda6f40 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -25,8 +25,8 @@ import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.AbstractSampler; -import me.lucko.spark.common.sampler.ThreadDumper; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.sampler.node.MergeMode; import 
me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.tick.TickHook; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java index 39e21aaa..58660f68 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/SimpleDataAggregator.java @@ -20,7 +20,7 @@ package me.lucko.spark.common.sampler.java; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.sampler.aggregator.DataAggregator; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java index e062f31a..c25688a3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/TickedDataAggregator.java @@ -20,7 +20,7 @@ package me.lucko.spark.common.sampler.java; -import me.lucko.spark.common.sampler.ThreadGrouper; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.sampler.aggregator.DataAggregator; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.tick.TickHook; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java index ed97443e..e1209e51 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/node/ThreadNode.java @@ -25,7 +25,7 @@ /** * The root of a sampling stack for a given thread / 
thread group. */ -public final class ThreadNode extends AbstractNode { +public final class ThreadNode extends AbstractNode implements me.lucko.spark.api.profiler.thread.ThreadNode { /** * The name of this thread / thread group @@ -41,12 +41,14 @@ public ThreadNode(String name) { this.name = name; } - public String getThreadLabel() { + @Override + public String getLabel() { return this.label != null ? this.label : this.name; } - public String getThreadGroup() { - return this.name; + @Override + public String getGroup() { + return name; } public void setThreadLabel(String label) { @@ -55,7 +57,7 @@ public void setThreadLabel(String label) { public SparkSamplerProtos.ThreadNode toProto(MergeMode mergeMode) { SparkSamplerProtos.ThreadNode.Builder proto = SparkSamplerProtos.ThreadNode.newBuilder() - .setName(getThreadLabel()) + .setName(getLabel()) .setTime(getTotalTime()); for (StackTraceNode child : exportChildren(mergeMode)) { diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java index 19d07071..555f9d8a 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricClientSparkPlugin.java @@ -28,9 +28,10 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.fabric.FabricCommandSender; @@ -64,7 +65,7 @@ public static void register(FabricSparkMod mod, MinecraftClient client) { public 
FabricClientSparkPlugin(FabricSparkMod mod, MinecraftClient minecraft) { super(mod); this.minecraft = minecraft; - this.gameThreadDumper = new ThreadDumper.Specific(((MinecraftClientAccessor) minecraft).getThread()); + this.gameThreadDumper = new SpecificThreadDumper(((MinecraftClientAccessor) minecraft).getThread()); } @Override diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java index f840f5e8..4cf360ac 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricServerSparkPlugin.java @@ -29,11 +29,12 @@ import com.mojang.brigadier.suggestion.SuggestionsBuilder; import me.lucko.fabric.api.permissions.v0.Permissions; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.fabric.FabricCommandSender; @@ -69,7 +70,7 @@ public static FabricServerSparkPlugin register(FabricSparkMod mod, MinecraftServ public FabricServerSparkPlugin(FabricSparkMod mod, MinecraftServer server) { super(mod); this.server = server; - this.gameThreadDumper = new ThreadDumper.Specific(server.getThread()); + this.gameThreadDumper = new SpecificThreadDumper(server.getThread()); } @Override diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java index 
a4c6bd14..ad5e9fec 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeClientSparkPlugin.java @@ -27,9 +27,10 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.forge.ForgeCommandSender; @@ -64,7 +65,7 @@ public static void register(ForgeSparkMod mod, FMLClientSetupEvent event) { public ForgeClientSparkPlugin(ForgeSparkMod mod, Minecraft minecraft) { super(mod); this.minecraft = minecraft; - this.gameThreadDumper = new ThreadDumper.Specific(minecraft.gameThread); + this.gameThreadDumper = new SpecificThreadDumper(minecraft.gameThread); } @Override diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java index 1aeb2b1c..f2b038ff 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeServerSparkPlugin.java @@ -29,11 +29,12 @@ import com.mojang.brigadier.suggestion.Suggestions; import com.mojang.brigadier.suggestion.SuggestionsBuilder; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import 
me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.forge.ForgeCommandSender; @@ -82,7 +83,7 @@ public static void register(ForgeSparkMod mod, ServerAboutToStartEvent event) { public ForgeServerSparkPlugin(ForgeSparkMod mod, MinecraftServer server) { super(mod); this.server = server; - this.gameThreadDumper = new ThreadDumper.Specific(server.getRunningThread()); + this.gameThreadDumper = new SpecificThreadDumper(server.getRunningThread()); } @Override diff --git a/spark-proto/build.gradle b/spark-proto/build.gradle new file mode 100644 index 00000000..ff736e99 --- /dev/null +++ b/spark-proto/build.gradle @@ -0,0 +1,61 @@ +plugins { + id 'maven-publish' + id 'com.google.protobuf' version '0.8.16' +} + +version = api_version +group = 'me.lucko.spark' +archivesBaseName = 'proto' + +dependencies { + api 'com.google.protobuf:protobuf-javalite:3.15.6' +} + +license { + header = project.file('HEADER.txt') +} + +publishing { + repositories { + maven { + url = 'https://oss.sonatype.org/content/repositories/snapshots' + credentials { + username = findProperty('sonatypeUsername') ?: '' + password = findProperty('sonatypePassword') ?: '' + } + } + } + publications { + mavenJava(MavenPublication) { + from components.java + pom { + name = 'spark' + description = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.' 
+ url = 'https://spark.lucko.me/' + } + } + } +} + +protobuf { + protoc { + if (System.getProperty("os.name") == "Mac OS X" && System.getProperty("os.arch") == "aarch64") { + path = '/opt/homebrew/bin/protoc' + } else { + artifact = 'com.google.protobuf:protoc:3.15.6' + } + } + generateProtoTasks { + all().each { task -> + task.builtins { + java { + option 'lite' + } + } + } + } +} + +afterEvaluate { + tasks.generateProto.group = 'proto' +} \ No newline at end of file diff --git a/spark-common/src/main/proto/spark/spark.proto b/spark-proto/src/main/proto/spark/spark.proto similarity index 100% rename from spark-common/src/main/proto/spark/spark.proto rename to spark-proto/src/main/proto/spark/spark.proto diff --git a/spark-common/src/main/proto/spark/spark_heap.proto b/spark-proto/src/main/proto/spark/spark_heap.proto similarity index 100% rename from spark-common/src/main/proto/spark/spark_heap.proto rename to spark-proto/src/main/proto/spark/spark_heap.proto diff --git a/spark-common/src/main/proto/spark/spark_sampler.proto b/spark-proto/src/main/proto/spark/spark_sampler.proto similarity index 100% rename from spark-common/src/main/proto/spark/spark_sampler.proto rename to spark-proto/src/main/proto/spark/spark_sampler.proto diff --git a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java index e6c9a041..126486c6 100644 --- a/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java +++ b/spark-sponge7/src/main/java/me/lucko/spark/sponge/Sponge7SparkPlugin.java @@ -23,12 +23,13 @@ import com.google.inject.Inject; import me.lucko.spark.api.Spark; +import me.lucko.spark.api.profiler.dumper.GameThreadDumper; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import 
me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.ClassSourceLookup; @@ -73,7 +74,7 @@ public class Sponge7SparkPlugin implements SparkPlugin { private final Path configDirectory; private final SpongeExecutorService asyncExecutor; private final SpongeExecutorService syncExecutor; - private final ThreadDumper.GameThread gameThreadDumper = new ThreadDumper.GameThread(); + private final GameThreadDumper gameThreadDumper = new GameThreadDumper(); private SparkPlatform platform; diff --git a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java index 70e73b90..8b9dc6d3 100644 --- a/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java +++ b/spark-sponge8/src/main/java/me/lucko/spark/sponge/Sponge8SparkPlugin.java @@ -22,13 +22,14 @@ import com.google.inject.Inject; +import me.lucko.spark.api.profiler.dumper.GameThreadDumper; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.SparkPlugin; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.monitor.ping.PlayerPingProvider; import me.lucko.spark.common.platform.PlatformInfo; import me.lucko.spark.common.platform.world.WorldInfoProvider; -import me.lucko.spark.common.sampler.ThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.util.ClassSourceLookup; @@ -69,7 +70,7 @@ public class Sponge8SparkPlugin implements SparkPlugin { private final Path configDirectory; private final ExecutorService asyncExecutor; private final ExecutorService syncExecutor; - private final ThreadDumper.GameThread gameThreadDumper = new ThreadDumper.GameThread(); + private final GameThreadDumper 
gameThreadDumper = new GameThreadDumper(); private SparkPlatform platform; From 2e3218d91a8a823fc053a7bfeaaebea57dbceeae Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 17 Jul 2022 18:57:54 +0300 Subject: [PATCH 02/29] Update licenses --- .../me/lucko/spark/api/profiler/Profiler.java | 59 +++++++++++++++---- .../api/profiler/ProfilerConfiguration.java | 25 ++++++++ .../ProfilerConfigurationBuilder.java | 25 ++++++++ .../api/profiler/dumper/GameThreadDumper.java | 25 ++++++++ .../profiler/dumper/RegexThreadDumper.java | 25 ++++++++ .../profiler/dumper/SpecificThreadDumper.java | 25 ++++++++ .../api/profiler/dumper/ThreadDumper.java | 27 +++++---- .../api/profiler/report/ProfilerReport.java | 25 ++++++++ .../profiler/report/ReportConfiguration.java | 25 ++++++++ .../report/ReportConfigurationBuilder.java | 25 ++++++++ .../api/profiler/thread/ThreadGrouper.java | 27 +++++---- .../spark/api/profiler/thread/ThreadNode.java | 25 ++++++++ .../api/profiler/thread/ThreadOrder.java | 25 ++++++++ .../lucko/spark/api/util/StreamSupplier.java | 25 ++++++++ .../spark/common/sampler/ProfilerService.java | 20 +++++++ .../lucko/spark/common/sampler/Sampler.java | 14 ----- 16 files changed, 376 insertions(+), 46 deletions(-) diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java index 8d2aa04d..df6796ee 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java @@ -1,3 +1,28 @@ +/* + * This file is part of spark, licensed under the MIT License. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + package me.lucko.spark.api.profiler; import me.lucko.spark.api.profiler.report.ProfilerReport; @@ -27,12 +52,9 @@ public interface Profiler { */ interface Sampler { /** - * Gets a future to encapsulate the completion of the sampler, containing the report. - * - * @param configuration the configuration to use for generating the report - * @return a future + * Starts the sampler. */ - CompletableFuture whenDone(ReportConfiguration configuration); + void start(); /** * Stops the sampler. @@ -40,16 +62,33 @@ interface Sampler { void stop(); /** - * Dumps the report of the sampler. 
+ * Gets the time when the sampler started (unix timestamp in millis) + * + * @return the start time + */ + long getStartTime(); + + /** + * Gets the time when the sampler should automatically stop (unix timestamp in millis) + * + * @return the end time, or -1 if undefined + */ + long getAutoEndTime(); + + /** + * Gets a future to encapsulate the completion of the sampler, containing the report. * * @param configuration the configuration to use for generating the report - * @return the report of the sampler + * @return a future */ - ProfilerReport dumpReport(ReportConfiguration configuration); + CompletableFuture whenDone(ReportConfiguration configuration); /** - * Starts the sampler. + * Dumps the report of the sampler. + * + * @param configuration the configuration to use for generating the report + * @return the report of the sampler */ - void start(); + ProfilerReport dumpReport(ReportConfiguration configuration); } } diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java index 2e374d1e..4543b293 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java @@ -1,3 +1,28 @@ +/* + * This file is part of spark, licensed under the MIT License. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + package me.lucko.spark.api.profiler; import me.lucko.spark.api.SparkProvider; diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java index 0462bbc6..935c9676 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java @@ -1,3 +1,28 @@ +/* + * This file is part of spark, licensed under the MIT License. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + package me.lucko.spark.api.profiler; import com.google.errorprone.annotations.CanIgnoreReturnValue; diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java index 649666cc..7f9f7ddf 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java @@ -1,3 +1,28 @@ +/* + * This file is part of spark, licensed under the MIT License. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + package me.lucko.spark.api.profiler.dumper; import java.util.Objects; diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/RegexThreadDumper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/RegexThreadDumper.java index 8f3bf154..05235859 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/RegexThreadDumper.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/RegexThreadDumper.java @@ -1,3 +1,28 @@ +/* + * This file is part of spark, licensed under the MIT License. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + package me.lucko.spark.api.profiler.dumper; import me.lucko.spark.api.SparkProvider; diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/SpecificThreadDumper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/SpecificThreadDumper.java index bb49dfbe..cba018d9 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/SpecificThreadDumper.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/SpecificThreadDumper.java @@ -1,3 +1,28 @@ +/* + * This file is part of spark, licensed under the MIT License. 
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + package me.lucko.spark.api.profiler.dumper; import me.lucko.spark.api.SparkProvider; diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/ThreadDumper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/ThreadDumper.java index 6e8f0bec..5e497aa7 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/ThreadDumper.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/ThreadDumper.java @@ -1,21 +1,26 @@ /* - * This file is part of spark. + * This file is part of spark, licensed under the MIT License. 
* * Copyright (c) lucko (Luck) * Copyright (c) contributors * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
*/ package me.lucko.spark.api.profiler.dumper; diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java index 6d7717bc..87bcf734 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java @@ -1,3 +1,28 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + package me.lucko.spark.api.profiler.report; import me.lucko.spark.proto.SparkSamplerProtos; diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java index 98d360f4..4fe8946b 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java @@ -1,3 +1,28 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + package me.lucko.spark.api.profiler.report; import me.lucko.spark.api.profiler.thread.ThreadNode; diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java index 9d94e2b5..96187a74 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java @@ -1,3 +1,28 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + package me.lucko.spark.api.profiler.report; import me.lucko.spark.api.profiler.thread.ThreadNode; diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java index f16b6f9b..ab5d765c 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java @@ -1,21 +1,26 @@ /* - * This file is part of spark. + * This file is part of spark, licensed under the MIT License. * * Copyright (c) lucko (Luck) * Copyright (c) contributors * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. */ package me.lucko.spark.api.profiler.thread; diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadNode.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadNode.java index b79ab640..e3921af2 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadNode.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadNode.java @@ -1,3 +1,28 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + package me.lucko.spark.api.profiler.thread; /** diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadOrder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadOrder.java index 1de25c8b..70d085d4 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadOrder.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadOrder.java @@ -1,3 +1,28 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + package me.lucko.spark.api.profiler.thread; import java.util.Comparator; diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/StreamSupplier.java b/spark-api/src/main/java/me/lucko/spark/api/util/StreamSupplier.java index 6bf5de18..f654dcac 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/util/StreamSupplier.java +++ b/spark-api/src/main/java/me/lucko/spark/api/util/StreamSupplier.java @@ -1,3 +1,28 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + package me.lucko.spark.api.util; import java.util.stream.Stream; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java index 7f99ea0e..897317ff 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java @@ -1,3 +1,23 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + package me.lucko.spark.common.sampler; import me.lucko.spark.api.profiler.Profiler; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 6b7867d7..49a6bd68 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -36,20 +36,6 @@ */ public interface Sampler extends Profiler.Sampler { - /** - * Gets the time when the sampler started (unix timestamp in millis) - * - * @return the start time - */ - long getStartTime(); - - /** - * Gets the time when the sampler should automatically stop (unix timestamp in millis) - * - * @return the end time, or -1 if undefined - */ - long getAutoEndTime(); - /** * Gets a future to encapsulate the completion of the sampler * From 3fc7b7c1a475aa6e26f01b546b77843a70682b8a Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 17 Jul 2022 19:32:58 +0300 Subject: [PATCH 03/29] Doc updates --- .../src/main/java/me/lucko/spark/api/profiler/Profiler.java | 3 ++- .../lucko/spark/api/profiler/ProfilerConfigurationBuilder.java | 2 ++ .../common/sampler/aggregator/AbstractDataAggregator.java | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java index df6796ee..52089502 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java @@ -84,7 +84,8 @@ interface Sampler { CompletableFuture whenDone(ReportConfiguration configuration); /** - * Dumps the report of the sampler. + * Dumps the report of the sampler.
+ * Note: make sure to {@link #stop() stop} the sampler before generating the report. * * @param configuration the configuration to use for generating the report * @return the report of the sampler diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java index 935c9676..ba464eae 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfigurationBuilder.java @@ -26,6 +26,7 @@ package me.lucko.spark.api.profiler; import com.google.errorprone.annotations.CanIgnoreReturnValue; +import com.google.errorprone.annotations.CheckReturnValue; import me.lucko.spark.api.Spark; import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.api.profiler.thread.ThreadGrouper; @@ -109,5 +110,6 @@ public interface ProfilerConfigurationBuilder { * * @return the built configuration */ + @CheckReturnValue ProfilerConfiguration build(); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java index 47a9e738..28607f8c 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/aggregator/AbstractDataAggregator.java @@ -55,7 +55,7 @@ protected ThreadNode getNode(String group) { public List exportData() { List data = new ArrayList<>(this.threadData.values()); for (ThreadNode node : data) { - node.setThreadLabel(this.threadGrouper.getLabel(node.getThreadGroup())); + node.setThreadLabel(this.threadGrouper.getLabel(node.getGroup())); } return data; } From e1563b179c1301d9265a63d75add70fa3738efb7 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 17 Jul 2022 19:55:45 +0300 Subject: 
[PATCH 04/29] Change build.gradle --- spark-api/build.gradle | 12 ++++++++++++ spark-proto/build.gradle | 12 ++++++++++++ 2 files changed, 24 insertions(+) diff --git a/spark-api/build.gradle b/spark-api/build.gradle index 5e9aca66..f333b376 100644 --- a/spark-api/build.gradle +++ b/spark-api/build.gradle @@ -17,6 +17,18 @@ license { header = project.file('HEADER.txt') } +jar { + manifest.attributes([ + "Specification-Title" : 'SparkAPI', + "Specification-Vendor" : 'Lucko', + "Specification-Version" : '1', // We are version 1 of ourselves + "Implementation-Title" : 'SparkAPI', + "Implementation-Version" : api_version, + "Implementation-Vendor" : 'Lucko', + "Implementation-Timestamp": new Date().format("yyyy-MM-dd'T'HH:mm:ssZ") + ]) +} + publishing { repositories { maven { diff --git a/spark-proto/build.gradle b/spark-proto/build.gradle index ff736e99..227db8b1 100644 --- a/spark-proto/build.gradle +++ b/spark-proto/build.gradle @@ -37,6 +37,18 @@ publishing { } } +jar { + manifest.attributes([ + "Specification-Title" : 'SparkProto', + "Specification-Vendor" : 'Lucko', + "Specification-Version" : '1', // We are version 1 of ourselves + "Implementation-Title" : 'SparkProto', + "Implementation-Version" : api_version, + "Implementation-Vendor" : 'Lucko', + "Implementation-Timestamp": new Date().format("yyyy-MM-dd'T'HH:mm:ssZ") + ]) +} + protobuf { protoc { if (System.getProperty("os.name") == "Mac OS X" && System.getProperty("os.arch") == "aarch64") { From 96f40d877855633c5b4b0aa6bf515910fc97c3d0 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 17 Jul 2022 20:00:00 +0300 Subject: [PATCH 05/29] Update GameThreadDumper.java --- .../api/profiler/dumper/GameThreadDumper.java | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java index 7f9f7ddf..87f474e9 100644 --- 
a/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/dumper/GameThreadDumper.java @@ -33,14 +33,28 @@ * the game (server/client) thread. */ public final class GameThreadDumper implements Supplier { + private Supplier threadSupplier; private SpecificThreadDumper dumper = null; + public GameThreadDumper() { + + } + + public GameThreadDumper(Supplier threadSupplier) { + this.threadSupplier = threadSupplier; + } + @Override public ThreadDumper get() { + if (this.dumper == null) { + setThread(this.threadSupplier.get()); + this.threadSupplier = null; + } + return Objects.requireNonNull(this.dumper, "dumper"); } public void setThread(Thread thread) { - this.dumper = new SpecificThreadDumper(new long[] {thread.getId()}); + this.dumper = new SpecificThreadDumper(new long[]{thread.getId()}); } } From a15f13f44e9561f7855640697f750e0192e56f4a Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 17 Jul 2022 20:16:03 +0300 Subject: [PATCH 06/29] Update publishing buildscript --- gradle.properties | 2 +- spark-api/build.gradle | 3 +++ spark-proto/build.gradle | 36 ++++++++++++++++++++++++++++++++++-- 3 files changed, 38 insertions(+), 3 deletions(-) diff --git a/gradle.properties b/gradle.properties index 76ae5353..8f769909 100644 --- a/gradle.properties +++ b/gradle.properties @@ -4,4 +4,4 @@ org.gradle.parallel=true # thanks, forge org.gradle.daemon=false -api_version=0.1-SNAPSHOT \ No newline at end of file +api_version=1.0.0 \ No newline at end of file diff --git a/spark-api/build.gradle b/spark-api/build.gradle index f333b376..90d71a4a 100644 --- a/spark-api/build.gradle +++ b/spark-api/build.gradle @@ -17,6 +17,9 @@ license { header = project.file('HEADER.txt') } +java.withSourcesJar() +java.withJavadocJar() + jar { manifest.attributes([ "Specification-Title" : 'SparkAPI', diff --git a/spark-proto/build.gradle b/spark-proto/build.gradle index 227db8b1..6707e7f1 100644 --- 
a/spark-proto/build.gradle +++ b/spark-proto/build.gradle @@ -1,20 +1,50 @@ plugins { id 'maven-publish' id 'com.google.protobuf' version '0.8.16' + id 'com.github.johnrengelman.shadow' version '7.0.0' } version = api_version group = 'me.lucko.spark' archivesBaseName = 'proto' +configurations { + shade + api.extendsFrom shade +} + dependencies { - api 'com.google.protobuf:protobuf-javalite:3.15.6' + shade 'com.google.protobuf:protobuf-javalite:3.15.6' +} + +jar { + classifier 'lite' +} + +shadowJar { + configurations = [project.configurations.shade] + relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf' + exclude 'module-info.class' + exclude 'META-INF/maven/**' + exclude 'META-INF/proguard/**' + + classifier '' } license { header = project.file('HEADER.txt') } +java.withSourcesJar() +java.withJavadocJar() + +components.java.withVariantsFromConfiguration(configurations.runtimeElements) { + skip() +} +components.java.withVariantsFromConfiguration(configurations.apiElements) { + skip() +} + publishing { repositories { maven { @@ -27,7 +57,9 @@ publishing { } publications { mavenJava(MavenPublication) { - from components.java + artifacts = [ + shadowJar, javadocJar, sourcesJar + ] pom { name = 'spark' description = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.' 
From b5d027fd1253b65c2a8f74217682048b60e75c62 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 17 Jul 2022 20:40:41 +0300 Subject: [PATCH 07/29] Fixes --- .../me/lucko/spark/common/command/modules/SamplerModule.java | 2 -- .../java/me/lucko/spark/common/sampler/ProfilerService.java | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index d99d7eb0..b6fdd258 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -191,7 +191,6 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command if (ticksOver != -1) { builder.minimumTickDuration(ticksOver); } - final ProfilerService service = new ProfilerService(platform); final Sampler sampler = service.create(builder.build(), e -> resp.replyPrefixed(text(e, RED))); if (sampler == null) // Feedback is handled in the consumer return; @@ -224,7 +223,6 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command if (timeoutSeconds != -1) { ThreadOrder threadOrder = arguments.boolFlag("order-by-time") ? 
ThreadOrder.BY_TIME : ThreadOrder.BY_NAME; String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); - MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); boolean sepPar = arguments.boolFlag("separate-parent-calls"); boolean saveToFile = arguments.boolFlag("save-to-file"); future.thenAcceptAsync(s -> { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java index 897317ff..8bb3d6d5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java @@ -54,7 +54,7 @@ public me.lucko.spark.common.sampler.Sampler create(ProfilerConfiguration config Duration duration = configuration.duration(); if (duration == null) duration = Duration.of(MINIMUM_DURATION, ChronoUnit.SECONDS); - if (duration.getSeconds() <= MINIMUM_DURATION) { + if (duration.getSeconds() < MINIMUM_DURATION) { err.accept("A profiler needs to run for at least " + MINIMUM_DURATION + " seconds!"); return null; } From 001981b1fd1f27885f5e66bd943fc9c1e954dcab Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Mon, 18 Jul 2022 17:28:51 +0300 Subject: [PATCH 08/29] Small tweaks --- spark-api/build.gradle | 1 + .../me/lucko/spark/api/SparkProvider.java | 1 + .../me/lucko/spark/api/profiler/Profiler.java | 24 +++++++- .../profiler/report/ReportConfiguration.java | 27 ++++++++- .../report/ReportConfigurationBuilder.java | 2 +- .../common/command/modules/SamplerModule.java | 38 ++++++------- .../common/command/sender/CommandSender.java | 5 ++ .../spark/common/sampler/AbstractSampler.java | 56 +++++++------------ .../spark/common/sampler/ProfilerService.java | 11 ++-- .../lucko/spark/common/sampler/Sampler.java | 12 +--- .../common/sampler/async/AsyncSampler.java | 12 ++-- .../common/sampler/java/JavaSampler.java | 9 ++- 12 files changed, 111 insertions(+), 87 
deletions(-) diff --git a/spark-api/build.gradle b/spark-api/build.gradle index 90d71a4a..4b21a1b1 100644 --- a/spark-api/build.gradle +++ b/spark-api/build.gradle @@ -21,6 +21,7 @@ java.withSourcesJar() java.withJavadocJar() jar { + from file('LICENSE.txt') manifest.attributes([ "Specification-Title" : 'SparkAPI', "Specification-Vendor" : 'Lucko', diff --git a/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java b/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java index c8a5520d..2106ed4e 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java +++ b/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java @@ -47,6 +47,7 @@ public final class SparkProvider { return instance; } + @SuppressWarnings("unused") static void set(Spark impl) { SparkProvider.instance = impl; } diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java index 52089502..0fbfb81f 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java @@ -33,7 +33,7 @@ import java.util.function.Consumer; /** - * A profilers used for sampling. + * A profiler used for sampling. */ public interface Profiler { /** @@ -76,12 +76,23 @@ interface Sampler { long getAutoEndTime(); /** - * Gets a future to encapsulate the completion of the sampler, containing the report. + * Gets a future that encapsulates the completion of the sampler, containing the report.
+ * Note: this future will not be completed unless this sampler is configured to automatically stop. * * @param configuration the configuration to use for generating the report * @return a future + * @see #onCompleted() */ - CompletableFuture whenDone(ReportConfiguration configuration); + CompletableFuture onCompleted(ReportConfiguration configuration); + + /** + * Gets a future that encapsulates the completion of the sampler, containing the sampler. + * Note: this future will not be completed unless this sampler is configured to automatically stop. + * + * @return a future + * @see #onCompleted(ReportConfiguration) + */ + CompletableFuture onCompleted(); /** * Dumps the report of the sampler.
@@ -91,5 +102,12 @@ interface Sampler { * @return the report of the sampler */ ProfilerReport dumpReport(ReportConfiguration configuration); + + /** + * Checks if this sampler is an async sampler. + * + * @return if this sampler is an async sampler + */ + boolean isAsync(); } } diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java index 4fe8946b..c7d21afb 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java @@ -27,6 +27,7 @@ import me.lucko.spark.api.profiler.thread.ThreadNode; import me.lucko.spark.api.profiler.thread.ThreadOrder; +import me.lucko.spark.proto.SparkProtos; import org.jetbrains.annotations.Nullable; import java.util.Comparator; @@ -73,11 +74,35 @@ static ReportConfigurationBuilder builder() { class Sender { public final String name; + /** + * The UUID of the sender. May be {@code null} if it wasn't sent by a player. + */ + @Nullable public final UUID uuid; - public Sender(String name, UUID uuid) { + public Sender(String name, @Nullable UUID uuid) { this.name = name; this.uuid = uuid; } + + /** + * Checks if this sender is a player. + * @return if this sender is a player + */ + public boolean isPlayer() { + return uuid != null; + } + + public SparkProtos.CommandSenderMetadata toProto() { + SparkProtos.CommandSenderMetadata.Builder proto = SparkProtos.CommandSenderMetadata.newBuilder() + .setType(isPlayer() ? 
SparkProtos.CommandSenderMetadata.Type.PLAYER : SparkProtos.CommandSenderMetadata.Type.OTHER) + .setName(this.name); + + if (this.uuid != null) { + proto.setUniqueId(this.uuid.toString()); + } + + return proto.build(); + } } } diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java index 96187a74..88893bdb 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java @@ -55,7 +55,7 @@ public ReportConfigurationBuilder sender(@Nullable ReportConfiguration.Sender se return this; } - public ReportConfigurationBuilder sender(@NonNull String name, @NonNull UUID uuid) { + public ReportConfigurationBuilder sender(@NonNull String name, @Nullable UUID uuid) { return sender(new ReportConfiguration.Sender(name, uuid)); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index b6fdd258..7a78631b 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -21,6 +21,7 @@ package me.lucko.spark.common.command.modules; import com.google.common.collect.Iterables; +import me.lucko.spark.api.profiler.Profiler; import me.lucko.spark.api.profiler.dumper.RegexThreadDumper; import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; import me.lucko.spark.api.profiler.dumper.ThreadDumper; @@ -34,14 +35,11 @@ import me.lucko.spark.common.command.Command; import me.lucko.spark.common.command.CommandModule; import me.lucko.spark.common.command.CommandResponseHandler; -import me.lucko.spark.common.command.sender.CommandSender; import 
me.lucko.spark.common.command.tabcomplete.CompletionSupplier; import me.lucko.spark.common.command.tabcomplete.TabCompleter; import me.lucko.spark.common.sampler.ProfilerService; import me.lucko.spark.common.sampler.Sampler; import me.lucko.spark.common.sampler.SamplerBuilder; -import me.lucko.spark.common.sampler.async.AsyncSampler; -import me.lucko.spark.common.util.MethodDisambiguator; import me.lucko.spark.proto.SparkSamplerProtos; import net.kyori.adventure.text.event.ClickEvent; @@ -95,7 +93,7 @@ public void registerCommands(Consumer consumer) { .argumentUsage("stop --comment", "comment") .argumentUsage("stop --order-by-time", null) .argumentUsage("stop --save-to-file", null) - .executor(this::profiler) + .executor((platform, sender, resp, args) -> profiler(platform, resp, args)) .tabCompleter((platform, sender, arguments) -> { if (arguments.contains("--info") || arguments.contains("--cancel")) { return Collections.emptyList(); @@ -119,7 +117,7 @@ public void registerCommands(Consumer consumer) { ); } - private void profiler(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { + private void profiler(SparkPlatform platform, CommandResponseHandler resp, Arguments arguments) { if (arguments.boolFlag("info")) { profilerInfo(resp); return; @@ -131,14 +129,14 @@ private void profiler(SparkPlatform platform, CommandSender sender, CommandRespo } if (arguments.boolFlag("stop") || arguments.boolFlag("upload")) { - profilerStop(platform, sender, resp, arguments); + profilerStop(platform, resp, arguments); return; } - profilerStart(platform, sender, resp, arguments); + profilerStart(platform, resp, arguments); } - private void profilerStart(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { + private void profilerStart(SparkPlatform platform, CommandResponseHandler resp, Arguments arguments) { resp.broadcastPrefixed(text("Initializing a new profiler, please wait...")); int 
timeoutSeconds = arguments.intFlag("timeout"); @@ -200,7 +198,7 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command resp.broadcastPrefixed(text() .append(text("Profiler now active!", GOLD)) .append(space()) - .append(text("(" + (sampler instanceof AsyncSampler ? "async" : "built-in java") + ")", DARK_GRAY)) + .append(text("(" + (sampler.isAsync() ? "async" : "built-in java") + ")", DARK_GRAY)) .build() ); if (timeoutSeconds == -1) { @@ -209,7 +207,7 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command resp.broadcastPrefixed(text("The results will be automatically returned after the profiler has been running for " + timeoutSeconds + " seconds.")); } - final CompletableFuture future = sampler.getFuture(); + final CompletableFuture future = sampler.onCompleted(); // send message if profiling fails future.whenCompleteAsync((s, throwable) -> { @@ -225,9 +223,9 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); boolean sepPar = arguments.boolFlag("separate-parent-calls"); boolean saveToFile = arguments.boolFlag("save-to-file"); - future.thenAcceptAsync(s -> { + sampler.onCompleted(configuration(resp, comment, sepPar, threadOrder)).thenAcceptAsync(report -> { resp.broadcastPrefixed(text("The active profiler has completed! 
Uploading results...")); - handleUpload(platform, resp, s, threadOrder, comment, sepPar, saveToFile); + handleUpload(platform, resp, report, saveToFile); }); } } @@ -259,7 +257,7 @@ private void profilerCancel(CommandResponseHandler resp) { } } - private void profilerStop(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { + private void profilerStop(SparkPlatform platform, CommandResponseHandler resp, Arguments arguments) { final Sampler sampler = service.active(); if (sampler == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); @@ -270,7 +268,7 @@ private void profilerStop(SparkPlatform platform, CommandSender sender, CommandR String comment = Iterables.getFirst(arguments.stringFlag("comment"), null); boolean sepParentCalls = arguments.boolFlag("separate-parent-calls"); boolean saveToFile = arguments.boolFlag("save-to-file"); - handleUpload(platform, resp, sampler, threadOrder, comment, sepParentCalls, saveToFile); + handleUpload(platform, resp, sampler.dumpReport(configuration(resp, comment, sepParentCalls, threadOrder)), saveToFile); service.clear(); } } @@ -280,14 +278,16 @@ public static String postData(SparkPlatform platform, SparkSamplerProtos.Sampler return platform.getViewerUrl() + key; } - private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadOrder threadOrder, String comment, boolean separateParentCalls, boolean saveToFileFlag) { - final ProfilerReport report = sampler.dumpReport(ReportConfiguration.builder() - .order(threadOrder) + private ReportConfiguration configuration(CommandResponseHandler resp, String comment, boolean separateParentCalls, ThreadOrder order) { + return ReportConfiguration.builder() + .order(order) .comment(comment) .separateParentCalls(separateParentCalls) - .sender(resp.sender().getName(), resp.sender().getUniqueId()) - .build()); + .sender(resp.sender().asSender()) + .build(); + } + private void 
handleUpload(SparkPlatform platform, CommandResponseHandler resp, ProfilerReport report, boolean saveToFileFlag) { boolean saveToFile = false; if (saveToFileFlag) { saveToFile = true; diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java b/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java index bae5ddfa..45be7f5a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java @@ -24,6 +24,7 @@ import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; +import me.lucko.spark.api.profiler.report.ReportConfiguration; import me.lucko.spark.proto.SparkProtos.CommandSenderMetadata; import net.kyori.adventure.text.Component; @@ -44,6 +45,10 @@ default Data toData() { return new Data(getName(), getUniqueId()); } + default ReportConfiguration.Sender asSender() { + return new ReportConfiguration.Sender(getName(), getUniqueId()); + } + final class Data { private final String name; private final UUID uniqueId; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 411a510b..f1e2c1ca 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -20,15 +20,16 @@ package me.lucko.spark.common.sampler; +import me.lucko.spark.api.profiler.Profiler; import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.api.profiler.report.ProfilerReport; import me.lucko.spark.api.profiler.report.ReportConfiguration; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.modules.SamplerModule; -import me.lucko.spark.common.command.sender.CommandSender; import 
me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; import me.lucko.spark.common.platform.serverconfig.ServerConfigProvider; import me.lucko.spark.common.sampler.aggregator.DataAggregator; +import me.lucko.spark.common.sampler.async.AsyncSampler; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.tick.TickHook; @@ -37,7 +38,7 @@ import me.lucko.spark.proto.SparkSamplerProtos; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; -import net.kyori.adventure.text.Component; +import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; import java.io.IOException; @@ -46,7 +47,6 @@ import java.util.Comparator; import java.util.List; import java.util.Map; -import java.util.UUID; import java.util.concurrent.CompletableFuture; /** @@ -73,7 +73,7 @@ public abstract class AbstractSampler implements Sampler { protected final long autoEndTime; // -1 for nothing /** A future to encapsulate the completion of this sampler instance */ - protected final CompletableFuture future = new CompletableFuture<>(); + protected final CompletableFuture future = new CompletableFuture<>(); /** The garbage collector statistics when profiling started */ protected Map initialGcStats; @@ -98,11 +98,6 @@ public long getAutoEndTime() { return this.autoEndTime; } - @Override - public CompletableFuture getFuture() { - return this.future; - } - protected void recordInitialGcStats() { this.initialGcStats = GarbageCollectorStatistics.pollStats(); } @@ -128,30 +123,8 @@ public ProfilerReport dumpReport(ReportConfiguration configuration) { private ProfilerReport createReport(ReportConfiguration configuration) { final MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); - final ReportConfiguration.Sender rSender = configuration.sender(); - final CommandSender sender = 
rSender == null ? null : new CommandSender() { - @Override - public String getName() { - return rSender.name; - } - - @Override - public UUID getUniqueId() { - return rSender.uuid; - } - - @Override - public void sendMessage(Component message) { - - } - - @Override - public boolean hasPermission(String permission) { - return true; - } - }; return new ProfilerReport() { - final SparkSamplerProtos.SamplerData data = toProto(platform, sender, configuration.threadOrder()::compare, configuration.comment(), configuration.separateParentCalls() ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator), platform.createClassSourceLookup()); + final SparkSamplerProtos.SamplerData data = toProto(platform, configuration.sender(), configuration.threadOrder()::compare, configuration.comment(), configuration.separateParentCalls() ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator), platform.createClassSourceLookup()); String uploadedUrl; @@ -175,11 +148,17 @@ public Path saveToFile(Path path) throws IOException { } @Override - public CompletableFuture whenDone(ReportConfiguration configuration) { - return getFuture().thenApply(samp -> createReport(configuration)); + public CompletableFuture onCompleted(ReportConfiguration configuration) { + return onCompleted().thenApply(samp -> createReport(configuration)); } - protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, @Nullable CommandSender creator, @Nullable String comment, DataAggregator dataAggregator) { + @NonNull + @Override + public CompletableFuture onCompleted() { + return future; + } + + protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, ReportConfiguration.Sender creator, @Nullable String comment, DataAggregator dataAggregator) { SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder() .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()) 
.setStartTime(this.startTime) @@ -189,7 +168,7 @@ protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform pla .setDataAggregator(dataAggregator.getMetadata()); if (creator != null) - metadata.setCreator(creator.toData().toProto()); + metadata.setCreator(creator.toProto()); if (comment != null) { metadata.setComment(comment); @@ -240,4 +219,9 @@ protected void writeDataToProto(SamplerData.Builder proto, DataAggregator dataAg proto.putAllClassSources(classSourceVisitor.getMapping()); } } + + @Override + public boolean isAsync() { + return this instanceof AsyncSampler; + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java index 8bb3d6d5..221eb44e 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java @@ -28,6 +28,7 @@ import me.lucko.spark.common.sampler.async.AsyncSampler; import me.lucko.spark.common.sampler.java.JavaSampler; import me.lucko.spark.common.tick.TickHook; +import org.checkerframework.checker.nullness.qual.Nullable; import java.time.Duration; import java.time.temporal.ChronoUnit; @@ -52,9 +53,7 @@ public me.lucko.spark.common.sampler.Sampler create(ProfilerConfiguration config } Duration duration = configuration.duration(); - if (duration == null) - duration = Duration.of(MINIMUM_DURATION, ChronoUnit.SECONDS); - if (duration.getSeconds() < MINIMUM_DURATION) { + if (duration != null && duration.getSeconds() < MINIMUM_DURATION) { err.accept("A profiler needs to run for at least " + MINIMUM_DURATION + " seconds!"); return null; } @@ -86,7 +85,7 @@ public me.lucko.spark.common.sampler.Sampler create(ProfilerConfiguration config sampler = new JavaSampler(platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative()); 
} // set activeSampler to null when complete. - sampler.getFuture().whenCompleteAsync((s, throwable) -> { + sampler.onCompleted().whenCompleteAsync((s, throwable) -> { if (sampler == this.active) { this.active = null; } @@ -110,7 +109,9 @@ public void clearAndStop() { } } - private static long computeTimeout(Duration duration) { + private static long computeTimeout(@Nullable Duration duration) { + if (duration == null) + return -1; return System.currentTimeMillis() + duration.toMillis(); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index 49a6bd68..b2707b6b 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -21,29 +21,21 @@ package me.lucko.spark.common.sampler; import me.lucko.spark.api.profiler.Profiler; +import me.lucko.spark.api.profiler.report.ReportConfiguration; import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.util.ClassSourceLookup; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import java.util.Comparator; -import java.util.concurrent.CompletableFuture; /** * Abstract superinterface for all sampler implementations. */ public interface Sampler extends Profiler.Sampler { - /** - * Gets a future to encapsulate the completion of the sampler - * - * @return a future - */ - CompletableFuture getFuture(); - // Methods used to export the sampler data to the web viewer. 
- SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup); + SamplerData toProto(SparkPlatform platform, ReportConfiguration.Sender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index a7aae2d2..e184afb6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -21,20 +21,18 @@ package me.lucko.spark.common.sampler.async; import com.google.common.util.concurrent.ThreadFactoryBuilder; - import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.command.sender.CommandSender; -import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.report.ReportConfiguration; import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.common.sampler.async.jfr.JfrReader; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.util.ClassSourceLookup; import me.lucko.spark.common.util.TemporaryFiles; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; - import one.profiler.AsyncProfiler; import java.io.IOException; @@ -99,7 +97,7 @@ public void start() { throw new RuntimeException("Unable to create temporary output file", e); } - String command = "start,event=" + AsyncProfilerAccess.INSTANCE.getProfilingEvent() + ",interval=" + this.interval + 
"us,threads,jfr,file=" + this.outputFile.toString(); + String command = "start,event=" + AsyncProfilerAccess.INSTANCE.getProfilingEvent() + ",interval=" + this.interval + "us,threads,jfr,file=" + this.outputFile; if (this.threadDumper instanceof SpecificThreadDumper) { command += ",filter"; } @@ -161,7 +159,7 @@ public void stop() { } @Override - public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + public SamplerData toProto(SparkPlatform platform, ReportConfiguration.Sender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { SamplerData.Builder proto = SamplerData.newBuilder(); writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator); aggregateOutput(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index ccda6f40..583e5a05 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -21,12 +21,11 @@ package me.lucko.spark.common.sampler.java; import com.google.common.util.concurrent.ThreadFactoryBuilder; - -import me.lucko.spark.common.SparkPlatform; -import me.lucko.spark.common.command.sender.CommandSender; -import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.report.ReportConfiguration; import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.tick.TickHook; @@ -124,7 +123,7 @@ public void run() { } 
@Override - public SamplerData toProto(SparkPlatform platform, CommandSender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + public SamplerData toProto(SparkPlatform platform, ReportConfiguration.Sender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { SamplerData.Builder proto = SamplerData.newBuilder(); writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator); writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup); From 114d0798afe4d148f0401481f5a24ad3717c349e Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Mon, 18 Jul 2022 17:58:50 +0300 Subject: [PATCH 09/29] Correctly clear active sampler --- .../main/java/me/lucko/spark/api/Spark.java | 2 ++ .../me/lucko/spark/api/profiler/Profiler.java | 3 ++- .../spark/common/sampler/AbstractSampler.java | 11 ++++++++++- .../spark/common/sampler/ProfilerService.java | 18 ++++++++---------- .../common/sampler/async/AsyncSampler.java | 7 +++++-- .../spark/common/sampler/java/JavaSampler.java | 11 +++++++---- 6 files changed, 34 insertions(+), 18 deletions(-) diff --git a/spark-api/src/main/java/me/lucko/spark/api/Spark.java b/spark-api/src/main/java/me/lucko/spark/api/Spark.java index 266d8538..d99b7eaa 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/Spark.java +++ b/spark-api/src/main/java/me/lucko/spark/api/Spark.java @@ -104,6 +104,8 @@ public interface Spark { /** * Creates a new {@link Profiler profiler}. + * Note: this method creates a new profiler every time. Each profiler can only + * manage a sampler at a time. 
* * @return the profiler */ diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java index 0fbfb81f..4ad83488 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java @@ -33,7 +33,8 @@ import java.util.function.Consumer; /** - * A profiler used for sampling. + * A profiler used for sampling.
+ * A profiler can only manage one sampler at a time. */ public interface Profiler { /** diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index f1e2c1ca..d7eba23d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -48,12 +48,15 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; +import java.util.function.Consumer; /** * Base implementation class for {@link Sampler}s. */ public abstract class AbstractSampler implements Sampler { + protected final Consumer whenStopped; + /** The spark platform instance */ protected final SparkPlatform platform; @@ -78,7 +81,8 @@ public abstract class AbstractSampler implements Sampler { /** The garbage collector statistics when profiling started */ protected Map initialGcStats; - protected AbstractSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) { + protected AbstractSampler(Consumer whenStopped, SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) { + this.whenStopped = whenStopped; this.platform = platform; this.interval = interval; this.threadDumper = threadDumper; @@ -106,6 +110,11 @@ protected Map getInitialGcStats() { return this.initialGcStats; } + @Override + public void stop() { + whenStopped.accept(this); + } + @Override public void start() { this.startTime = System.currentTimeMillis(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java index 221eb44e..de0daaa3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java @@ -31,7 +31,6 
@@ import org.checkerframework.checker.nullness.qual.Nullable; import java.time.Duration; -import java.time.temporal.ChronoUnit; import java.util.function.Consumer; public class ProfilerService implements Profiler { @@ -77,19 +76,18 @@ public me.lucko.spark.common.sampler.Sampler create(ProfilerConfiguration config final int intervalMicros = (int) (interval * 1000d); final long timeout = computeTimeout(duration); me.lucko.spark.common.sampler.Sampler sampler; + // set activeSampler to null when stopped. + final Consumer whenStopped = s -> { + if (s == this.active) + this.active = null; + }; if (minimum >= 1) { - sampler = new JavaSampler(platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative(), hook, configuration.minimumTickDuration()); + sampler = new JavaSampler(whenStopped, platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative(), hook, configuration.minimumTickDuration()); } else if (!configuration.forceJavaSampler() && !(configuration.dumper() instanceof RegexThreadDumper) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) { - sampler = new AsyncSampler(platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout); + sampler = new AsyncSampler(whenStopped, platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout); } else { - sampler = new JavaSampler(platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative()); + sampler = new JavaSampler(whenStopped, platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative()); } - // set activeSampler to null when complete. 
- sampler.onCompleted().whenCompleteAsync((s, throwable) -> { - if (sampler == this.active) { - this.active = null; - } - }); return active = sampler; } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index e184afb6..de444557 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -27,6 +27,7 @@ import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.AbstractSampler; +import me.lucko.spark.common.sampler.Sampler; import me.lucko.spark.common.sampler.async.jfr.JfrReader; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; @@ -44,6 +45,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; import java.util.function.Predicate; /** @@ -64,8 +66,8 @@ public class AsyncSampler extends AbstractSampler { /** The executor used for timeouts */ private ScheduledExecutorService timeoutExecutor; - public AsyncSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { - super(platform, interval, threadDumper, endTime); + public AsyncSampler(Consumer whenStopped, SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { + super(whenStopped, platform, interval, threadDumper, endTime); this.profiler = AsyncProfilerAccess.INSTANCE.getProfiler(); this.dataAggregator = new AsyncDataAggregator(threadGrouper); } @@ -143,6 +145,7 @@ private void scheduleTimeout() { */ @Override public void stop() { + super.stop(); try { this.profiler.stop(); } catch (IllegalStateException e) { 
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 583e5a05..52f146fa 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -26,6 +26,7 @@ import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.AbstractSampler; +import me.lucko.spark.common.sampler.Sampler; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.tick.TickHook; @@ -41,6 +42,7 @@ import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Consumer; /** * A sampler implementation using Java (WarmRoast). @@ -62,13 +64,13 @@ public class JavaSampler extends AbstractSampler implements Runnable { /** Responsible for aggregating and then outputting collected sampling data */ private final JavaDataAggregator dataAggregator; - public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { - super(platform, interval, threadDumper, endTime); + public JavaSampler(Consumer whenStopped, SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { + super(whenStopped, platform, interval, threadDumper, endTime); this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative); } - public JavaSampler(SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, 
TickHook tickHook, int tickLengthThreshold) { - super(platform, interval, threadDumper, endTime); + public JavaSampler(Consumer whenStopped, SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { + super(whenStopped, platform, interval, threadDumper, endTime); this.dataAggregator = new TickedDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold); } @@ -80,6 +82,7 @@ public void start() { @Override public void stop() { + super.stop(); this.task.cancel(false); } From 2acdb6cc73d801ba12d7298c32994a73072827ce Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Mon, 18 Jul 2022 18:14:55 +0300 Subject: [PATCH 10/29] No longer use internal objects --- .../me/lucko/spark/api/profiler/Profiler.java | 9 ++++++++- .../common/command/modules/SamplerModule.java | 16 ++++++++-------- .../spark/common/sampler/ProfilerService.java | 16 +++------------- 3 files changed, 19 insertions(+), 22 deletions(-) diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java index 4ad83488..1dcd5c27 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java @@ -46,7 +46,14 @@ public interface Profiler { * @return the sampler, or if a validation error was caught, {@code null} */ @Nullable - Sampler create(ProfilerConfiguration configuration, Consumer errorReporter); + Sampler createSampler(ProfilerConfiguration configuration, Consumer errorReporter); + + /** + * Gets the active sampler of this profiler. + * @return the active sampler, or {@code null} if one isn't active + */ + @Nullable + Sampler activeSampler(); /** * Represents a sampler used for profiling. 
diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 7a78631b..088ff258 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -38,7 +38,6 @@ import me.lucko.spark.common.command.tabcomplete.CompletionSupplier; import me.lucko.spark.common.command.tabcomplete.TabCompleter; import me.lucko.spark.common.sampler.ProfilerService; -import me.lucko.spark.common.sampler.Sampler; import me.lucko.spark.common.sampler.SamplerBuilder; import me.lucko.spark.proto.SparkSamplerProtos; import net.kyori.adventure.text.event.ClickEvent; @@ -64,7 +63,7 @@ public class SamplerModule implements CommandModule { private static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler"; - private final ProfilerService service; + private final Profiler service; public SamplerModule(SparkPlatform platform) { service = new ProfilerService(platform); @@ -72,7 +71,9 @@ public SamplerModule(SparkPlatform platform) { @Override public void close() { - service.clearAndStop(); + final Profiler.Sampler active = service.activeSampler(); + if (active != null) + active.stop(); } @Override @@ -189,7 +190,7 @@ private void profilerStart(SparkPlatform platform, CommandResponseHandler resp, if (ticksOver != -1) { builder.minimumTickDuration(ticksOver); } - final Sampler sampler = service.create(builder.build(), e -> resp.replyPrefixed(text(e, RED))); + final Profiler.Sampler sampler = service.createSampler(builder.build(), e -> resp.replyPrefixed(text(e, RED))); if (sampler == null) // Feedback is handled in the consumer return; @@ -231,7 +232,7 @@ private void profilerStart(SparkPlatform platform, CommandResponseHandler resp, } private void profilerInfo(CommandResponseHandler resp) { - final Sampler active = service.active(); + 
final Profiler.Sampler active = service.activeSampler(); if (active == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { @@ -249,7 +250,7 @@ private void profilerInfo(CommandResponseHandler resp) { } private void profilerCancel(CommandResponseHandler resp) { - if (service.active() == null) { + if (service.activeSampler() == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { close(); @@ -258,7 +259,7 @@ private void profilerCancel(CommandResponseHandler resp) { } private void profilerStop(SparkPlatform platform, CommandResponseHandler resp, Arguments arguments) { - final Sampler sampler = service.active(); + final Profiler.Sampler sampler = service.activeSampler(); if (sampler == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { @@ -269,7 +270,6 @@ private void profilerStop(SparkPlatform platform, CommandResponseHandler resp, A boolean sepParentCalls = arguments.boolFlag("separate-parent-calls"); boolean saveToFile = arguments.boolFlag("save-to-file"); handleUpload(platform, resp, sampler.dumpReport(configuration(resp, comment, sepParentCalls, threadOrder)), saveToFile); - service.clear(); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java index de0daaa3..8d11af56 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java @@ -45,7 +45,7 @@ public ProfilerService(SparkPlatform platform) { } @Override - public me.lucko.spark.common.sampler.Sampler create(ProfilerConfiguration configuration, Consumer err) { + public me.lucko.spark.common.sampler.Sampler createSampler(ProfilerConfiguration configuration, Consumer err) { if (active != null) { err.accept("A profiler is already running!"); return null; @@ -92,20 +92,10 @@ public 
me.lucko.spark.common.sampler.Sampler create(ProfilerConfiguration config return active = sampler; } - public me.lucko.spark.common.sampler.Sampler active() { + @Override + public me.lucko.spark.common.sampler.Sampler activeSampler() { return active; } - public void clear() { - if (active != null) { - active = null; - } - } - public void clearAndStop() { - if (active != null) { - active.stop(); - active = null; - } - } private static long computeTimeout(@Nullable Duration duration) { if (duration == null) From 25cfe1ab45ba951f462fa62d2591c9ec44348fff Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Mon, 18 Jul 2022 19:14:34 +0300 Subject: [PATCH 11/29] Add multi-sampler support --- .../main/java/me/lucko/spark/api/Spark.java | 6 +- .../me/lucko/spark/api/profiler/Profiler.java | 28 +++++-- .../me/lucko/spark/common/api/SparkApi.java | 4 +- .../common/command/modules/SamplerModule.java | 22 +++--- .../spark/common/sampler/AbstractSampler.java | 11 +-- .../spark/common/sampler/ProfilerService.java | 73 ++++++++++++++----- .../spark/common/sampler/SamplerManager.java | 28 +++++++ .../common/sampler/async/AsyncSampler.java | 7 +- .../common/sampler/java/JavaSampler.java | 11 ++- 9 files changed, 136 insertions(+), 54 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerManager.java diff --git a/spark-api/src/main/java/me/lucko/spark/api/Spark.java b/spark-api/src/main/java/me/lucko/spark/api/Spark.java index d99b7eaa..6180fbdb 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/Spark.java +++ b/spark-api/src/main/java/me/lucko/spark/api/Spark.java @@ -104,12 +104,12 @@ public interface Spark { /** * Creates a new {@link Profiler profiler}. - * Note: this method creates a new profiler every time. Each profiler can only - * manage a sampler at a time. 
* + * @param maxSamplers the maximum amount of active samplers the profiler can manage * @return the profiler + * @throws IllegalArgumentException if {@code maxSamplers} <= 0 */ - @NonNull Profiler profiler(); + @NonNull Profiler profiler(int maxSamplers); /** * Gets the {@link ThreadGrouper} associated with a Proto {@link DataAggregator.ThreadGrouper}. diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java index 1dcd5c27..33d2774a 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java @@ -28,18 +28,20 @@ import me.lucko.spark.api.profiler.report.ProfilerReport; import me.lucko.spark.api.profiler.report.ReportConfiguration; import org.jetbrains.annotations.Nullable; +import org.jetbrains.annotations.Unmodifiable; +import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Consumer; /** - * A profiler used for sampling.
- * A profiler can only manage one sampler at a time. + * A profiler used for sampling. */ public interface Profiler { /** * Generates a new {@link Sampler}.
- * Note: the sampler is not started by default, use {@link Sampler#start()} + * Note: the sampler is not started by default, use {@link Sampler#start()}.
+ * This method is thread-safe. * * @param configuration the configuration to use for the profiler * @param errorReporter a consumer that reports any errors encountered in the creation of the sampler @@ -49,11 +51,23 @@ public interface Profiler { Sampler createSampler(ProfilerConfiguration configuration, Consumer errorReporter); /** - * Gets the active sampler of this profiler. - * @return the active sampler, or {@code null} if one isn't active + * Gets the active samplers of this profiler. + * + * @return the active samplers */ - @Nullable - Sampler activeSampler(); + @Unmodifiable List activeSamplers(); + + /** + * Gets the maximum amount of samplers managed by this profiler. + * + * @return the maximum amount of samplers + */ + int maxSamplers(); + + /** + * Stops this profiler and any {@link #activeSamplers() active children}.
+ */ + void stop(); /** * Represents a sampler used for profiling. diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java index a5d1253b..f1466e25 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java @@ -211,8 +211,8 @@ public static void unregister() { } @Override - public @NonNull Profiler profiler() { - return new ProfilerService(platform); + public @NonNull Profiler profiler(int maxSamplers) { + return new ProfilerService(platform, maxSamplers); } @Override diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 088ff258..4e61f225 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -21,6 +21,7 @@ package me.lucko.spark.common.command.modules; import com.google.common.collect.Iterables; +import me.lucko.spark.api.SparkProvider; import me.lucko.spark.api.profiler.Profiler; import me.lucko.spark.api.profiler.dumper.RegexThreadDumper; import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; @@ -63,17 +64,15 @@ public class SamplerModule implements CommandModule { private static final String SPARK_SAMPLER_MEDIA_TYPE = "application/x-spark-sampler"; - private final Profiler service; + private final Profiler profiler; public SamplerModule(SparkPlatform platform) { - service = new ProfilerService(platform); + profiler = new ProfilerService(platform, 1); } @Override public void close() { - final Profiler.Sampler active = service.activeSampler(); - if (active != null) - active.stop(); + profiler.stop(); } @Override @@ -190,7 +189,7 @@ private void profilerStart(SparkPlatform platform, CommandResponseHandler 
resp, if (ticksOver != -1) { builder.minimumTickDuration(ticksOver); } - final Profiler.Sampler sampler = service.createSampler(builder.build(), e -> resp.replyPrefixed(text(e, RED))); + final Profiler.Sampler sampler = profiler.createSampler(builder.build(), e -> resp.replyPrefixed(text(e, RED))); if (sampler == null) // Feedback is handled in the consumer return; @@ -232,7 +231,7 @@ private void profilerStart(SparkPlatform platform, CommandResponseHandler resp, } private void profilerInfo(CommandResponseHandler resp) { - final Profiler.Sampler active = service.activeSampler(); + final Profiler.Sampler active = activeSampler(); if (active == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { @@ -250,7 +249,7 @@ private void profilerInfo(CommandResponseHandler resp) { } private void profilerCancel(CommandResponseHandler resp) { - if (service.activeSampler() == null) { + if (activeSampler() == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { close(); @@ -259,7 +258,7 @@ private void profilerCancel(CommandResponseHandler resp) { } private void profilerStop(SparkPlatform platform, CommandResponseHandler resp, Arguments arguments) { - final Profiler.Sampler sampler = service.activeSampler(); + final Profiler.Sampler sampler = activeSampler(); if (sampler == null) { resp.replyPrefixed(text("There isn't an active profiler running.")); } else { @@ -272,6 +271,11 @@ private void profilerStop(SparkPlatform platform, CommandResponseHandler resp, A handleUpload(platform, resp, sampler.dumpReport(configuration(resp, comment, sepParentCalls, threadOrder)), saveToFile); } } + + private Profiler.Sampler activeSampler() { + if (profiler.activeSamplers().isEmpty()) return null; + return profiler.activeSamplers().get(0); + } public static String postData(SparkPlatform platform, SparkSamplerProtos.SamplerData output) throws IOException { String key = platform.getBytebinClient().postContent(output, 
SPARK_SAMPLER_MEDIA_TYPE).key(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index d7eba23d..4d3fc803 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -48,14 +48,14 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; -import java.util.function.Consumer; /** * Base implementation class for {@link Sampler}s. */ public abstract class AbstractSampler implements Sampler { - protected final Consumer whenStopped; + /** The manager associated with this sampler */ + protected final SamplerManager manager; /** The spark platform instance */ protected final SparkPlatform platform; @@ -81,8 +81,8 @@ public abstract class AbstractSampler implements Sampler { /** The garbage collector statistics when profiling started */ protected Map initialGcStats; - protected AbstractSampler(Consumer whenStopped, SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) { - this.whenStopped = whenStopped; + protected AbstractSampler(SamplerManager manager, SparkPlatform platform, int interval, ThreadDumper threadDumper, long autoEndTime) { + this.manager = manager; this.platform = platform; this.interval = interval; this.threadDumper = threadDumper; @@ -112,11 +112,12 @@ protected Map getInitialGcStats() { @Override public void stop() { - whenStopped.accept(this); + manager.markStopped(this); } @Override public void start() { + manager.markStarted(this); this.startTime = System.currentTimeMillis(); TickHook tickHook = this.platform.getTickHook(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java index 8d11af56..469efabd 100644 --- 
a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java @@ -31,23 +31,38 @@ import org.checkerframework.checker.nullness.qual.Nullable; import java.time.Duration; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.Consumer; -public class ProfilerService implements Profiler { +public class ProfilerService implements Profiler, SamplerManager { private final SparkPlatform platform; public static final int MINIMUM_DURATION = 10; - private me.lucko.spark.common.sampler.Sampler active; + private final int maxSamplers; + private final List active; + private final List activeView; + + public ProfilerService(SparkPlatform platform, int samplerAmount) { + if (samplerAmount <= 0) + throw new IllegalArgumentException("samplerAmount <= 0"); - public ProfilerService(SparkPlatform platform) { this.platform = platform; + this.maxSamplers = samplerAmount; + this.active = new CopyOnWriteArrayList<>(); + this.activeView = Collections.unmodifiableList(active); } @Override - public me.lucko.spark.common.sampler.Sampler createSampler(ProfilerConfiguration configuration, Consumer err) { - if (active != null) { - err.accept("A profiler is already running!"); + public Sampler createSampler(ProfilerConfiguration configuration, Consumer err) { + if (active.size() >= maxSamplers) { + if (maxSamplers == 1) { + err.accept("A profiling sampler is already running!"); + } else { + err.accept(String.format("Maximum amount of %s profiling samplers are already running!", active.size())); + } return null; } @@ -75,26 +90,36 @@ public me.lucko.spark.common.sampler.Sampler createSampler(ProfilerConfiguration final int intervalMicros = (int) (interval * 1000d); final long timeout = computeTimeout(duration); - me.lucko.spark.common.sampler.Sampler sampler; - // set activeSampler to null when stopped. 
- final Consumer whenStopped = s -> { - if (s == this.active) - this.active = null; - }; + + final me.lucko.spark.common.sampler.Sampler sampler; if (minimum >= 1) { - sampler = new JavaSampler(whenStopped, platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative(), hook, configuration.minimumTickDuration()); + sampler = new JavaSampler(this, platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative(), hook, configuration.minimumTickDuration()); } else if (!configuration.forceJavaSampler() && !(configuration.dumper() instanceof RegexThreadDumper) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) { - sampler = new AsyncSampler(whenStopped, platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout); + sampler = new AsyncSampler(this, platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout); } else { - sampler = new JavaSampler(whenStopped, platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative()); + sampler = new JavaSampler(this, platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative()); } - return active = sampler; + return sampler; + } + + @Override + public List activeSamplers() { + return activeView; + } + + @Override + public int maxSamplers() { + return maxSamplers; } @Override - public me.lucko.spark.common.sampler.Sampler activeSampler() { - return active; + public void stop() { + // Prevent concurrent modifications + //noinspection ForLoopReplaceableByForEach + for (int i = 0; i < active.size(); i++) { + active.get(i).stop(); + } } private static long computeTimeout(@Nullable Duration duration) { @@ -102,4 +127,16 @@ private static long 
computeTimeout(@Nullable Duration duration) { return -1; return System.currentTimeMillis() + duration.toMillis(); } + + @Override + public void markStopped(Sampler sampler) { + active.remove(sampler); + } + + @Override + public void markStarted(Sampler sampler) { + if (active.size() >= maxSamplers) + throw new ArrayIndexOutOfBoundsException("Maximum amount of active samplers has been reached!"); + active.add(sampler); + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerManager.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerManager.java new file mode 100644 index 00000000..c2175c44 --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerManager.java @@ -0,0 +1,28 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +package me.lucko.spark.common.sampler; + +import me.lucko.spark.api.profiler.Profiler; + +public interface SamplerManager { + void markStopped(Profiler.Sampler sampler); + void markStarted(Profiler.Sampler sampler); +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index de444557..0075d73a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -27,7 +27,7 @@ import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.AbstractSampler; -import me.lucko.spark.common.sampler.Sampler; +import me.lucko.spark.common.sampler.SamplerManager; import me.lucko.spark.common.sampler.async.jfr.JfrReader; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; @@ -45,7 +45,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -import java.util.function.Consumer; import java.util.function.Predicate; /** @@ -66,8 +65,8 @@ public class AsyncSampler extends AbstractSampler { /** The executor used for timeouts */ private ScheduledExecutorService timeoutExecutor; - public AsyncSampler(Consumer whenStopped, SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { - super(whenStopped, platform, interval, threadDumper, endTime); + public AsyncSampler(SamplerManager manager, SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime) { + super(manager, platform, interval, threadDumper, endTime); this.profiler = AsyncProfilerAccess.INSTANCE.getProfiler(); this.dataAggregator = new AsyncDataAggregator(threadGrouper); } diff 
--git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index 52f146fa..b1b795d5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -26,7 +26,7 @@ import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.AbstractSampler; -import me.lucko.spark.common.sampler.Sampler; +import me.lucko.spark.common.sampler.SamplerManager; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; import me.lucko.spark.common.tick.TickHook; @@ -42,7 +42,6 @@ import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Consumer; /** * A sampler implementation using Java (WarmRoast). 
@@ -64,13 +63,13 @@ public class JavaSampler extends AbstractSampler implements Runnable { /** Responsible for aggregating and then outputting collected sampling data */ private final JavaDataAggregator dataAggregator; - public JavaSampler(Consumer<Sampler> whenStopped, SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { - super(whenStopped, platform, interval, threadDumper, endTime); + public JavaSampler(SamplerManager manager, SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative) { + super(manager, platform, interval, threadDumper, endTime); this.dataAggregator = new SimpleDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative); } - public JavaSampler(Consumer<Sampler> whenStopped, SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { - super(whenStopped, platform, interval, threadDumper, endTime); + public JavaSampler(SamplerManager manager, SparkPlatform platform, int interval, ThreadDumper threadDumper, ThreadGrouper threadGrouper, long endTime, boolean ignoreSleeping, boolean ignoreNative, TickHook tickHook, int tickLengthThreshold) { + super(manager, platform, interval, threadDumper, endTime); this.dataAggregator = new TickedDataAggregator(this.workerPool, threadGrouper, interval, ignoreSleeping, ignoreNative, tickHook, tickLengthThreshold); } From 7e3ff2bb52c52abff872655273e30c42e0dbbeaf Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Mon, 18 Jul 2022 19:20:14 +0300 Subject: [PATCH 12/29] Use dedicated error handler interface --- .../me/lucko/spark/api/profiler/Profiler.java | 4 +- .../me/lucko/spark/api/util/ErrorHandler.java | 57 +++++++++++++++++++ .../spark/common/sampler/ProfilerService.java | 4
+- 3 files changed, 61 insertions(+), 4 deletions(-) create mode 100644 spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java index 33d2774a..83ed3c31 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java @@ -27,12 +27,12 @@ import me.lucko.spark.api.profiler.report.ProfilerReport; import me.lucko.spark.api.profiler.report.ReportConfiguration; +import me.lucko.spark.api.util.ErrorHandler; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.Unmodifiable; import java.util.List; import java.util.concurrent.CompletableFuture; -import java.util.function.Consumer; /** * A profiler used for sampling. @@ -48,7 +48,7 @@ public interface Profiler { * @return the sampler, or if a validation error was caught, {@code null} */ @Nullable - Sampler createSampler(ProfilerConfiguration configuration, Consumer<String> errorReporter); + Sampler createSampler(ProfilerConfiguration configuration, ErrorHandler errorReporter); /** * Gets the active samplers of this profiler. diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java b/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java new file mode 100644 index 00000000..8b1a389d --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java @@ -0,0 +1,57 @@ +/* + * This file is part of spark, licensed under the MIT License.
+ * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.util; + +import java.util.function.Function; + +/** + * Interface used for reporting errors during execution of methods. + */ +@FunctionalInterface +public interface ErrorHandler { + /** + * Accepts and reports an error. + * + * @param error the error to report + */ + void accept(String error); + + /** + * Creates an {@link ErrorHandler} that throws exceptions. 
+ * + * @param supplier a factory to use for creating the exceptions + * @param <T> the type of the exception + * @return the handler + */ + static <T extends Throwable> ErrorHandler throwing(Function<String, T> supplier) throws T { + return e -> throwAsUnchecked(supplier.apply(e)); + } + + @SuppressWarnings("unchecked") + static <E extends Throwable> void throwAsUnchecked(Throwable exception) throws E { + throw (E) exception; + } +} \ No newline at end of file diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java index 469efabd..d7d5e235 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java @@ -23,6 +23,7 @@ import me.lucko.spark.api.profiler.Profiler; import me.lucko.spark.api.profiler.ProfilerConfiguration; import me.lucko.spark.api.profiler.dumper.RegexThreadDumper; +import me.lucko.spark.api.util.ErrorHandler; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.async.AsyncProfilerAccess; import me.lucko.spark.common.sampler.async.AsyncSampler; @@ -34,7 +35,6 @@ import java.util.Collections; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.function.Consumer; public class ProfilerService implements Profiler, SamplerManager { private final SparkPlatform platform; @@ -56,7 +56,7 @@ public ProfilerService(SparkPlatform platform, int samplerAmount) { } @Override - public Sampler createSampler(ProfilerConfiguration configuration, Consumer<String> err) { + public Sampler createSampler(ProfilerConfiguration configuration, ErrorHandler err) { if (active.size() >= maxSamplers) { if (maxSamplers == 1) { err.accept("A profiling sampler is already running!"); From 076238c10775a6bcf5405cd43bc944917354fa79 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Mon, 18 Jul 2022 23:27:17 +0300 Subject: [PATCH 13/29] Add heap analysis API ---
.../main/java/me/lucko/spark/api/Spark.java | 8 ++ .../me/lucko/spark/api/heap/HeapAnalysis.java | 61 +++++++++++++++ .../spark/api/heap/HeapSummaryReport.java | 67 +++++++++++++++++ .../me/lucko/spark/api/profiler/Profiler.java | 8 +- .../api/profiler/report/ProfilerReport.java | 12 +++ .../profiler/report/ReportConfiguration.java | 36 +-------- .../report/ReportConfigurationBuilder.java | 7 +- .../java/me/lucko/spark/api/util/Sender.java | 69 +++++++++++++++++ spark-common/build.gradle | 1 + .../me/lucko/spark/common/api/SparkApi.java | 9 +++ .../command/modules/HeapAnalysisModule.java | 12 ++- .../common/command/sender/CommandSender.java | 6 +- .../common/heapdump/HeapAnalysisProvider.java | 75 +++++++++++++++++++ .../common/heapdump/HeapDumpSummary.java | 11 ++- .../spark/common/sampler/AbstractSampler.java | 12 ++- .../lucko/spark/common/sampler/Sampler.java | 4 +- .../common/sampler/async/AsyncSampler.java | 4 +- .../common/sampler/java/JavaSampler.java | 4 +- 18 files changed, 347 insertions(+), 59 deletions(-) create mode 100644 spark-api/src/main/java/me/lucko/spark/api/heap/HeapAnalysis.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java create mode 100644 spark-api/src/main/java/me/lucko/spark/api/util/Sender.java create mode 100644 spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapAnalysisProvider.java diff --git a/spark-api/src/main/java/me/lucko/spark/api/Spark.java b/spark-api/src/main/java/me/lucko/spark/api/Spark.java index 6180fbdb..c6a551c7 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/Spark.java +++ b/spark-api/src/main/java/me/lucko/spark/api/Spark.java @@ -26,6 +26,7 @@ package me.lucko.spark.api; import me.lucko.spark.api.gc.GarbageCollector; +import me.lucko.spark.api.heap.HeapAnalysis; import me.lucko.spark.api.profiler.Profiler; import me.lucko.spark.api.profiler.ProfilerConfigurationBuilder; import me.lucko.spark.api.profiler.thread.ThreadGrouper; @@ -119,4 +120,11 @@ public 
interface Spark { * @throws AssertionError if the type is {@link DataAggregator.ThreadGrouper#UNRECOGNIZED unknown}. */ @NonNull ThreadGrouper getGrouper(DataAggregator.ThreadGrouper type); + + /** + * Gets a {@link HeapAnalysis} instance. + * + * @return the heap analysis instance + */ + @NonNull HeapAnalysis heapAnalysis(); } diff --git a/spark-api/src/main/java/me/lucko/spark/api/heap/HeapAnalysis.java b/spark-api/src/main/java/me/lucko/spark/api/heap/HeapAnalysis.java new file mode 100644 index 00000000..c8f9e92f --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/heap/HeapAnalysis.java @@ -0,0 +1,61 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package me.lucko.spark.api.heap; + +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import me.lucko.spark.api.Spark; +import me.lucko.spark.api.util.Sender; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.nio.file.Path; + +/** + * Utility interface used for heap analysis. + * + * @see Spark#heapAnalysis() + */ +public interface HeapAnalysis { + + /** + * Creates a summary of the heap. + * + * @param sender the sender of the report + * @return the report + */ + @NotNull + HeapSummaryReport summary(@Nullable Sender sender); + + /** + * Creates a heap dump at the given output path. + * + * @param outputPath the path to write the snapshot to + * @param liveOnly if true dump only live objects i.e. objects that are reachable from others + */ + @NotNull + @CanIgnoreReturnValue + Path dumpHeap(Path outputPath, boolean liveOnly) throws Exception; +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java b/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java new file mode 100644 index 00000000..5dfb480b --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java @@ -0,0 +1,67 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.heap; + +import com.google.errorprone.annotations.CanIgnoreReturnValue; +import me.lucko.spark.proto.SparkHeapProtos; +import org.jetbrains.annotations.NotNull; + +import java.io.IOException; +import java.nio.file.Path; + +/** + * Represents the result of a heap summary. + * + * @see HeapAnalysis#summary() + */ +public interface HeapSummaryReport { + /** + * Uploads this report online. + * + * @return the URL of the uploaded report + */ + @NotNull + String upload() throws IOException; + + /** + * Gets the data of this report + * + * @return the data + */ + @NotNull + SparkHeapProtos.HeapData data(); + + /** + * Saves this report to a local file. + * + * @param path the path to save to + * @return the {@code path} + * @throws IOException if an exception occurred + */ + @NotNull + @CanIgnoreReturnValue + Path saveToFile(Path path) throws IOException; +} diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java index 83ed3c31..a7b4b100 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java @@ -35,7 +35,10 @@ import java.util.concurrent.CompletableFuture; /** - * A profiler used for sampling. + * The base interface of profilers.
+ * Profilers monitor the activity of the JVM, using {@link Sampler samplers}. + * + * @see me.lucko.spark.api.Spark#profiler(int) */ public interface Profiler { /** @@ -55,7 +58,8 @@ public interface Profiler { * * @return the active samplers */ - @Unmodifiable List<Sampler> activeSamplers(); + @Unmodifiable + List<Sampler> activeSamplers(); /** * Gets the maximum amount of samplers managed by this profiler. diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java index 87bcf734..5f78289e 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java @@ -25,32 +25,44 @@ package me.lucko.spark.api.profiler.report; +import com.google.errorprone.annotations.CanIgnoreReturnValue; import me.lucko.spark.proto.SparkSamplerProtos; +import org.jetbrains.annotations.NotNull; import java.io.IOException; import java.nio.file.Path; /** * Represents the result of a profiler. + * + * @see me.lucko.spark.api.profiler.Profiler.Sampler#dumpReport(ReportConfiguration) + * @see me.lucko.spark.api.profiler.Profiler.Sampler#onCompleted(ReportConfiguration) */ public interface ProfilerReport { /** * Uploads this report online. + * * @return the URL of the uploaded report */ + @NotNull String upload() throws IOException; /** * Gets the data of this report + * * @return the data */ + @NotNull SparkSamplerProtos.SamplerData data(); /** * Saves this report to a local file. 
+ * * @param path the path to save to * @return the {@code path} * @throws IOException if an exception occurred */ + @NotNull + @CanIgnoreReturnValue Path saveToFile(Path path) throws IOException; } diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java index c7d21afb..fe1b0904 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java @@ -27,11 +27,10 @@ import me.lucko.spark.api.profiler.thread.ThreadNode; import me.lucko.spark.api.profiler.thread.ThreadOrder; -import me.lucko.spark.proto.SparkProtos; +import me.lucko.spark.api.util.Sender; import org.jetbrains.annotations.Nullable; import java.util.Comparator; -import java.util.UUID; /** * Configuration for {@link ProfilerReport reports}. @@ -72,37 +71,4 @@ static ReportConfigurationBuilder builder() { @Nullable String comment(); - class Sender { - public final String name; - /** - * The UUID of the sender. May be {@code null} if it wasn't sent by a player. - */ - @Nullable - public final UUID uuid; - - public Sender(String name, @Nullable UUID uuid) { - this.name = name; - this.uuid = uuid; - } - - /** - * Checks if this sender is a player. - * @return if this sender is a player - */ - public boolean isPlayer() { - return uuid != null; - } - - public SparkProtos.CommandSenderMetadata toProto() { - SparkProtos.CommandSenderMetadata.Builder proto = SparkProtos.CommandSenderMetadata.newBuilder() - .setType(isPlayer() ? 
SparkProtos.CommandSenderMetadata.Type.PLAYER : SparkProtos.CommandSenderMetadata.Type.OTHER) - .setName(this.name); - - if (this.uuid != null) { - proto.setUniqueId(this.uuid.toString()); - } - - return proto.build(); - } - } } diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java index 88893bdb..64d75f2d 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java @@ -27,6 +27,7 @@ import me.lucko.spark.api.profiler.thread.ThreadNode; import me.lucko.spark.api.profiler.thread.ThreadOrder; +import me.lucko.spark.api.util.Sender; import org.checkerframework.checker.nullness.qual.NonNull; import org.jetbrains.annotations.Nullable; @@ -35,7 +36,7 @@ public class ReportConfigurationBuilder { private Comparator order = ThreadOrder.BY_NAME; - private ReportConfiguration.Sender sender; + private Sender sender; private boolean separateParentCalls; private String comment; @@ -50,13 +51,13 @@ public ReportConfigurationBuilder order(@NonNull Comparator order) { return this; } - public ReportConfigurationBuilder sender(@Nullable ReportConfiguration.Sender sender) { + public ReportConfigurationBuilder sender(@Nullable Sender sender) { this.sender = sender; return this; } public ReportConfigurationBuilder sender(@NonNull String name, @Nullable UUID uuid) { - return sender(new ReportConfiguration.Sender(name, uuid)); + return sender(new Sender(name, uuid)); } public ReportConfigurationBuilder separateParentCalls(boolean separateParentCalls) { diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/Sender.java b/spark-api/src/main/java/me/lucko/spark/api/util/Sender.java new file mode 100644 index 00000000..10eb987e --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/util/Sender.java @@ 
-0,0 +1,69 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.util; + +import me.lucko.spark.proto.SparkProtos; +import org.jetbrains.annotations.Nullable; + +import java.util.UUID; + +/** + * Represents a sender used for online uploading of data. + */ +public class Sender { + public final String name; + /** + * The UUID of the sender. May be {@code null} if it wasn't sent by a player. + */ + @Nullable + public final UUID uuid; + + public Sender(String name, @Nullable UUID uuid) { + this.name = name; + this.uuid = uuid; + } + + /** + * Checks if this sender is a player. 
+ * + * @return if this sender is a player + */ + public boolean isPlayer() { + return uuid != null; + } + + public SparkProtos.CommandSenderMetadata toProto() { + SparkProtos.CommandSenderMetadata.Builder proto = SparkProtos.CommandSenderMetadata.newBuilder() + .setType(isPlayer() ? SparkProtos.CommandSenderMetadata.Type.PLAYER : SparkProtos.CommandSenderMetadata.Type.OTHER) + .setName(this.name); + + if (this.uuid != null) { + proto.setUniqueId(this.uuid.toString()); + } + + return proto.build(); + } +} diff --git a/spark-common/build.gradle b/spark-common/build.gradle index a9d836ca..3d417f4d 100644 --- a/spark-common/build.gradle +++ b/spark-common/build.gradle @@ -27,4 +27,5 @@ dependencies { compileOnly 'com.google.code.gson:gson:2.7' compileOnly 'com.google.guava:guava:19.0' compileOnly 'org.checkerframework:checker-qual:3.8.0' + compileOnly 'org.jetbrains:annotations:23.0.0' } diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java index f1466e25..9d8eb1ad 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java @@ -24,6 +24,7 @@ import me.lucko.spark.api.Spark; import me.lucko.spark.api.SparkProvider; import me.lucko.spark.api.gc.GarbageCollector; +import me.lucko.spark.api.heap.HeapAnalysis; import me.lucko.spark.api.profiler.Profiler; import me.lucko.spark.api.profiler.ProfilerConfigurationBuilder; import me.lucko.spark.api.profiler.thread.ThreadGrouper; @@ -32,6 +33,7 @@ import me.lucko.spark.api.statistic.types.GenericStatistic; import me.lucko.spark.api.util.StreamSupplier; import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.heapdump.HeapAnalysisProvider; import me.lucko.spark.common.monitor.cpu.CpuMonitor; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; import me.lucko.spark.common.monitor.tick.TickStatistics; @@ -68,9 
+70,11 @@ public class SparkApi implements Spark { } private final SparkPlatform platform; + private final HeapAnalysis heapAnalysis; public SparkApi(SparkPlatform platform) { this.platform = platform; + heapAnalysis = new HeapAnalysisProvider(platform); } @Override @@ -297,4 +301,9 @@ public SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper asProto() default: throw new AssertionError("Unknown thread grouper!"); } } + + @Override + public @NonNull HeapAnalysis heapAnalysis() { + return heapAnalysis; + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java index 5bd62a89..3758ba92 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java @@ -73,6 +73,11 @@ public void registerCommands(Consumer consumer) { ); } + public static String upload(SparkPlatform platform, SparkHeapProtos.HeapData output) throws IOException { + String key = platform.getBytebinClient().postContent(output, SPARK_HEAP_MEDIA_TYPE).key(); + return platform.getViewerUrl() + key; + } + private static void heapSummary(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { if (arguments.boolFlag("run-gc-before")) { resp.broadcastPrefixed(text("Running garbage collector...")); @@ -90,17 +95,16 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co return; } - SparkHeapProtos.HeapData output = heapDump.toProto(platform, sender); + SparkHeapProtos.HeapData output = heapDump.toProto(platform, sender.asSender()); boolean saveToFile = false; if (arguments.boolFlag("save-to-file")) { saveToFile = true; } else { try { - String key = platform.getBytebinClient().postContent(output, SPARK_HEAP_MEDIA_TYPE).key(); - String url = platform.getViewerUrl() + 
key; + final String url = upload(platform, output); - resp.broadcastPrefixed(text("Heap dump summmary output:", GOLD)); + resp.broadcastPrefixed(text("Heap dump summary output:", GOLD)); resp.broadcast(text() .content(url) .color(GRAY) diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java b/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java index 45be7f5a..476fd827 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/sender/CommandSender.java @@ -24,7 +24,7 @@ import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; -import me.lucko.spark.api.profiler.report.ReportConfiguration; +import me.lucko.spark.api.util.Sender; import me.lucko.spark.proto.SparkProtos.CommandSenderMetadata; import net.kyori.adventure.text.Component; @@ -45,8 +45,8 @@ default Data toData() { return new Data(getName(), getUniqueId()); } - default ReportConfiguration.Sender asSender() { - return new ReportConfiguration.Sender(getName(), getUniqueId()); + default Sender asSender() { + return new Sender(getName(), getUniqueId()); } final class Data { diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapAnalysisProvider.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapAnalysisProvider.java new file mode 100644 index 00000000..156862be --- /dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapAnalysisProvider.java @@ -0,0 +1,75 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.heapdump; + +import me.lucko.spark.api.heap.HeapAnalysis; +import me.lucko.spark.api.heap.HeapSummaryReport; +import me.lucko.spark.api.util.Sender; +import me.lucko.spark.common.SparkPlatform; +import me.lucko.spark.common.command.modules.HeapAnalysisModule; +import me.lucko.spark.proto.SparkHeapProtos; +import org.checkerframework.checker.nullness.qual.NonNull; +import org.jetbrains.annotations.NotNull; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +public class HeapAnalysisProvider implements HeapAnalysis { + private final SparkPlatform platform; + + public HeapAnalysisProvider(SparkPlatform platform) { + this.platform = platform; + } + + @Override + public @NotNull HeapSummaryReport summary(Sender sender) { + final SparkHeapProtos.HeapData data = HeapDumpSummary.createNew().toProto(platform, sender); + return new HeapSummaryReport() { + String uploadedUrl; + + @Override + @NonNull + public String upload() throws IOException { + if (uploadedUrl == null) + uploadedUrl = HeapAnalysisModule.upload(platform, data); + return uploadedUrl; + } + + @NotNull + @Override + public SparkHeapProtos.HeapData data() { + return data; + } + + @Override + public @NotNull Path saveToFile(Path path) throws IOException { + return Files.write(path, data.toByteArray()); + } + }; + } + + @Override + public @NotNull Path dumpHeap(Path outputPath, boolean liveOnly) throws Exception { + HeapDump.dumpHeap(outputPath, liveOnly); + return outputPath; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java 
b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java index c0980e79..97d3fd7d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java +++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapDumpSummary.java @@ -20,12 +20,14 @@ package me.lucko.spark.common.heapdump; +import me.lucko.spark.api.util.Sender; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.sender.CommandSender; import me.lucko.spark.proto.SparkHeapProtos.HeapData; import me.lucko.spark.proto.SparkHeapProtos.HeapEntry; import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata; +import org.jetbrains.annotations.Nullable; import org.objectweb.asm.Type; import java.lang.management.ManagementFactory; @@ -125,10 +127,13 @@ private HeapDumpSummary(List entries) { this.entries = entries; } - public HeapData toProto(SparkPlatform platform, CommandSender creator) { + public HeapData toProto(SparkPlatform platform, @Nullable Sender creator) { HeapMetadata.Builder metadata = HeapMetadata.newBuilder() - .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()) - .setCreator(creator.toData().toProto()); + .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()); + if (creator != null) { + metadata.setCreator(creator.toProto()); + } + try { metadata.setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null)); } catch (Exception e) { diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 4d3fc803..25a1e748 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -24,6 +24,7 @@ import me.lucko.spark.api.profiler.dumper.ThreadDumper; import me.lucko.spark.api.profiler.report.ProfilerReport; import 
me.lucko.spark.api.profiler.report.ReportConfiguration; +import me.lucko.spark.api.util.Sender; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.modules.SamplerModule; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; @@ -40,6 +41,7 @@ import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; +import org.jetbrains.annotations.NotNull; import java.io.IOException; import java.nio.file.Files; @@ -139,6 +141,7 @@ private ProfilerReport createReport(ReportConfiguration configuration) { String uploadedUrl; @Override + @NonNull public String upload() throws IOException { if (uploadedUrl == null) uploadedUrl = SamplerModule.postData(platform, data); @@ -146,11 +149,13 @@ public String upload() throws IOException { } @Override + @NotNull public SparkSamplerProtos.SamplerData data() { return data; } @Override + @NotNull public Path saveToFile(Path path) throws IOException { return Files.write(path, data.toByteArray()); } @@ -168,7 +173,7 @@ public CompletableFuture onCompleted() { return future; } - protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, ReportConfiguration.Sender creator, @Nullable String comment, DataAggregator dataAggregator) { + protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform platform, @org.jetbrains.annotations.Nullable Sender creator, @Nullable String comment, DataAggregator dataAggregator) { SamplerMetadata.Builder metadata = SamplerMetadata.newBuilder() .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto()) .setStartTime(this.startTime) @@ -177,8 +182,9 @@ protected void writeMetadataToProto(SamplerData.Builder proto, SparkPlatform pla .setThreadDumper(this.threadDumper.getMetadata()) .setDataAggregator(dataAggregator.getMetadata()); - if (creator != null) - 
metadata.setCreator(creator.toProto()); + if (creator != null) { + metadata.setCreator(creator.toProto()); + } if (comment != null) { metadata.setComment(comment); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java index b2707b6b..7a8e4f17 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/Sampler.java @@ -21,7 +21,7 @@ package me.lucko.spark.common.sampler; import me.lucko.spark.api.profiler.Profiler; -import me.lucko.spark.api.profiler.report.ReportConfiguration; +import me.lucko.spark.api.util.Sender; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.node.MergeMode; import me.lucko.spark.common.sampler.node.ThreadNode; @@ -36,6 +36,6 @@ public interface Sampler extends Profiler.Sampler { // Methods used to export the sampler data to the web viewer. - SamplerData toProto(SparkPlatform platform, ReportConfiguration.Sender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup); + SamplerData toProto(SparkPlatform platform, Sender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup); } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index 0075d73a..ba76f4a6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -23,8 +23,8 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; import me.lucko.spark.api.profiler.dumper.ThreadDumper; -import me.lucko.spark.api.profiler.report.ReportConfiguration; import 
me.lucko.spark.api.profiler.thread.ThreadGrouper; +import me.lucko.spark.api.util.Sender; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.common.sampler.SamplerManager; @@ -161,7 +161,7 @@ public void stop() { } @Override - public SamplerData toProto(SparkPlatform platform, ReportConfiguration.Sender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + public SamplerData toProto(SparkPlatform platform, Sender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { SamplerData.Builder proto = SamplerData.newBuilder(); writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator); aggregateOutput(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java index b1b795d5..8b235674 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/java/JavaSampler.java @@ -22,8 +22,8 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import me.lucko.spark.api.profiler.dumper.ThreadDumper; -import me.lucko.spark.api.profiler.report.ReportConfiguration; import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import me.lucko.spark.api.util.Sender; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.sampler.AbstractSampler; import me.lucko.spark.common.sampler.SamplerManager; @@ -125,7 +125,7 @@ public void run() { } @Override - public SamplerData toProto(SparkPlatform platform, ReportConfiguration.Sender creator, Comparator outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) { + public SamplerData toProto(SparkPlatform platform, Sender creator, Comparator outputOrder, String comment, MergeMode mergeMode, 
ClassSourceLookup classSourceLookup) { SamplerData.Builder proto = SamplerData.newBuilder(); writeMetadataToProto(proto, platform, creator, comment, this.dataAggregator); writeDataToProto(proto, this.dataAggregator, outputOrder, mergeMode, classSourceLookup); From d36505e0c62d5170e89ddf0ed3c12bb8beaa02e3 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Mon, 18 Jul 2022 23:39:33 +0300 Subject: [PATCH 14/29] Use link tags --- .../java/me/lucko/spark/api/profiler/ProfilerConfiguration.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java index 4543b293..c83d4e8d 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java @@ -94,7 +94,7 @@ static ProfilerConfigurationBuilder builder() { ThreadDumper dumper(); /** - * Get the choice of which thread grouper (AS_ONE, BY_NAME, BY_POOL) to use for this profiler. + * Get the choice of which thread grouper ({@link ThreadGrouper#AS_ONE}, {@link ThreadGrouper#BY_NAME}, {@link ThreadGrouper#BY_POOL}) to use for this profiler. * If the grouper is null, BY_POOL is used. 
* * @return the thread grouper choice From 10f62ea0d17cee27c3b88ead17bfc773c65735b6 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sat, 23 Jul 2022 14:35:27 +0300 Subject: [PATCH 15/29] Fix javadoc --- spark-api/src/main/java/me/lucko/spark/api/Spark.java | 2 +- .../main/java/me/lucko/spark/api/heap/HeapSummaryReport.java | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/spark-api/src/main/java/me/lucko/spark/api/Spark.java b/spark-api/src/main/java/me/lucko/spark/api/Spark.java index c6a551c7..b24ca324 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/Spark.java +++ b/spark-api/src/main/java/me/lucko/spark/api/Spark.java @@ -108,7 +108,7 @@ public interface Spark { * * @param maxSamplers the maximum amount of active samplers the profiler can manage * @return the profiler - * @throws IllegalArgumentException if {@code maxSamplers} <= 0 + * @throws IllegalArgumentException if {@code maxSamplers <= 0} */ @NonNull Profiler profiler(int maxSamplers); diff --git a/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java b/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java index 5dfb480b..f0724b26 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java +++ b/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java @@ -26,6 +26,7 @@ package me.lucko.spark.api.heap; import com.google.errorprone.annotations.CanIgnoreReturnValue; +import me.lucko.spark.api.util.Sender; import me.lucko.spark.proto.SparkHeapProtos; import org.jetbrains.annotations.NotNull; @@ -35,7 +36,7 @@ /** * Represents the result of a heap summary. 
* - * @see HeapAnalysis#summary() + * @see HeapAnalysis#summary(Sender) */ public interface HeapSummaryReport { /** From f63d495ded86742199a8a0232594d18aa4275028 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Mon, 25 Jul 2022 17:41:28 +0300 Subject: [PATCH 16/29] Update mods.toml --- spark-forge/src/main/resources/META-INF/mods.toml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/spark-forge/src/main/resources/META-INF/mods.toml b/spark-forge/src/main/resources/META-INF/mods.toml index e892e24f..f610d335 100644 --- a/spark-forge/src/main/resources/META-INF/mods.toml +++ b/spark-forge/src/main/resources/META-INF/mods.toml @@ -15,3 +15,9 @@ description="${pluginDescription}" versionRange="[34,)" ordering="NONE" side="BOTH" +[[dependencies.spark]] + modId="minecraft" + mandatory=true + versionRange="[1.19,)" + ordering="NONE" + side="BOTH" \ No newline at end of file From 88604b9f9f84b63a0741414b4ad01cc8664cf713 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Tue, 26 Jul 2022 16:06:22 +0300 Subject: [PATCH 17/29] Expose ping monitoring --- .../main/java/me/lucko/spark/api/Spark.java | 9 +++ .../lucko/spark/api/ping/PingStatistics.java | 63 +++++++++++++++++ .../me/lucko/spark/api}/ping/PingSummary.java | 29 ++++---- .../me/lucko/spark/api/ping/PlayerPing.java | 67 +++++++++++++++++++ .../me/lucko/spark/common/api/SparkApi.java | 6 ++ .../common/command/modules/HealthModule.java | 5 +- .../common/monitor/ping/PingStatistics.java | 34 +++++----- 7 files changed, 180 insertions(+), 33 deletions(-) create mode 100644 spark-api/src/main/java/me/lucko/spark/api/ping/PingStatistics.java rename {spark-common/src/main/java/me/lucko/spark/common/monitor => spark-api/src/main/java/me/lucko/spark/api}/ping/PingSummary.java (53%) create mode 100644 spark-api/src/main/java/me/lucko/spark/api/ping/PlayerPing.java diff --git a/spark-api/src/main/java/me/lucko/spark/api/Spark.java b/spark-api/src/main/java/me/lucko/spark/api/Spark.java index b24ca324..7c5dc95a 100644 --- 
a/spark-api/src/main/java/me/lucko/spark/api/Spark.java +++ b/spark-api/src/main/java/me/lucko/spark/api/Spark.java @@ -27,6 +27,7 @@ import me.lucko.spark.api.gc.GarbageCollector; import me.lucko.spark.api.heap.HeapAnalysis; +import me.lucko.spark.api.ping.PingStatistics; import me.lucko.spark.api.profiler.Profiler; import me.lucko.spark.api.profiler.ProfilerConfigurationBuilder; import me.lucko.spark.api.profiler.thread.ThreadGrouper; @@ -47,6 +48,7 @@ /** * The spark API. + * @see SparkProvider#get() */ public interface Spark { @@ -127,4 +129,11 @@ public interface Spark { * @return the heap analysis instance */ @NonNull HeapAnalysis heapAnalysis(); + + /** + * Gets a {@link PingStatistics} instance. + * + * @return the ping statistics instance, or {@code null} if the platform cannot provide that info + */ + @Nullable PingStatistics ping(); } diff --git a/spark-api/src/main/java/me/lucko/spark/api/ping/PingStatistics.java b/spark-api/src/main/java/me/lucko/spark/api/ping/PingStatistics.java new file mode 100644 index 00000000..d24fb5a9 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/ping/PingStatistics.java @@ -0,0 +1,63 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package me.lucko.spark.api.ping; + +import me.lucko.spark.api.Spark; +import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; +import org.checkerframework.checker.nullness.qual.Nullable; +import org.jetbrains.annotations.NotNull; + +/** + * Utility interface used for ping analysis. + * + * @see Spark#ping() + */ +public interface PingStatistics { + /** + * Queries a summary of current player pings. + * + * @return a summary of current pings + */ + @NotNull + PingSummary summary(); + + /** + * Gets the ping average. + * + * @return the average + */ + @NotNull + DoubleAverageInfo average(); + + /** + * Queries the ping of a given player. + * + * @param playerName the name of the player + * @return the ping, if available + */ + @Nullable + PlayerPing query(String playerName); +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java b/spark-api/src/main/java/me/lucko/spark/api/ping/PingSummary.java similarity index 53% rename from spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java rename to spark-api/src/main/java/me/lucko/spark/api/ping/PingSummary.java index 024d27d9..7d3723a4 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingSummary.java +++ b/spark-api/src/main/java/me/lucko/spark/api/ping/PingSummary.java @@ -1,24 +1,29 @@ /* - * This file is part of spark. + * This file is part of spark, licensed under the MIT License. 
* * Copyright (c) lucko (Luck) * Copyright (c) contributors * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
*/ -package me.lucko.spark.common.monitor.ping; +package me.lucko.spark.api.ping; import java.util.Arrays; diff --git a/spark-api/src/main/java/me/lucko/spark/api/ping/PlayerPing.java b/spark-api/src/main/java/me/lucko/spark/api/ping/PlayerPing.java new file mode 100644 index 00000000..3c1027d8 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/ping/PlayerPing.java @@ -0,0 +1,67 @@ +/* + * This file is part of spark, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package me.lucko.spark.api.ping; + +import java.util.Objects; + +public final class PlayerPing { + private final String name; + private final int ping; + + public PlayerPing(String name, int ping) { + this.name = name; + this.ping = ping; + } + + public String name() { + return this.name; + } + + public int ping() { + return this.ping; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PlayerPing that = (PlayerPing) o; + return ping == that.ping && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name, ping); + } + + @Override + public String toString() { + return "PlayerPing{" + + "name='" + name + '\'' + + ", ping=" + ping + + '}'; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java index 9d8eb1ad..38290153 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java @@ -25,6 +25,7 @@ import me.lucko.spark.api.SparkProvider; import me.lucko.spark.api.gc.GarbageCollector; import me.lucko.spark.api.heap.HeapAnalysis; +import me.lucko.spark.api.ping.PingStatistics; import me.lucko.spark.api.profiler.Profiler; import me.lucko.spark.api.profiler.ProfilerConfigurationBuilder; import me.lucko.spark.api.profiler.thread.ThreadGrouper; @@ -306,4 +307,9 @@ public SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper asProto() public @NonNull HeapAnalysis heapAnalysis() { return heapAnalysis; } + + @Override + public @Nullable PingStatistics ping() { + return platform.getPingStatistics(); + } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java index 16eadc8a..a34d3398 100644 --- 
a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java @@ -20,6 +20,7 @@ package me.lucko.spark.common.command.modules; +import me.lucko.spark.api.ping.PlayerPing; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.Arguments; import me.lucko.spark.common.command.Command; @@ -33,7 +34,7 @@ import me.lucko.spark.common.monitor.net.NetworkInterfaceAverages; import me.lucko.spark.common.monitor.net.NetworkMonitor; import me.lucko.spark.common.monitor.ping.PingStatistics; -import me.lucko.spark.common.monitor.ping.PingSummary; +import me.lucko.spark.api.ping.PingSummary; import me.lucko.spark.common.monitor.tick.TickStatistics; import me.lucko.spark.common.util.FormatUtil; import me.lucko.spark.common.util.RollingAverage; @@ -150,7 +151,7 @@ private static void ping(SparkPlatform platform, CommandSender sender, CommandRe Set players = arguments.stringFlag("player"); if (!players.isEmpty()) { for (String player : players) { - PingStatistics.PlayerPing playerPing = pingStatistics.query(player); + PlayerPing playerPing = pingStatistics.query(player); if (playerPing == null) { resp.replyPrefixed(text("Ping data is not available for '" + player + "'.")); } else { diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java index 49fcbe1b..5e8c14b7 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java @@ -20,10 +20,14 @@ package me.lucko.spark.common.monitor.ping; +import me.lucko.spark.api.ping.PingSummary; +import me.lucko.spark.api.ping.PlayerPing; +import me.lucko.spark.api.statistic.misc.DoubleAverageInfo; import me.lucko.spark.common.monitor.MonitoringExecutor; import 
me.lucko.spark.common.util.RollingAverage; import org.checkerframework.checker.nullness.qual.Nullable; +import org.jetbrains.annotations.NotNull; import java.math.BigDecimal; import java.util.Map; @@ -33,7 +37,7 @@ /** * Provides statistics for player ping RTT to the server. */ -public final class PingStatistics implements Runnable, AutoCloseable { +public final class PingStatistics implements Runnable, AutoCloseable, me.lucko.spark.api.ping.PingStatistics { private static final int QUERY_RATE_SECONDS = 10; private static final int WINDOW_SIZE_SECONDS = (int) TimeUnit.MINUTES.toSeconds(15); // 900 private static final int WINDOW_SIZE = WINDOW_SIZE_SECONDS / QUERY_RATE_SECONDS; // 90 @@ -100,6 +104,16 @@ public PingSummary currentSummary() { : new PingSummary(values); } + @Override + public @NotNull PingSummary summary() { + return currentSummary(); + } + + @Override + public @NotNull DoubleAverageInfo average() { + return getPingAverage(); + } + /** * Queries the ping of a given player. 
* @@ -128,22 +142,4 @@ public PingSummary currentSummary() { return null; } - public static final class PlayerPing { - private final String name; - private final int ping; - - PlayerPing(String name, int ping) { - this.name = name; - this.ping = ping; - } - - public String name() { - return this.name; - } - - public int ping() { - return this.ping; - } - } - } From d6db984bc6b935fbadf74c201e99c89cb3cb5259 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Thu, 28 Jul 2022 11:35:39 +0300 Subject: [PATCH 18/29] Rename some methods --- inquisition.gradle | 60 +++++++++++++++++++ .../main/java/me/lucko/spark/api/Spark.java | 25 ++++---- .../lucko/spark/api/ping/PingStatistics.java | 4 +- .../api/profiler/ProfilerConfiguration.java | 10 ++-- .../profiler/report/ReportConfiguration.java | 11 +++- .../report/ReportConfigurationBuilder.java | 10 +++- .../api/profiler/thread/ThreadGrouper.java | 6 +- .../me/lucko/spark/common/api/SparkApi.java | 2 +- .../common/monitor/ping/PingStatistics.java | 4 +- .../spark/common/sampler/AbstractSampler.java | 2 +- .../spark/common/sampler/ProfilerService.java | 14 ++--- .../spark/common/sampler/SamplerBuilder.java | 10 ++-- 12 files changed, 116 insertions(+), 42 deletions(-) create mode 100644 inquisition.gradle diff --git a/inquisition.gradle b/inquisition.gradle new file mode 100644 index 00000000..e4403001 --- /dev/null +++ b/inquisition.gradle @@ -0,0 +1,60 @@ +final def cfg = { Project proj -> + proj.afterEvaluate { + proj.publishing { + repositories { + maven { + name = 'inquisition' + url = 'https://maven.moddinginquisition.org/snapshots' + credentials { + username = findProperty('inquisitionMavenUser') ?: '' + password = findProperty('inquisitionMavenPassword') ?: '' + } + } + } + } + } +} +cfg.call(project(':spark-proto')) +cfg.call(project(':spark-api')) +project(':spark-forge').apply plugin: 'maven-publish' + +project(':spark-forge').afterEvaluate { Project proj -> + proj.group = 'me.lucko.spark' + def actualVersion = 
"$api_version" + proj.shadowJar { + archiveName = "spark-forge-${actualVersion}.jar" + configurations = [project.configurations.shade] + classifier '' + finalizedBy('reobfShadowJar') + } + proj.jar { + classifier 'lite' + } + proj.publishing { + repositories { + maven { + name = 'inquisition' + url = 'https://maven.moddinginquisition.org/snapshots' + credentials { + username = findProperty('inquisitionMavenUser') ?: '' + password = findProperty('inquisitionMavenPassword') ?: '' + } + } + } + publications { + mavenJava(MavenPublication) { + artifacts = [ + proj.shadowJar + ] + group = 'me.lucko.spark' + artifactId = 'spark-forge' + version = actualVersion + pom { + name = 'spark' + description = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.' + url = 'https://spark.lucko.me/' + } + } + } + } +} \ No newline at end of file diff --git a/spark-api/src/main/java/me/lucko/spark/api/Spark.java b/spark-api/src/main/java/me/lucko/spark/api/Spark.java index 7c5dc95a..d098cd80 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/Spark.java +++ b/spark-api/src/main/java/me/lucko/spark/api/Spark.java @@ -38,6 +38,7 @@ import me.lucko.spark.proto.SparkSamplerProtos.SamplerMetadata.DataAggregator; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; +import org.jetbrains.annotations.ApiStatus; import org.jetbrains.annotations.Unmodifiable; import java.util.Map; @@ -114,15 +115,6 @@ public interface Spark { */ @NonNull Profiler profiler(int maxSamplers); - /** - * Gets the {@link ThreadGrouper} associated with a Proto {@link DataAggregator.ThreadGrouper}. - * - * @param type the Proto type - * @return the grouper - * @throws AssertionError if the type is {@link DataAggregator.ThreadGrouper#UNRECOGNIZED unknown}. - */ - @NonNull ThreadGrouper getGrouper(DataAggregator.ThreadGrouper type); - /** * Gets a {@link HeapAnalysis} instance. 
* @@ -136,4 +128,17 @@ public interface Spark { * @return the ping statistics instance, or {@code null} if the platform cannot provide that info */ @Nullable PingStatistics ping(); -} + + /** + * Gets the {@link ThreadGrouper} associated with a Proto {@link DataAggregator.ThreadGrouper}. + * + * @param type the Proto type + * @return the grouper + * @see ThreadGrouper#BY_POOL + * @see ThreadGrouper#BY_NAME + * @see ThreadGrouper#AS_ONE + * @throws AssertionError if the type is {@link DataAggregator.ThreadGrouper#UNRECOGNIZED unknown}. + */ + @ApiStatus.Internal + @NonNull ThreadGrouper grouper(DataAggregator.ThreadGrouper type); +} \ No newline at end of file diff --git a/spark-api/src/main/java/me/lucko/spark/api/ping/PingStatistics.java b/spark-api/src/main/java/me/lucko/spark/api/ping/PingStatistics.java index d24fb5a9..8e61e35f 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/ping/PingStatistics.java +++ b/spark-api/src/main/java/me/lucko/spark/api/ping/PingStatistics.java @@ -42,7 +42,7 @@ public interface PingStatistics { * @return a summary of current pings */ @NotNull - PingSummary summary(); + PingSummary getSummary(); /** * Gets the ping average. @@ -50,7 +50,7 @@ public interface PingStatistics { * @return the average */ @NotNull - DoubleAverageInfo average(); + DoubleAverageInfo getAverage(); /** * Queries the ping of a given player. diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java index c83d4e8d..54febe77 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java @@ -45,7 +45,7 @@ static ProfilerConfigurationBuilder builder() { * * @return the sample interval */ - double interval(); + double getInterval(); /** * Get if sleeping threads should be ignored. 
@@ -74,7 +74,7 @@ static ProfilerConfigurationBuilder builder() { * * @return the minimum tick duration */ - int minimumTickDuration(); + int getMinimumTickDuration(); /** * Get how long the profiler should run, if the duration is null, the profiler runs indefinite. @@ -82,7 +82,7 @@ static ProfilerConfigurationBuilder builder() { * @return duration of the profile or null if indefinite */ @Nullable - Duration duration(); + Duration getDuration(); /** * Get the choice of which dumper to use (i.e. ALL, Regex or Specific). @@ -91,7 +91,7 @@ static ProfilerConfigurationBuilder builder() { * @return the thread dumper choice */ @Nullable - ThreadDumper dumper(); + ThreadDumper getDumper(); /** * Get the choice of which thread grouper ({@link ThreadGrouper#AS_ONE}, {@link ThreadGrouper#BY_NAME}, {@link ThreadGrouper#BY_POOL}) to use for this profiler. @@ -100,5 +100,5 @@ static ProfilerConfigurationBuilder builder() { * @return the thread grouper choice */ @Nullable - ThreadGrouper grouper(); + ThreadGrouper getGrouper(); } diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java index fe1b0904..f21d3bd5 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java @@ -28,6 +28,7 @@ import me.lucko.spark.api.profiler.thread.ThreadNode; import me.lucko.spark.api.profiler.thread.ThreadOrder; import me.lucko.spark.api.util.Sender; +import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Comparator; @@ -40,13 +41,17 @@ static ReportConfigurationBuilder builder() { return new ReportConfigurationBuilder(); } + static ReportConfiguration onlySender(@NotNull Sender sender) { + return builder().sender(sender).build(); + } + /** * Gets the ordering used by the report. 
* * @return the ordering used by the report * @see ThreadOrder */ - Comparator threadOrder(); + Comparator getThreadOrder(); /** * Gets the sender of the report @@ -54,7 +59,7 @@ static ReportConfigurationBuilder builder() { * @return the report's sender, or else {@code null} */ @Nullable - Sender sender(); + Sender getSender(); /** * If the thread viewer should separate parent calls. @@ -69,6 +74,6 @@ static ReportConfigurationBuilder builder() { * @return the report's comment */ @Nullable - String comment(); + String getComment(); } diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java index 64d75f2d..666ecceb 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfigurationBuilder.java @@ -56,6 +56,10 @@ public ReportConfigurationBuilder sender(@Nullable Sender sender) { return this; } + public ReportConfigurationBuilder sender(@NonNull String name) { + return sender(new Sender(name, null)); + } + public ReportConfigurationBuilder sender(@NonNull String name, @Nullable UUID uuid) { return sender(new Sender(name, uuid)); } @@ -73,12 +77,12 @@ public ReportConfigurationBuilder comment(@Nullable String comment) { public ReportConfiguration build() { return new ReportConfiguration() { @Override - public Comparator threadOrder() { + public Comparator getThreadOrder() { return order; } @Override - public @Nullable Sender sender() { + public @Nullable Sender getSender() { return sender; } @@ -88,7 +92,7 @@ public boolean separateParentCalls() { } @Override - public @Nullable String comment() { + public @Nullable String getComment() { return comment; } }; diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java 
b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java index ab5d765c..bda1ca52 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/thread/ThreadGrouper.java @@ -36,7 +36,7 @@ public interface ThreadGrouper { /** * Implementation of {@link ThreadGrouper} that just groups by thread name. */ - ThreadGrouper BY_NAME = SparkProvider.get().getGrouper(DataAggregator.ThreadGrouper.BY_NAME); + ThreadGrouper BY_NAME = SparkProvider.get().grouper(DataAggregator.ThreadGrouper.BY_NAME); /** * Implementation of {@link ThreadGrouper} that attempts to group by the name of the pool @@ -45,13 +45,13 @@ public interface ThreadGrouper { *

The regex pattern used to match pools expects a digit at the end of the thread name, * separated from the pool name with any of one or more of ' ', '-', or '#'.

*/ - ThreadGrouper BY_POOL = SparkProvider.get().getGrouper(DataAggregator.ThreadGrouper.BY_POOL); + ThreadGrouper BY_POOL = SparkProvider.get().grouper(DataAggregator.ThreadGrouper.BY_POOL); /** * Implementation of {@link ThreadGrouper} which groups all threads as one, under * the name "All". */ - ThreadGrouper AS_ONE = SparkProvider.get().getGrouper(DataAggregator.ThreadGrouper.AS_ONE); + ThreadGrouper AS_ONE = SparkProvider.get().grouper(DataAggregator.ThreadGrouper.AS_ONE); /** * Gets the group for the given thread. diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java index 38290153..8252bc1b 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java @@ -221,7 +221,7 @@ public static void unregister() { } @Override - public @NonNull ThreadGrouper getGrouper(SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper type) { + public @NonNull ThreadGrouper grouper(SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper type) { switch (type) { case AS_ONE: return new ThreadGrouper() { private final Set seen = ConcurrentHashMap.newKeySet(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java index 5e8c14b7..2138804d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java +++ b/spark-common/src/main/java/me/lucko/spark/common/monitor/ping/PingStatistics.java @@ -105,12 +105,12 @@ public PingSummary currentSummary() { } @Override - public @NotNull PingSummary summary() { + public @NotNull PingSummary getSummary() { return currentSummary(); } @Override - public @NotNull DoubleAverageInfo average() { + public @NotNull DoubleAverageInfo getAverage() { return getPingAverage(); } diff --git 
a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 25a1e748..0e5048eb 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -136,7 +136,7 @@ public ProfilerReport dumpReport(ReportConfiguration configuration) { private ProfilerReport createReport(ReportConfiguration configuration) { final MethodDisambiguator methodDisambiguator = new MethodDisambiguator(); return new ProfilerReport() { - final SparkSamplerProtos.SamplerData data = toProto(platform, configuration.sender(), configuration.threadOrder()::compare, configuration.comment(), configuration.separateParentCalls() ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator), platform.createClassSourceLookup()); + final SparkSamplerProtos.SamplerData data = toProto(platform, configuration.getSender(), configuration.getThreadOrder()::compare, configuration.getComment(), configuration.separateParentCalls() ? 
MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator), platform.createClassSourceLookup()); String uploadedUrl; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java index d7d5e235..1f1be413 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java @@ -66,20 +66,20 @@ public Sampler createSampler(ProfilerConfiguration configuration, ErrorHandler e return null; } - Duration duration = configuration.duration(); + Duration duration = configuration.getDuration(); if (duration != null && duration.getSeconds() < MINIMUM_DURATION) { err.accept("A profiler needs to run for at least " + MINIMUM_DURATION + " seconds!"); return null; } - double interval = configuration.interval(); + double interval = configuration.getInterval(); if (interval <= 0) { err.accept("Cannot run profiler with negative interval."); return null; } TickHook hook = null; - int minimum = configuration.minimumTickDuration(); + int minimum = configuration.getMinimumTickDuration(); if (minimum >= 0) { hook = platform.getTickHook(); if (hook == null) { @@ -93,11 +93,11 @@ public Sampler createSampler(ProfilerConfiguration configuration, ErrorHandler e final me.lucko.spark.common.sampler.Sampler sampler; if (minimum >= 1) { - sampler = new JavaSampler(this, platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative(), hook, configuration.minimumTickDuration()); - } else if (!configuration.forceJavaSampler() && !(configuration.dumper() instanceof RegexThreadDumper) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) { - sampler = new AsyncSampler(this, platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout); + sampler = new 
JavaSampler(this, platform, intervalMicros, configuration.getDumper(), configuration.getGrouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative(), hook, configuration.getMinimumTickDuration()); + } else if (!configuration.forceJavaSampler() && !(configuration.getDumper() instanceof RegexThreadDumper) && AsyncProfilerAccess.INSTANCE.checkSupported(platform)) { + sampler = new AsyncSampler(this, platform, intervalMicros, configuration.getDumper(), configuration.getGrouper(), timeout); } else { - sampler = new JavaSampler(this, platform, intervalMicros, configuration.dumper(), configuration.grouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative()); + sampler = new JavaSampler(this, platform, intervalMicros, configuration.getDumper(), configuration.getGrouper(), timeout, configuration.ignoreSleeping(), configuration.ignoreNative()); } return sampler; diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java index 1fd1d96c..7ef61359 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/SamplerBuilder.java @@ -128,7 +128,7 @@ public SamplerBuilder forceJavaSampler() { public ProfilerConfiguration build() { return new ProfilerConfiguration() { @Override - public double interval() { + public double getInterval() { return samplingInterval; } @@ -148,22 +148,22 @@ public boolean forceJavaSampler() { } @Override - public int minimumTickDuration() { + public int getMinimumTickDuration() { return minimumTickDuration; } @Override - public @Nullable Duration duration() { + public @Nullable Duration getDuration() { return duration; } @Override - public @Nullable ThreadDumper dumper() { + public @Nullable ThreadDumper getDumper() { return threadDumper; } @Override - public @Nullable ThreadGrouper grouper() { + public @Nullable 
ThreadGrouper getGrouper() { return threadGrouper; } }; From 822e8b539c636c5f84b40b4bd7b10877489696c6 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Thu, 28 Jul 2022 12:17:44 +0300 Subject: [PATCH 19/29] Remove file --- inquisition.gradle | 60 ---------------------------------------------- 1 file changed, 60 deletions(-) delete mode 100644 inquisition.gradle diff --git a/inquisition.gradle b/inquisition.gradle deleted file mode 100644 index e4403001..00000000 --- a/inquisition.gradle +++ /dev/null @@ -1,60 +0,0 @@ -final def cfg = { Project proj -> - proj.afterEvaluate { - proj.publishing { - repositories { - maven { - name = 'inquisition' - url = 'https://maven.moddinginquisition.org/snapshots' - credentials { - username = findProperty('inquisitionMavenUser') ?: '' - password = findProperty('inquisitionMavenPassword') ?: '' - } - } - } - } - } -} -cfg.call(project(':spark-proto')) -cfg.call(project(':spark-api')) -project(':spark-forge').apply plugin: 'maven-publish' - -project(':spark-forge').afterEvaluate { Project proj -> - proj.group = 'me.lucko.spark' - def actualVersion = "$api_version" - proj.shadowJar { - archiveName = "spark-forge-${actualVersion}.jar" - configurations = [project.configurations.shade] - classifier '' - finalizedBy('reobfShadowJar') - } - proj.jar { - classifier 'lite' - } - proj.publishing { - repositories { - maven { - name = 'inquisition' - url = 'https://maven.moddinginquisition.org/snapshots' - credentials { - username = findProperty('inquisitionMavenUser') ?: '' - password = findProperty('inquisitionMavenPassword') ?: '' - } - } - } - publications { - mavenJava(MavenPublication) { - artifacts = [ - proj.shadowJar - ] - group = 'me.lucko.spark' - artifactId = 'spark-forge' - version = actualVersion - pom { - name = 'spark' - description = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.' 
- url = 'https://spark.lucko.me/' - } - } - } - } -} \ No newline at end of file From 9d4c0e27f7dd9637c5db69120df02a4b6ffd015c Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Thu, 28 Jul 2022 13:00:23 +0300 Subject: [PATCH 20/29] Changes to the ERROR_HANDLER --- .../me/lucko/spark/api/profiler/Profiler.java | 11 ++++- .../me/lucko/spark/api/util/ErrorHandler.java | 43 +++++++++++++++++-- .../common/command/modules/SamplerModule.java | 4 +- .../spark/common/sampler/ProfilerService.java | 23 +++++----- 4 files changed, 62 insertions(+), 19 deletions(-) diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java index a7b4b100..b23ded06 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java @@ -70,6 +70,9 @@ public interface Profiler { /** * Stops this profiler and any {@link #activeSamplers() active children}.
+ * Note that {@link Sampler#onCompleted() completion callbacks} will not be completed. + * + * @see Sampler#stop() */ void stop(); @@ -77,13 +80,19 @@ public interface Profiler { * Represents a sampler used for profiling. */ interface Sampler { + /** + * The minimum amount of seconds a sampler may run for. + */ + int MINIMUM_DURATION = 10; + /** * Starts the sampler. */ void start(); /** - * Stops the sampler. + * Stops the sampler.
+ * Note that {@link #onCompleted() completion callbacks} will not be completed. */ void stop(); diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java b/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java index 8b1a389d..4cf34b7f 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java @@ -35,9 +35,44 @@ public interface ErrorHandler { /** * Accepts and reports an error. * - * @param error the error to report + * @param error the error to report + * @param message a detailed message of the error */ - void accept(String error); + void accept(ErrorType error, String message); + + /** + * Represents the type of an error. + * + * @see #accept(ErrorType, String) + */ + enum ErrorType { + /** + * Indicates that the maximum amount of active samplers the profiler can manage has been reached. + */ + MAX_AMOUNT_REACHED, + /** + * Indicates that the platform does not support tick counting. + */ + TICK_COUNTING_NOT_SUPPORTED, + /** + * Indicates that an invalid duration that the sampler should run for has been supplied. + * + * @see me.lucko.spark.api.profiler.Profiler.Sampler#MINIMUM_DURATION + */ + INVALID_DURATION, + + /** + * A more general error; indicates that an invalid argument for constructing the sampler has been provided.
+ * The message will include more information. + */ + INVALID_ARGUMENT, + + /** + * Represents an 'unknown' error type.
+ * The message will include more information. + */ + UNKNOWN + } /** * Creates an {@link ErrorHandler} that throws exceptions. @@ -46,8 +81,8 @@ public interface ErrorHandler { * @param the type of the exception * @return the handler */ - static ErrorHandler throwing(Function supplier) throws T { - return e -> throwAsUnchecked(supplier.apply(e)); + static ErrorHandler throwing(Function supplier) { + return (e, msg) -> throwAsUnchecked(supplier.apply(e.toString() + ": " + msg)); } @SuppressWarnings("unchecked") diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 4e61f225..047d583a 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -189,7 +189,7 @@ private void profilerStart(SparkPlatform platform, CommandResponseHandler resp, if (ticksOver != -1) { builder.minimumTickDuration(ticksOver); } - final Profiler.Sampler sampler = profiler.createSampler(builder.build(), e -> resp.replyPrefixed(text(e, RED))); + final Profiler.Sampler sampler = profiler.createSampler(builder.build(), (e, msg) -> resp.replyPrefixed(text(e.toString() + ": " + msg, RED))); if (sampler == null) // Feedback is handled in the consumer return; @@ -271,7 +271,7 @@ private void profilerStop(SparkPlatform platform, CommandResponseHandler resp, A handleUpload(platform, resp, sampler.dumpReport(configuration(resp, comment, sepParentCalls, threadOrder)), saveToFile); } } - + private Profiler.Sampler activeSampler() { if (profiler.activeSamplers().isEmpty()) return null; return profiler.activeSamplers().get(0); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java index 1f1be413..65d982b6 100644 --- 
a/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/ProfilerService.java @@ -20,6 +20,7 @@ package me.lucko.spark.common.sampler; +import com.google.common.collect.Lists; import me.lucko.spark.api.profiler.Profiler; import me.lucko.spark.api.profiler.ProfilerConfiguration; import me.lucko.spark.api.profiler.dumper.RegexThreadDumper; @@ -36,11 +37,11 @@ import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; +import static me.lucko.spark.api.profiler.Profiler.Sampler.MINIMUM_DURATION; + public class ProfilerService implements Profiler, SamplerManager { private final SparkPlatform platform; - public static final int MINIMUM_DURATION = 10; - private final int maxSamplers; private final List active; private final List activeView; @@ -59,22 +60,22 @@ public ProfilerService(SparkPlatform platform, int samplerAmount) { public Sampler createSampler(ProfilerConfiguration configuration, ErrorHandler err) { if (active.size() >= maxSamplers) { if (maxSamplers == 1) { - err.accept("A profiling sampler is already running!"); + err.accept(ErrorHandler.ErrorType.MAX_AMOUNT_REACHED, "A profiling sampler is already running!"); } else { - err.accept(String.format("Maximum amount of %s profiling samplers are already running!", active.size())); + err.accept(ErrorHandler.ErrorType.MAX_AMOUNT_REACHED, String.format("Maximum amount of %s profiling samplers are already running!", active.size())); } return null; } Duration duration = configuration.getDuration(); if (duration != null && duration.getSeconds() < MINIMUM_DURATION) { - err.accept("A profiler needs to run for at least " + MINIMUM_DURATION + " seconds!"); + err.accept(ErrorHandler.ErrorType.INVALID_DURATION, "A profiler needs to run for at least " + MINIMUM_DURATION + " seconds!"); return null; } double interval = configuration.getInterval(); if (interval <= 0) { - err.accept("Cannot run profiler with negative interval."); + 
err.accept(ErrorHandler.ErrorType.INVALID_ARGUMENT, "Cannot run profiler with negative interval."); return null; } @@ -83,7 +84,7 @@ public Sampler createSampler(ProfilerConfiguration configuration, ErrorHandler e if (minimum >= 0) { hook = platform.getTickHook(); if (hook == null) { - err.accept("Tick counting is not supported!"); + err.accept(ErrorHandler.ErrorType.TICK_COUNTING_NOT_SUPPORTED, "Tick counting is not supported!"); return null; } } @@ -115,11 +116,9 @@ public int maxSamplers() { @Override public void stop() { - // Prevent concurrent modifications - //noinspection ForLoopReplaceableByForEach - for (int i = 0; i < active.size(); i++) { - active.get(i).stop(); - } + // Copy the list of active samplers before stopping them, so we make sure we stop all of them + final List copy = Lists.newArrayList(active); + copy.forEach(Sampler::stop); } private static long computeTimeout(@Nullable Duration duration) { From d6fc6470d53aeb9ed5baaa25fe560810a8db0aab Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Thu, 28 Jul 2022 14:42:46 +0300 Subject: [PATCH 21/29] Add test mod --- api-test/build.gradle | 45 ++++++ .../java/me/lucko/spark/test/SparkTest.java | 146 ++++++++++++++++++ .../src/main/resources/META-INF/mods.toml | 10 ++ gradle.properties | 2 + settings.gradle | 9 +- .../me/lucko/spark/api/SparkProvider.java | 15 ++ .../api/profiler/ProfilerConfiguration.java | 5 + .../profiler/report/ReportConfiguration.java | 7 + .../me/lucko/spark/api/util/ErrorHandler.java | 7 +- .../java/me/lucko/spark/api/util/Sender.java | 3 + .../spark/common/sampler/AbstractSampler.java | 3 + spark-forge/build.gradle | 15 +- 12 files changed, 252 insertions(+), 15 deletions(-) create mode 100644 api-test/build.gradle create mode 100644 api-test/src/main/java/me/lucko/spark/test/SparkTest.java create mode 100644 api-test/src/main/resources/META-INF/mods.toml diff --git a/api-test/build.gradle b/api-test/build.gradle new file mode 100644 index 00000000..be4c8efd --- /dev/null +++ 
b/api-test/build.gradle @@ -0,0 +1,45 @@ +plugins { + id 'net.minecraftforge.gradle' version '5.1.+' +} + +tasks.withType(JavaCompile) { + // override, compile targeting J17 + options.release = 17 +} + +configurations { + library + implementation.extendsFrom library +} + +minecraft.runs.all { + lazyToken('minecraft_classpath') { + configurations.library.copyRecursive().resolve().collect { it.absolutePath }.join(File.pathSeparator) + } +} + +minecraft { + mappings channel: 'official', version: '1.19' + + runs { + client { + workingDirectory project.file('run') + property 'forge.enabledGameTestNamespaces', 'sparktest' + mods { + sparktest { + source sourceSets.main + } + } + } + } +} + +dependencies { + minecraft "net.minecraftforge:forge:${rootProject.forge_version}" + + compileOnly project(':spark-api') + compileOnly project(':spark-proto') + + library project(':spark-common') + runtimeOnly project(':spark-forge') +} \ No newline at end of file diff --git a/api-test/src/main/java/me/lucko/spark/test/SparkTest.java b/api-test/src/main/java/me/lucko/spark/test/SparkTest.java new file mode 100644 index 00000000..4a339a60 --- /dev/null +++ b/api-test/src/main/java/me/lucko/spark/test/SparkTest.java @@ -0,0 +1,146 @@ +package me.lucko.spark.test; + +import com.mojang.brigadier.Command; +import com.mojang.brigadier.context.CommandContext; +import cpw.mods.modlauncher.api.LamdbaExceptionUtils; +import me.lucko.spark.api.SparkProvider; +import me.lucko.spark.api.profiler.Profiler; +import me.lucko.spark.api.profiler.ProfilerConfiguration; +import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper; +import me.lucko.spark.api.profiler.dumper.ThreadDumper; +import me.lucko.spark.api.profiler.report.ProfilerReport; +import me.lucko.spark.api.profiler.report.ReportConfiguration; +import me.lucko.spark.api.profiler.thread.ThreadGrouper; +import me.lucko.spark.api.util.ErrorHandler; +import me.lucko.spark.proto.SparkSamplerProtos; +import 
me.lucko.spark.proto.SparkSamplerProtos.SamplerData; +import net.minecraft.commands.CommandRuntimeException; +import net.minecraft.commands.CommandSourceStack; +import net.minecraft.commands.Commands; +import net.minecraft.network.chat.Component; +import net.minecraftforge.event.RegisterCommandsEvent; +import net.minecraftforge.event.server.ServerStoppingEvent; +import net.minecraftforge.eventbus.api.SubscribeEvent; +import net.minecraftforge.fml.common.Mod; +import net.minecraftforge.fml.loading.FMLPaths; +import net.minecraftforge.server.ServerLifecycleHooks; + +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; +import java.time.Duration; + +import static net.minecraft.commands.Commands.literal; + +@Mod("sparktest") +@Mod.EventBusSubscriber +public class SparkTest { + + private static Profiler profiler; + private static Path savePath; + + public SparkTest() { + // Mod loading is parallel, so we're not assured that spark will be loaded before us + // As such, get the profiler once spark loads + SparkProvider.whenLoaded(spark -> profiler = spark.profiler(12) /* Request a profiler capable of managing 12 active samplers */); + + savePath = FMLPaths.GAMEDIR.get().resolve("sparktest"); + } + + @SubscribeEvent + static void serverStop(final ServerStoppingEvent event) { + profiler.stop(); + } + + @SubscribeEvent + static void registerCommand(final RegisterCommandsEvent event) { + event.getDispatcher().register(Commands.literal("sparktest") + .then(literal("test1") + .executes(throwingCommand(SparkTest::test1))) + .then(literal("test2") + .executes(throwingCommand(SparkTest::test2)))); + } + + private static void test1(CommandContext ctx) throws Exception { + final var source = ctx.getSource(); + source.sendFailure(Component.literal("Building sampler... 
stand by.")); + // Create the sampler + final Profiler.Sampler sampler = profiler.createSampler(ProfilerConfiguration.builder() + .dumper(new SpecificThreadDumper(ServerLifecycleHooks.getCurrentServer().getRunningThread())) + .grouper(ThreadGrouper.BY_NAME) + .ignoreSleeping() + .samplingInterval(12) + .forceJavaSampler() + .duration(Duration.ofSeconds(20)) + .build(), ErrorHandler.throwing(IllegalArgumentException::new)); + if (sampler == null) + return; + + sampler.start(); // Start the sampler + + source.sendSuccess(Component.literal("Started sampler. Please await the results in the next 20 seconds."), false); + + // Await sampler completion and execute callback once the sampler is completed + sampler.onCompleted(ReportConfiguration.builder() + .separateParentCalls(true).build()) + .whenComplete(LamdbaExceptionUtils.rethrowBiConsumer((report, t) -> { + final SamplerData data = report.data(); + source.sendSuccess(Component.literal("Profiling done. Profiled threads: " + data.getThreadsList() + .stream() + .map(SparkSamplerProtos.ThreadNode::getName) + .toList()), false); + final Path path = report.saveToFile(savePath.resolve("test1.sparkprofile")); + try (final var is = Files.newInputStream(path)) { + final SamplerData fromBytes = SparkSamplerProtos.SamplerData.parseFrom(is); + final var isEqual = data.equals(fromBytes); + if (isEqual) { + source.sendSuccess(Component.literal("Results from bytes and from memory are equal!"), false); + } else { + source.sendFailure(Component.literal("Results from bytes and from memory do not match!")); + } + } + })); + } + + private static void test2(final CommandContext context) throws Exception { + final var source = context.getSource(); + source.sendFailure(Component.literal("Building sampler... 
Please stand by.")); + // Create the sampler + final Profiler.Sampler sampler = profiler.createSampler(ProfilerConfiguration.builder() + .dumper(ThreadDumper.ALL) + .grouper(ThreadGrouper.AS_ONE) + .ignoreNative() + .build(), ErrorHandler.throwing(IllegalArgumentException::new)); + if (sampler == null) + return; + + sampler.start(); // Start the profiler + source.sendSuccess(Component.literal("Profiler started..."), true); + Thread.sleep(1000 * 5); // Wait 5 seconds + sampler.stop(); // Stop the profiler + + // Dump the report + final ProfilerReport report = sampler.dumpReport(ReportConfiguration.onlySender("My test")); + final Path saveFile = report.saveToFile(savePath.resolve("test2.sparkprofile")); // Save the report + try (final var localIs = Files.newInputStream(saveFile)) { + final SamplerData data = report.data(); + final SamplerData fromLocal = SamplerData.parseFrom(localIs); + if (data.equals(fromLocal)) { + source.sendSuccess(Component.literal("Results from local file and memory are equal!"), false); + } else { + source.sendFailure(Component.literal("Results do not match!")); + } + } + } + + private static Command throwingCommand(LamdbaExceptionUtils.Consumer_WithExceptions, Exception> consumer) { + return ctx -> { + try { + consumer.accept(ctx); + return 1; + } catch (Exception e) { + throw new CommandRuntimeException(Component.literal(e.toString())); + } + }; + } +} diff --git a/api-test/src/main/resources/META-INF/mods.toml b/api-test/src/main/resources/META-INF/mods.toml new file mode 100644 index 00000000..ed4d7b61 --- /dev/null +++ b/api-test/src/main/resources/META-INF/mods.toml @@ -0,0 +1,10 @@ +modLoader="javafml" +loaderVersion="[34,)" +authors="Luck" +license="GPLv3" + +[[mods]] +modId="sparktest" +displayName="sparktest" +version="1.0.0" +description="Spark testing" \ No newline at end of file diff --git a/gradle.properties b/gradle.properties index 8f769909..31552c8a 100644 --- a/gradle.properties +++ b/gradle.properties @@ -4,4 +4,6 @@ 
org.gradle.parallel=true # thanks, forge org.gradle.daemon=false +forge_version=1.19-41.0.11 + api_version=1.0.0 \ No newline at end of file diff --git a/settings.gradle b/settings.gradle index a7863270..7eaa0e55 100644 --- a/settings.gradle +++ b/settings.gradle @@ -4,6 +4,10 @@ pluginManagement { name = 'Fabric' url = 'https://maven.fabricmc.net/' } + maven { + name = 'Forge' + url = "https://maven.minecraftforge.net" + } gradlePluginPortal() } } @@ -23,5 +27,8 @@ include ( 'spark-fabric', 'spark-nukkit', 'spark-waterdog', - 'spark-minestom' + 'spark-minestom', + + // A Forge project for testing the API + 'api-test' ) diff --git a/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java b/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java index 2106ed4e..e82d622f 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java +++ b/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java @@ -27,11 +27,16 @@ import org.checkerframework.checker.nullness.qual.NonNull; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Consumer; + /** * Singleton provider for {@link Spark}. */ public final class SparkProvider { + private static final List> WHEN_LOADED = new CopyOnWriteArrayList<>(); private static Spark instance; /** @@ -47,9 +52,19 @@ public final class SparkProvider { return instance; } + /** + * Registers a listener called when spark is loaded. 
+ * + * @param listener the listener + */ + public static void whenLoaded(Consumer listener) { + WHEN_LOADED.add(listener); + } + @SuppressWarnings("unused") static void set(Spark impl) { SparkProvider.instance = impl; + WHEN_LOADED.forEach(cons -> cons.accept(impl)); } private SparkProvider() { diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java index 54febe77..b9cba658 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/ProfilerConfiguration.java @@ -40,6 +40,11 @@ static ProfilerConfigurationBuilder builder() { return SparkProvider.get().configurationBuilder(); } + /** + * The default profiler configuration + */ + ProfilerConfiguration DEFAULT = builder().build(); + /** * Get the interval (in millis) of when the profiler should take samples. * diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java index f21d3bd5..2ea7938d 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ReportConfiguration.java @@ -32,6 +32,7 @@ import org.jetbrains.annotations.Nullable; import java.util.Comparator; +import java.util.UUID; /** * Configuration for {@link ProfilerReport reports}. 
@@ -44,6 +45,12 @@ static ReportConfigurationBuilder builder() { static ReportConfiguration onlySender(@NotNull Sender sender) { return builder().sender(sender).build(); } + static ReportConfiguration onlySender(@NotNull String sender) { + return onlySender(new Sender(sender, null)); + } + static ReportConfiguration onlySender(@NotNull String sender, @Nullable UUID uuid) { + return onlySender(new Sender(sender, uuid)); + } /** * Gets the ordering used by the report. diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java b/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java index 4cf34b7f..fb175ed6 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java @@ -25,6 +25,9 @@ package me.lucko.spark.api.util; +import org.jetbrains.annotations.ApiStatus; +import org.jetbrains.annotations.NotNull; + import java.util.function.Function; /** @@ -81,10 +84,12 @@ enum ErrorType { * @param the type of the exception * @return the handler */ - static ErrorHandler throwing(Function supplier) { + @NotNull + static ErrorHandler throwing(@NotNull Function supplier) { return (e, msg) -> throwAsUnchecked(supplier.apply(e.toString() + ": " + msg)); } + @ApiStatus.Internal @SuppressWarnings("unchecked") static void throwAsUnchecked(Throwable exception) throws E { throw (E) exception; diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/Sender.java b/spark-api/src/main/java/me/lucko/spark/api/util/Sender.java index 10eb987e..8b9dc7b8 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/util/Sender.java +++ b/spark-api/src/main/java/me/lucko/spark/api/util/Sender.java @@ -45,6 +45,9 @@ public Sender(String name, @Nullable UUID uuid) { this.name = name; this.uuid = uuid; } + public Sender(String name) { + this(name, null); + } /** * Checks if this sender is a player. 
diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 0e5048eb..43498ddc 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -157,6 +157,9 @@ public SparkSamplerProtos.SamplerData data() { @Override @NotNull public Path saveToFile(Path path) throws IOException { + if (path.getParent() != null) + Files.createDirectories(path.getParent()); + Files.deleteIfExists(path); return Files.write(path, data.toByteArray()); } }; diff --git a/spark-forge/build.gradle b/spark-forge/build.gradle index 46ac08cf..ac261e4f 100644 --- a/spark-forge/build.gradle +++ b/spark-forge/build.gradle @@ -1,19 +1,8 @@ -buildscript { - repositories { - maven { url = "https://maven.minecraftforge.net" } - mavenCentral() - } - dependencies { - classpath group: 'net.minecraftforge.gradle', name: 'ForgeGradle', version: '5.1.+', changing: true - } -} - plugins { id 'com.github.johnrengelman.shadow' version '7.0.0' + id 'net.minecraftforge.gradle' version '5.1.+' } -apply plugin: 'net.minecraftforge.gradle' - tasks.withType(JavaCompile) { // override, compile targeting J17 options.release = 17 @@ -30,7 +19,7 @@ configurations { } dependencies { - minecraft 'net.minecraftforge:forge:1.19-41.0.11' + minecraft "net.minecraftforge:forge:${rootProject.forge_version}" shade project(':spark-common') } From bb37a4e4f357f701eec174a26a16fd1cb2aa90b0 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Fri, 29 Jul 2022 17:44:55 +0300 Subject: [PATCH 22/29] Fix --- .../me/lucko/spark/api/SparkProvider.java | 19 ++++++++++- .../me/lucko/spark/common/api/SparkApi.java | 32 +++++++++---------- 2 files changed, 34 insertions(+), 17 deletions(-) diff --git a/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java 
b/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java index e82d622f..ca41e260 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java +++ b/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java @@ -37,6 +37,7 @@ public final class SparkProvider { private static final List> WHEN_LOADED = new CopyOnWriteArrayList<>(); + private static final List WHEN_UNLOADED = new CopyOnWriteArrayList<>(); private static Spark instance; /** @@ -61,10 +62,26 @@ public static void whenLoaded(Consumer listener) { WHEN_LOADED.add(listener); } + /** + * Registers a listener called when spark is unloaded. + * + * @param listener the listener + */ + public static void whenUnloaded(Runnable listener) { + WHEN_UNLOADED.add(listener); + } + @SuppressWarnings("unused") static void set(Spark impl) { SparkProvider.instance = impl; - WHEN_LOADED.forEach(cons -> cons.accept(impl)); + // If null, we are unregistered + if (impl == null) { + WHEN_UNLOADED.forEach(Runnable::run); + } + // If non-null we are registered + else { + WHEN_LOADED.forEach(cons -> cons.accept(impl)); + } } private SparkProvider() { diff --git a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java index 8252bc1b..4f382840 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java +++ b/spark-common/src/main/java/me/lucko/spark/common/api/SparkApi.java @@ -188,22 +188,6 @@ public DoubleAverageInfo poll(@NonNull MillisPerTick window) { return ImmutableMap.copyOf(map); } - public static void register(Spark spark) { - try { - SINGLETON_SET_METHOD.invoke(null, spark); - } catch (ReflectiveOperationException e) { - e.printStackTrace(); - } - } - - public static void unregister() { - try { - SINGLETON_SET_METHOD.invoke(null, new Object[]{null}); - } catch (ReflectiveOperationException e) { - e.printStackTrace(); - } - } - @Override public @NonNull StreamSupplier threadFinder() { final 
ThreadFinder finder = new ThreadFinder(); @@ -312,4 +296,20 @@ public SparkSamplerProtos.SamplerMetadata.DataAggregator.ThreadGrouper asProto() public @Nullable PingStatistics ping() { return platform.getPingStatistics(); } + + public static void register(Spark spark) { + try { + SINGLETON_SET_METHOD.invoke(null, spark); + } catch (ReflectiveOperationException e) { + e.printStackTrace(); + } + } + + public static void unregister() { + try { + SINGLETON_SET_METHOD.invoke(null, new Object[]{null}); + } catch (ReflectiveOperationException e) { + e.printStackTrace(); + } + } } From b52a58f323784132bac2d2d716d859e18f2766ec Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 31 Jul 2022 19:35:20 +0300 Subject: [PATCH 23/29] Add a static get method in spark --- .../src/main/java/me/lucko/spark/api/Spark.java | 12 +++++++++++- .../main/java/me/lucko/spark/api/SparkProvider.java | 2 +- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/spark-api/src/main/java/me/lucko/spark/api/Spark.java b/spark-api/src/main/java/me/lucko/spark/api/Spark.java index d098cd80..49a061f5 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/Spark.java +++ b/spark-api/src/main/java/me/lucko/spark/api/Spark.java @@ -49,10 +49,20 @@ /** * The spark API. - * @see SparkProvider#get() + * @see #get() */ public interface Spark { + /** + * Gets the singleton spark API instance. + * + * @return the spark API instance + * @see SparkProvider#get() + */ + static @NonNull Spark get() { + return SparkProvider.get(); + } + /** * Gets the CPU usage statistic for the current process. * diff --git a/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java b/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java index ca41e260..91ea648a 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java +++ b/spark-api/src/main/java/me/lucko/spark/api/SparkProvider.java @@ -34,6 +34,7 @@ /** * Singleton provider for {@link Spark}. 
*/ +@SuppressWarnings("unused") public final class SparkProvider { private static final List> WHEN_LOADED = new CopyOnWriteArrayList<>(); @@ -71,7 +72,6 @@ public static void whenUnloaded(Runnable listener) { WHEN_UNLOADED.add(listener); } - @SuppressWarnings("unused") static void set(Spark impl) { SparkProvider.instance = impl; // If null, we are unregistered From e916af5f7635ed5b7c657186f34f9deeb1f0948f Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 31 Jul 2022 19:36:20 +0300 Subject: [PATCH 24/29] Add license --- .../java/me/lucko/spark/test/SparkTest.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/api-test/src/main/java/me/lucko/spark/test/SparkTest.java b/api-test/src/main/java/me/lucko/spark/test/SparkTest.java index 4a339a60..781783e0 100644 --- a/api-test/src/main/java/me/lucko/spark/test/SparkTest.java +++ b/api-test/src/main/java/me/lucko/spark/test/SparkTest.java @@ -1,3 +1,23 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + package me.lucko.spark.test; import com.mojang.brigadier.Command; From d408550269143ab3d4d9f680314de77cd1d203e5 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 31 Jul 2022 19:41:11 +0300 Subject: [PATCH 25/29] ErrorHandler changes --- .../java/me/lucko/spark/test/SparkTest.java | 5 ++--- .../me/lucko/spark/api/util/ErrorHandler.java | 19 +++++++++++++++++-- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/api-test/src/main/java/me/lucko/spark/test/SparkTest.java b/api-test/src/main/java/me/lucko/spark/test/SparkTest.java index 781783e0..20c2a84c 100644 --- a/api-test/src/main/java/me/lucko/spark/test/SparkTest.java +++ b/api-test/src/main/java/me/lucko/spark/test/SparkTest.java @@ -45,7 +45,6 @@ import net.minecraftforge.fml.loading.FMLPaths; import net.minecraftforge.server.ServerLifecycleHooks; -import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.time.Duration; @@ -92,7 +91,7 @@ private static void test1(CommandContext ctx) throws Excepti .samplingInterval(12) .forceJavaSampler() .duration(Duration.ofSeconds(20)) - .build(), ErrorHandler.throwing(IllegalArgumentException::new)); + .build(), ErrorHandler.throwingConcat(IllegalArgumentException::new)); if (sampler == null) return; @@ -130,7 +129,7 @@ private static void test2(final CommandContext context) thro .dumper(ThreadDumper.ALL) .grouper(ThreadGrouper.AS_ONE) .ignoreNative() - .build(), ErrorHandler.throwing(IllegalArgumentException::new)); + .build(), ErrorHandler.throwingConcat(IllegalArgumentException::new)); if (sampler == null) return; diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java b/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java index fb175ed6..ffec10f2 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/util/ErrorHandler.java @@ -28,6 +28,7 @@ import org.jetbrains.annotations.ApiStatus; import 
org.jetbrains.annotations.NotNull; +import java.util.function.BiFunction; import java.util.function.Function; /** @@ -85,8 +86,22 @@ enum ErrorType { * @return the handler */ @NotNull - static ErrorHandler throwing(@NotNull Function supplier) { - return (e, msg) -> throwAsUnchecked(supplier.apply(e.toString() + ": " + msg)); + static ErrorHandler throwing(@NotNull BiFunction supplier) { + return (e, msg) -> throwAsUnchecked(supplier.apply(e, msg)); + } + + /** + * Creates an {@link ErrorHandler} that throws exceptions.
+ * Note: the message passed in the {@code supplier} is obtained in the following way: + * {@code errorType + ": " + message} + * + * @param supplier a factory to use for creating the exceptions + * @param the type of the exception + * @return the handler + */ + @NotNull + static ErrorHandler throwingConcat(@NotNull Function supplier) { + return throwing((e, msg) -> supplier.apply(e + ": " + msg)); } @ApiStatus.Internal From 9409e2796e2767fc722ad3205d6891d6a4d558e6 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 31 Jul 2022 20:00:56 +0300 Subject: [PATCH 26/29] Throwing util methods --- .../java/me/lucko/spark/test/SparkTest.java | 12 ++--- .../me/lucko/spark/api/profiler/Profiler.java | 47 ++++++++++++++++++- 2 files changed, 50 insertions(+), 9 deletions(-) diff --git a/api-test/src/main/java/me/lucko/spark/test/SparkTest.java b/api-test/src/main/java/me/lucko/spark/test/SparkTest.java index 20c2a84c..77e445d6 100644 --- a/api-test/src/main/java/me/lucko/spark/test/SparkTest.java +++ b/api-test/src/main/java/me/lucko/spark/test/SparkTest.java @@ -84,16 +84,14 @@ private static void test1(CommandContext ctx) throws Excepti final var source = ctx.getSource(); source.sendFailure(Component.literal("Building sampler... 
stand by.")); // Create the sampler - final Profiler.Sampler sampler = profiler.createSampler(ProfilerConfiguration.builder() + final Profiler.Sampler sampler = profiler.createSamplerThrowing(ProfilerConfiguration.builder() .dumper(new SpecificThreadDumper(ServerLifecycleHooks.getCurrentServer().getRunningThread())) .grouper(ThreadGrouper.BY_NAME) .ignoreSleeping() .samplingInterval(12) .forceJavaSampler() .duration(Duration.ofSeconds(20)) - .build(), ErrorHandler.throwingConcat(IllegalArgumentException::new)); - if (sampler == null) - return; + .build()); sampler.start(); // Start the sampler @@ -125,13 +123,11 @@ private static void test2(final CommandContext context) thro final var source = context.getSource(); source.sendFailure(Component.literal("Building sampler... Please stand by.")); // Create the sampler - final Profiler.Sampler sampler = profiler.createSampler(ProfilerConfiguration.builder() + final Profiler.Sampler sampler = profiler.createSamplerThrowing(ProfilerConfiguration.builder() .dumper(ThreadDumper.ALL) .grouper(ThreadGrouper.AS_ONE) .ignoreNative() - .build(), ErrorHandler.throwingConcat(IllegalArgumentException::new)); - if (sampler == null) - return; + .build()); sampler.start(); // Start the profiler source.sendSuccess(Component.literal("Profiler started..."), true); diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java index b23ded06..14649f4a 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/Profiler.java @@ -28,6 +28,7 @@ import me.lucko.spark.api.profiler.report.ProfilerReport; import me.lucko.spark.api.profiler.report.ReportConfiguration; import me.lucko.spark.api.util.ErrorHandler; +import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.Unmodifiable; @@ -44,7 +45,12 @@ public interface Profiler { 
/** * Generates a new {@link Sampler}.
* Note: the sampler is not started by default, use {@link Sampler#start()}.
- * This method is thread-safe. + * This method is thread-safe.

+     *
+     * Contracts:
+     * <ul>
+     *     <li>
+     *         If this method returns {@code null}, then the {@code errorReporter} must have an error reported.
+     *         Moreover, the error will be reported before the method finishes its execution.
+     *     </li>
+     * </ul>
* * @param configuration the configuration to use for the profiler * @param errorReporter a consumer that reports any errors encountered in the creation of the sampler @@ -53,6 +59,19 @@ public interface Profiler { @Nullable Sampler createSampler(ProfilerConfiguration configuration, ErrorHandler errorReporter); + /** + * Generates a new {@link Sampler}, throwing a {@link CreationException} if an error occurred creating the sampler. + * @see #createSampler(ProfilerConfiguration, ErrorHandler) + * @param configuration the configuration to use for the profiler + * @return the sampler + */ + @NotNull + @SuppressWarnings("RedundantThrows") + default Sampler createSamplerThrowing(ProfilerConfiguration configuration) throws CreationException { + // noinspection ConstantConditions Exception will be thrown before method finishes execution + return createSampler(configuration, ErrorHandler.throwing(CreationException::new)); + } + /** * Gets the active samplers of this profiler. * @@ -145,4 +164,30 @@ interface Sampler { */ boolean isAsync(); } + + /** + * Represents an exception that may occur during the creation of a {@link Sampler}. + * @see #createSamplerThrowing(ProfilerConfiguration) + */ + class CreationException extends Exception { + private final ErrorHandler.ErrorType type; + + public CreationException(ErrorHandler.ErrorType type, String message) { + super(message); + this.type = type; + } + + @Override + public String getMessage() { + return type + ": " + super.getMessage(); + } + + /** + * Gets the type of this exception. 
+ * @return the type of this exception + */ + public ErrorHandler.ErrorType getType() { + return type; + } + } } From aea22d48b345fc3a67b99dba2bee0f17c6f8659c Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 31 Jul 2022 20:56:20 +0300 Subject: [PATCH 27/29] Change upload methods to return a `UploadResult` --- .../spark/api/heap/HeapSummaryReport.java | 5 ++- .../api/profiler/report/ProfilerReport.java | 5 ++- .../me/lucko/spark/api/util/UploadResult.java | 43 +++++++++++++++++++ .../me/lucko/spark/common/SparkPlatform.java | 7 ++- .../command/modules/HeapAnalysisModule.java | 9 ++-- .../common/command/modules/SamplerModule.java | 10 +++-- .../common/heapdump/HeapAnalysisProvider.java | 11 ++--- .../spark/common/sampler/AbstractSampler.java | 11 ++--- .../lucko/spark/common/util/FormatUtil.java | 5 +++ 9 files changed, 83 insertions(+), 23 deletions(-) create mode 100644 spark-api/src/main/java/me/lucko/spark/api/util/UploadResult.java diff --git a/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java b/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java index f0724b26..615b909e 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java +++ b/spark-api/src/main/java/me/lucko/spark/api/heap/HeapSummaryReport.java @@ -27,6 +27,7 @@ import com.google.errorprone.annotations.CanIgnoreReturnValue; import me.lucko.spark.api.util.Sender; +import me.lucko.spark.api.util.UploadResult; import me.lucko.spark.proto.SparkHeapProtos; import org.jetbrains.annotations.NotNull; @@ -42,10 +43,10 @@ public interface HeapSummaryReport { /** * Uploads this report online. 
* - * @return the URL of the uploaded report + * @return the result of the upload */ @NotNull - String upload() throws IOException; + UploadResult upload() throws IOException; /** * Gets the data of this report diff --git a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java index 5f78289e..8ce1dce0 100644 --- a/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java +++ b/spark-api/src/main/java/me/lucko/spark/api/profiler/report/ProfilerReport.java @@ -26,6 +26,7 @@ package me.lucko.spark.api.profiler.report; import com.google.errorprone.annotations.CanIgnoreReturnValue; +import me.lucko.spark.api.util.UploadResult; import me.lucko.spark.proto.SparkSamplerProtos; import org.jetbrains.annotations.NotNull; @@ -42,10 +43,10 @@ public interface ProfilerReport { /** * Uploads this report online. * - * @return the URL of the uploaded report + * @return the result of the upload */ @NotNull - String upload() throws IOException; + UploadResult upload() throws IOException; /** * Gets the data of this report diff --git a/spark-api/src/main/java/me/lucko/spark/api/util/UploadResult.java b/spark-api/src/main/java/me/lucko/spark/api/util/UploadResult.java new file mode 100644 index 00000000..34572629 --- /dev/null +++ b/spark-api/src/main/java/me/lucko/spark/api/util/UploadResult.java @@ -0,0 +1,43 @@ +package me.lucko.spark.api.util; + +import java.util.Objects; + +public final class UploadResult { + private final String viewerUrl, bytebinUrl; + + public UploadResult(String viewerUrl, String bytebinUrl) { + this.viewerUrl = viewerUrl; + this.bytebinUrl = bytebinUrl; + } + + /** Gets the viewer URL */ + public String getViewerUrl() { + return viewerUrl; + } + + /** Gets the Bytebin URL */ + public String getBytebinUrl() { + return bytebinUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || 
getClass() != o.getClass()) return false; + UploadResult that = (UploadResult) o; + return Objects.equals(viewerUrl, that.viewerUrl) && Objects.equals(bytebinUrl, that.bytebinUrl); + } + + @Override + public int hashCode() { + return Objects.hash(viewerUrl, bytebinUrl); + } + + @Override + public String toString() { + return "UploadResult{" + + "viewerUrl='" + viewerUrl + '\'' + + ", bytebinUrl='" + bytebinUrl + '\'' + + '}'; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index 5d72a71d..8732ac23 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -91,6 +91,7 @@ public class SparkPlatform { private final SparkPlugin plugin; private final Configuration configuration; private final String viewerUrl; + private final String bytebinUrl; private final BytebinClient bytebinClient; private final boolean disableResponseBroadcast; private final List commandModules; @@ -112,7 +113,7 @@ public SparkPlatform(SparkPlugin plugin) { this.configuration = new Configuration(this.plugin.getPluginDirectory().resolve("config.json")); this.viewerUrl = this.configuration.getString("viewerUrl", "https://spark.lucko.me/"); - String bytebinUrl = this.configuration.getString("bytebinUrl", "https://bytebin.lucko.me/"); + this.bytebinUrl = this.configuration.getString("bytebinUrl", "https://bytebin.lucko.me/"); this.bytebinClient = new BytebinClient(bytebinUrl, "spark-plugin"); this.disableResponseBroadcast = this.configuration.getBoolean("disableResponseBroadcast", false); @@ -207,6 +208,10 @@ public String getViewerUrl() { return this.viewerUrl; } + public String getBytebinUrl() { + return this.bytebinUrl; + } + public BytebinClient getBytebinClient() { return this.bytebinClient; } diff --git 
a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java index 3758ba92..b70feb12 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java @@ -20,6 +20,7 @@ package me.lucko.spark.common.command.modules; +import me.lucko.spark.api.util.UploadResult; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.activitylog.Activity; import me.lucko.spark.common.command.Arguments; @@ -73,9 +74,9 @@ public void registerCommands(Consumer consumer) { ); } - public static String upload(SparkPlatform platform, SparkHeapProtos.HeapData output) throws IOException { - String key = platform.getBytebinClient().postContent(output, SPARK_HEAP_MEDIA_TYPE).key(); - return platform.getViewerUrl() + key; + public static UploadResult upload(SparkPlatform platform, SparkHeapProtos.HeapData output) throws IOException { + final String key = platform.getBytebinClient().postContent(output, SPARK_HEAP_MEDIA_TYPE).key(); + return new UploadResult(FormatUtil.getBaseDomainUrl(platform.getViewerUrl()) + key, FormatUtil.getBaseDomainUrl(platform.getBytebinUrl()) + key); } private static void heapSummary(SparkPlatform platform, CommandSender sender, CommandResponseHandler resp, Arguments arguments) { @@ -102,7 +103,7 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co saveToFile = true; } else { try { - final String url = upload(platform, output); + final String url = upload(platform, output).getViewerUrl(); resp.broadcastPrefixed(text("Heap dump summary output:", GOLD)); resp.broadcast(text() diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index 
047d583a..4c404220 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -30,6 +30,7 @@ import me.lucko.spark.api.profiler.report.ReportConfiguration; import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.api.profiler.thread.ThreadOrder; +import me.lucko.spark.api.util.UploadResult; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.activitylog.Activity; import me.lucko.spark.common.command.Arguments; @@ -40,6 +41,7 @@ import me.lucko.spark.common.command.tabcomplete.TabCompleter; import me.lucko.spark.common.sampler.ProfilerService; import me.lucko.spark.common.sampler.SamplerBuilder; +import me.lucko.spark.common.util.FormatUtil; import me.lucko.spark.proto.SparkSamplerProtos; import net.kyori.adventure.text.event.ClickEvent; @@ -277,9 +279,9 @@ private Profiler.Sampler activeSampler() { return profiler.activeSamplers().get(0); } - public static String postData(SparkPlatform platform, SparkSamplerProtos.SamplerData output) throws IOException { - String key = platform.getBytebinClient().postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key(); - return platform.getViewerUrl() + key; + public static UploadResult postData(SparkPlatform platform, SparkSamplerProtos.SamplerData output) throws IOException { + final String key = platform.getBytebinClient().postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key(); + return new UploadResult(FormatUtil.getBaseDomainUrl(platform.getViewerUrl()) + key, FormatUtil.getBaseDomainUrl(platform.getBytebinUrl()) + key); } private ReportConfiguration configuration(CommandResponseHandler resp, String comment, boolean separateParentCalls, ThreadOrder order) { @@ -297,7 +299,7 @@ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, P saveToFile = true; } else { try { - final String url = report.upload(); + final String url = 
report.upload().getViewerUrl(); resp.broadcastPrefixed(text("Profiler results:", GOLD)); resp.broadcast(text() diff --git a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapAnalysisProvider.java b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapAnalysisProvider.java index 156862be..5f02b59b 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapAnalysisProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/heapdump/HeapAnalysisProvider.java @@ -23,6 +23,7 @@ import me.lucko.spark.api.heap.HeapAnalysis; import me.lucko.spark.api.heap.HeapSummaryReport; import me.lucko.spark.api.util.Sender; +import me.lucko.spark.api.util.UploadResult; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.modules.HeapAnalysisModule; import me.lucko.spark.proto.SparkHeapProtos; @@ -44,14 +45,14 @@ public HeapAnalysisProvider(SparkPlatform platform) { public @NotNull HeapSummaryReport summary(Sender sender) { final SparkHeapProtos.HeapData data = HeapDumpSummary.createNew().toProto(platform, sender); return new HeapSummaryReport() { - String uploadedUrl; + UploadResult uploadResult; @Override @NonNull - public String upload() throws IOException { - if (uploadedUrl == null) - uploadedUrl = HeapAnalysisModule.upload(platform, data); - return uploadedUrl; + public UploadResult upload() throws IOException { + if (uploadResult == null) + uploadResult = HeapAnalysisModule.upload(platform, data); + return uploadResult; } @NotNull diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index 43498ddc..b9ae7709 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -25,6 +25,7 @@ import me.lucko.spark.api.profiler.report.ProfilerReport; import 
me.lucko.spark.api.profiler.report.ReportConfiguration; import me.lucko.spark.api.util.Sender; +import me.lucko.spark.api.util.UploadResult; import me.lucko.spark.common.SparkPlatform; import me.lucko.spark.common.command.modules.SamplerModule; import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics; @@ -138,14 +139,14 @@ private ProfilerReport createReport(ReportConfiguration configuration) { return new ProfilerReport() { final SparkSamplerProtos.SamplerData data = toProto(platform, configuration.getSender(), configuration.getThreadOrder()::compare, configuration.getComment(), configuration.separateParentCalls() ? MergeMode.separateParentCalls(methodDisambiguator) : MergeMode.sameMethod(methodDisambiguator), platform.createClassSourceLookup()); - String uploadedUrl; + UploadResult uploadResult; @Override @NonNull - public String upload() throws IOException { - if (uploadedUrl == null) - uploadedUrl = SamplerModule.postData(platform, data); - return uploadedUrl; + public UploadResult upload() throws IOException { + if (uploadResult == null) + uploadResult = SamplerModule.postData(platform, data); + return uploadResult; } @Override diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java index c4a3d666..9f258ea7 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/FormatUtil.java @@ -62,4 +62,9 @@ public static Component formatBytes(long bytes, TextColor color, String suffix) .append(Component.text(unit)) .build(); } + + public static String getBaseDomainUrl(String input) { + if (input.endsWith("/")) return input; + return input + "/"; + } } From 6a2a78496a75aa57eef7639cc2ed489b26bf8308 Mon Sep 17 00:00:00 2001 From: Matyrobbrt Date: Sun, 31 Jul 2022 21:29:02 +0300 Subject: [PATCH 28/29] Update test mod to use online upload results --- 
.../java/me/lucko/spark/test/SparkTest.java | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/api-test/src/main/java/me/lucko/spark/test/SparkTest.java b/api-test/src/main/java/me/lucko/spark/test/SparkTest.java index 77e445d6..0ccefcad 100644 --- a/api-test/src/main/java/me/lucko/spark/test/SparkTest.java +++ b/api-test/src/main/java/me/lucko/spark/test/SparkTest.java @@ -20,6 +20,7 @@ package me.lucko.spark.test; +import com.google.protobuf.CodedInputStream; import com.mojang.brigadier.Command; import com.mojang.brigadier.context.CommandContext; import cpw.mods.modlauncher.api.LamdbaExceptionUtils; @@ -32,6 +33,7 @@ import me.lucko.spark.api.profiler.report.ReportConfiguration; import me.lucko.spark.api.profiler.thread.ThreadGrouper; import me.lucko.spark.api.util.ErrorHandler; +import me.lucko.spark.api.util.UploadResult; import me.lucko.spark.proto.SparkSamplerProtos; import me.lucko.spark.proto.SparkSamplerProtos.SamplerData; import net.minecraft.commands.CommandRuntimeException; @@ -45,6 +47,7 @@ import net.minecraftforge.fml.loading.FMLPaths; import net.minecraftforge.server.ServerLifecycleHooks; +import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.time.Duration; @@ -137,11 +140,18 @@ private static void test2(final CommandContext context) thro // Dump the report final ProfilerReport report = sampler.dumpReport(ReportConfiguration.onlySender("My test")); final Path saveFile = report.saveToFile(savePath.resolve("test2.sparkprofile")); // Save the report - try (final var localIs = Files.newInputStream(saveFile)) { + final UploadResult uploadResult = report.upload(); + try (final var localIs = Files.newInputStream(saveFile); + final var onlineIs = URI.create(uploadResult.getBytebinUrl()).toURL().openStream()) { final SamplerData data = report.data(); - final SamplerData fromLocal = SamplerData.parseFrom(localIs); - if (data.equals(fromLocal)) { - 
source.sendSuccess(Component.literal("Results from local file and memory are equal!"), false); + final CodedInputStream localCd = CodedInputStream.newInstance(localIs); + localCd.setRecursionLimit(Integer.MAX_VALUE); + final SamplerData fromLocal = SamplerData.parseFrom(localCd); + final CodedInputStream onlineCd = CodedInputStream.newInstance(onlineIs); + onlineCd.setRecursionLimit(Integer.MAX_VALUE); + final SamplerData fromOnline = SamplerData.parseFrom(onlineCd); + if (data.equals(fromLocal) && fromLocal.equals(fromOnline)) { + source.sendSuccess(Component.literal("Results from local file, memory and Bytebin are equal!"), false); } else { source.sendFailure(Component.literal("Results do not match!")); } From 2158bb9dd3cb0ba248722bcb43d6276769191f4e Mon Sep 17 00:00:00 2001 From: matyrobbrt Date: Tue, 9 Aug 2022 12:42:56 +0300 Subject: [PATCH 29/29] Update gradle.properties --- gradle.properties | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gradle.properties b/gradle.properties index 31552c8a..92c9098e 100644 --- a/gradle.properties +++ b/gradle.properties @@ -4,6 +4,6 @@ org.gradle.parallel=true # thanks, forge org.gradle.daemon=false -forge_version=1.19-41.0.11 +forge_version=1.19.2-43.0.0 -api_version=1.0.0 \ No newline at end of file +api_version=1.0.0