Expose a profiler API #229

Status: Open. Wants to merge 31 commits into base: master (from the spark-api branch).

Commits
All 31 commits are by Matyrobbrt.

2b2e084  Initial attempt at API (Jul 17, 2022)
2e3218d  Update licenses (Jul 17, 2022)
3fc7b7c  Doc updates (Jul 17, 2022)
e1563b1  Change build.gradle (Jul 17, 2022)
cc3d087  Merge remote-tracking branch 'upstream/master' into spark-api (Jul 17, 2022)
96f40d8  Update GameThreadDumper.java (Jul 17, 2022)
a15f13f  Update publishing buildscript (Jul 17, 2022)
b5d027f  Fixes (Jul 17, 2022)
001981b  Small tweaks (Jul 18, 2022)
114d079  Correctly clear active sampler (Jul 18, 2022)
2acdb6c  No longer use internal objects (Jul 18, 2022)
25cfe1a  Add multi-sampler support (Jul 18, 2022)
7e3ff2b  Use dedicated error handler interface (Jul 18, 2022)
076238c  Add heap analysis API (Jul 18, 2022)
d36505e  Use link tags (Jul 18, 2022)
10f62ea  Fix javadoc (Jul 23, 2022)
f63d495  Update mods.toml (Jul 25, 2022)
88604b9  Expose ping monitoring (Jul 26, 2022)
d6db984  Rename some methods (Jul 28, 2022)
822e8b5  Remove file (Jul 28, 2022)
9d4c0e2  Changes to the ERROR_HANDLER (Jul 28, 2022)
d6fc647  Add test mod (Jul 28, 2022)
bb37a4e  Fix (Jul 29, 2022)
b52a58f  Add a static get method in spark (Jul 31, 2022)
e916af5  Add license (Jul 31, 2022)
d408550  ErrorHandler changes (Jul 31, 2022)
9409e27  Throwing util methods (Jul 31, 2022)
aea22d4  Change upload methods to return a `UploadResult` (Jul 31, 2022)
6a2a784  Update test mod to use online upload results (Jul 31, 2022)
daba66c  Merge branch 'master' into spark-api (Aug 9, 2022)
2158bb9  Update gradle.properties (Aug 9, 2022)
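
Taken together, these commits expose a consumer-facing profiler API. The sketch below condenses the flow exercised by the bundled test mod (SparkTest.java, in the diff further down); every call in it appears verbatim in that file, so treat it as a summary of the test mod rather than authoritative API documentation:

import me.lucko.spark.api.SparkProvider;
import me.lucko.spark.api.profiler.Profiler;
import me.lucko.spark.api.profiler.ProfilerConfiguration;
import me.lucko.spark.api.profiler.dumper.ThreadDumper;
import me.lucko.spark.api.profiler.report.ProfilerReport;
import me.lucko.spark.api.profiler.report.ReportConfiguration;
import me.lucko.spark.api.profiler.thread.ThreadGrouper;
import me.lucko.spark.api.util.UploadResult;

public final class ApiFlowSketch {
    private static Profiler profiler;

    public static void init() {
        // Mod loading is parallel, so grab the profiler only once spark has loaded.
        SparkProvider.whenLoaded(spark -> profiler = spark.profiler(12)); // up to 12 concurrent samplers
    }

    public static void profileBriefly() throws Exception {
        final Profiler.Sampler sampler = profiler.createSamplerThrowing(ProfilerConfiguration.builder()
                .dumper(ThreadDumper.ALL)      // sample all threads
                .grouper(ThreadGrouper.AS_ONE) // report them as a single group
                .build());
        sampler.start();
        Thread.sleep(5_000);
        sampler.stop();

        final ProfilerReport report = sampler.dumpReport(ReportConfiguration.onlySender("sketch"));
        final UploadResult result = report.upload(); // or report.saveToFile(path)
        System.out.println(result.getBytebinUrl());
    }
}

Samplers built with a duration() instead expose a completion future via onCompleted(), as the test mod's test1 command shows.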
45 changes: 45 additions & 0 deletions api-test/build.gradle
@@ -0,0 +1,45 @@
plugins {
    id 'net.minecraftforge.gradle' version '5.1.+'
}

tasks.withType(JavaCompile) {
    // override, compile targeting J17
    options.release = 17
}

configurations {
    library
    implementation.extendsFrom library
}

minecraft.runs.all {
    lazyToken('minecraft_classpath') {
        configurations.library.copyRecursive().resolve().collect { it.absolutePath }.join(File.pathSeparator)
    }
}

minecraft {
    mappings channel: 'official', version: '1.19'

    runs {
        client {
            workingDirectory project.file('run')
            property 'forge.enabledGameTestNamespaces', 'sparktest'
            mods {
                sparktest {
                    source sourceSets.main
                }
            }
        }
    }
}

dependencies {
    minecraft "net.minecraftforge:forge:${rootProject.forge_version}"

    compileOnly project(':spark-api')
    compileOnly project(':spark-proto')

    library project(':spark-common')
    runtimeOnly project(':spark-forge')
}
171 changes: 171 additions & 0 deletions api-test/src/main/java/me/lucko/spark/test/SparkTest.java
@@ -0,0 +1,171 @@
/*
 * This file is part of spark.
 *
 * Copyright (c) lucko (Luck) <[email protected]>
 * Copyright (c) contributors
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package me.lucko.spark.test;

import com.google.protobuf.CodedInputStream;
import com.mojang.brigadier.Command;
import com.mojang.brigadier.context.CommandContext;
import cpw.mods.modlauncher.api.LamdbaExceptionUtils;
import me.lucko.spark.api.SparkProvider;
import me.lucko.spark.api.profiler.Profiler;
import me.lucko.spark.api.profiler.ProfilerConfiguration;
import me.lucko.spark.api.profiler.dumper.SpecificThreadDumper;
import me.lucko.spark.api.profiler.dumper.ThreadDumper;
import me.lucko.spark.api.profiler.report.ProfilerReport;
import me.lucko.spark.api.profiler.report.ReportConfiguration;
import me.lucko.spark.api.profiler.thread.ThreadGrouper;
import me.lucko.spark.api.util.ErrorHandler;
import me.lucko.spark.api.util.UploadResult;
import me.lucko.spark.proto.SparkSamplerProtos;
import me.lucko.spark.proto.SparkSamplerProtos.SamplerData;
import net.minecraft.commands.CommandRuntimeException;
import net.minecraft.commands.CommandSourceStack;
import net.minecraft.commands.Commands;
import net.minecraft.network.chat.Component;
import net.minecraftforge.event.RegisterCommandsEvent;
import net.minecraftforge.event.server.ServerStoppingEvent;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.loading.FMLPaths;
import net.minecraftforge.server.ServerLifecycleHooks;

import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;

import static net.minecraft.commands.Commands.literal;

@Mod("sparktest")
@Mod.EventBusSubscriber
public class SparkTest {

    private static Profiler profiler;
    private static Path savePath;

    public SparkTest() {
        // Mod loading is parallel, so we're not assured that spark will be loaded before us
        // As such, get the profiler once spark loads
        SparkProvider.whenLoaded(spark -> profiler = spark.profiler(12) /* Request a profiler capable of managing 12 active samplers */);

        savePath = FMLPaths.GAMEDIR.get().resolve("sparktest");
    }

    @SubscribeEvent
    static void serverStop(final ServerStoppingEvent event) {
        profiler.stop();
    }

    @SubscribeEvent
    static void registerCommand(final RegisterCommandsEvent event) {
        event.getDispatcher().register(Commands.literal("sparktest")
                .then(literal("test1")
                        .executes(throwingCommand(SparkTest::test1)))
                .then(literal("test2")
                        .executes(throwingCommand(SparkTest::test2))));
    }

    private static void test1(CommandContext<CommandSourceStack> ctx) throws Exception {
        final var source = ctx.getSource();
        source.sendFailure(Component.literal("Building sampler... stand by."));
        // Create the sampler
        final Profiler.Sampler sampler = profiler.createSamplerThrowing(ProfilerConfiguration.builder()
                .dumper(new SpecificThreadDumper(ServerLifecycleHooks.getCurrentServer().getRunningThread()))
                .grouper(ThreadGrouper.BY_NAME)
                .ignoreSleeping()
                .samplingInterval(12)
                .forceJavaSampler()
                .duration(Duration.ofSeconds(20))
                .build());

        sampler.start(); // Start the sampler

        source.sendSuccess(Component.literal("Started sampler. Please await the results in the next 20 seconds."), false);

        // Await sampler completion and execute callback once the sampler is completed
        sampler.onCompleted(ReportConfiguration.builder()
                        .separateParentCalls(true).build())
                .whenComplete(LamdbaExceptionUtils.rethrowBiConsumer((report, t) -> {
                    final SamplerData data = report.data();
                    source.sendSuccess(Component.literal("Profiling done. Profiled threads: " + data.getThreadsList()
                            .stream()
                            .map(SparkSamplerProtos.ThreadNode::getName)
                            .toList()), false);
                    final Path path = report.saveToFile(savePath.resolve("test1.sparkprofile"));
                    try (final var is = Files.newInputStream(path)) {
                        final SamplerData fromBytes = SparkSamplerProtos.SamplerData.parseFrom(is);
                        final var isEqual = data.equals(fromBytes);
                        if (isEqual) {
                            source.sendSuccess(Component.literal("Results from bytes and from memory are equal!"), false);
                        } else {
                            source.sendFailure(Component.literal("Results from bytes and from memory do not match!"));
                        }
                    }
                }));
    }

    private static void test2(final CommandContext<CommandSourceStack> context) throws Exception {
        final var source = context.getSource();
        source.sendFailure(Component.literal("Building sampler... Please stand by."));
        // Create the sampler
        final Profiler.Sampler sampler = profiler.createSamplerThrowing(ProfilerConfiguration.builder()
                .dumper(ThreadDumper.ALL)
                .grouper(ThreadGrouper.AS_ONE)
                .ignoreNative()
                .build());

        sampler.start(); // Start the profiler
        source.sendSuccess(Component.literal("Profiler started..."), true);
        Thread.sleep(1000 * 5); // Wait 5 seconds
        sampler.stop(); // Stop the profiler

        // Dump the report
        final ProfilerReport report = sampler.dumpReport(ReportConfiguration.onlySender("My test"));
        final Path saveFile = report.saveToFile(savePath.resolve("test2.sparkprofile")); // Save the report
        final UploadResult uploadResult = report.upload();
        try (final var localIs = Files.newInputStream(saveFile);
             final var onlineIs = URI.create(uploadResult.getBytebinUrl()).toURL().openStream()) {
            final SamplerData data = report.data();
            final CodedInputStream localCd = CodedInputStream.newInstance(localIs);
            localCd.setRecursionLimit(Integer.MAX_VALUE);
            final SamplerData fromLocal = SamplerData.parseFrom(localCd);
            final CodedInputStream onlineCd = CodedInputStream.newInstance(onlineIs);
            onlineCd.setRecursionLimit(Integer.MAX_VALUE);
            final SamplerData fromOnline = SamplerData.parseFrom(onlineCd);
            if (data.equals(fromLocal) && fromLocal.equals(fromOnline)) {
                source.sendSuccess(Component.literal("Results from local file, memory and Bytebin are equal!"), false);
            } else {
                source.sendFailure(Component.literal("Results do not match!"));
            }
        }
    }

    private static <S> Command<S> throwingCommand(LamdbaExceptionUtils.Consumer_WithExceptions<CommandContext<S>, Exception> consumer) {
        return ctx -> {
            try {
                consumer.accept(ctx);
                return 1;
            } catch (Exception e) {
                throw new CommandRuntimeException(Component.literal(e.toString()));
            }
        };
    }
}
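
The test mod only exercises the throwing factory (createSamplerThrowing). Commits 7e3ff2b, 9d4c0e2 and d408550 introduce a dedicated ErrorHandler for the non-throwing path, but that path is not shown in this diff, so the sketch below is hypothetical: the createSampler(config, handler) signature and the handler's callback shape are assumptions inferred from the commit log.

// Hypothetical sketch: neither createSampler(config, handler) nor the
// ErrorHandler callback shape below is confirmed by this diff.
import me.lucko.spark.api.profiler.Profiler;
import me.lucko.spark.api.profiler.ProfilerConfiguration;
import me.lucko.spark.api.profiler.dumper.ThreadDumper;
import me.lucko.spark.api.profiler.thread.ThreadGrouper;

public final class ErrorHandlerSketch {
    public static Profiler.Sampler tryCreate(Profiler profiler) {
        // Assumed contract: failures (e.g. the sampler capacity being exhausted)
        // are reported through the handler and null is returned instead of throwing.
        return profiler.createSampler(
                ProfilerConfiguration.builder()
                        .dumper(ThreadDumper.ALL)
                        .grouper(ThreadGrouper.AS_ONE)
                        .build(),
                (errorCode, message) -> System.err.println(errorCode + ": " + message));
    }
}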
10 changes: 10 additions & 0 deletions api-test/src/main/resources/META-INF/mods.toml
@@ -0,0 +1,10 @@
modLoader="javafml"
loaderVersion="[34,)"
authors="Luck"
license="GPLv3"

[[mods]]
modId="sparktest"
displayName="sparktest"
version="1.0.0"
description="Spark testing"
6 changes: 5 additions & 1 deletion gradle.properties
@@ -2,4 +2,8 @@ org.gradle.jvmargs=-Xmx2G
 org.gradle.parallel=true
 
 # thanks, forge
-org.gradle.daemon=false
+org.gradle.daemon=false
+
+forge_version=1.19.2-43.0.0
+
+api_version=1.0.0
10 changes: 9 additions & 1 deletion settings.gradle
@@ -4,12 +4,17 @@ pluginManagement {
             name = 'Fabric'
             url = 'https://maven.fabricmc.net/'
         }
+        maven {
+            name = 'Forge'
+            url = "https://maven.minecraftforge.net"
+        }
         gradlePluginPortal()
     }
 }
 
 rootProject.name = 'spark'
 include (
+    'spark-proto',
     'spark-api',
     'spark-common',
     'spark-bukkit',
@@ -22,5 +27,8 @@ include (
     'spark-fabric',
     'spark-nukkit',
     'spark-waterdog',
-    'spark-minestom'
+    'spark-minestom',
+
+    // A Forge project for testing the API
+    'api-test'
 )
46 changes: 33 additions & 13 deletions spark-api/build.gradle
@@ -2,27 +2,47 @@ plugins {
     id 'maven-publish'
 }
 
-version = '0.1-SNAPSHOT'
+version = api_version
 group = 'me.lucko.spark'
 archivesBaseName = 'api'
 
 dependencies {
-    compileOnly 'org.checkerframework:checker-qual:3.8.0'
-    compileOnly 'org.jetbrains:annotations:20.1.0'
+    api project(':spark-proto')
+    compileOnly 'org.jetbrains:annotations:23.0.0'
+    compileOnly 'org.checkerframework:checker-qual:3.22.1'
+    compileOnly 'com.google.errorprone:error_prone_annotations:2.6.0'
 }
 
 license {
     header = project.file('HEADER.txt')
 }
 
+java.withSourcesJar()
+java.withJavadocJar()
+
+jar {
+    from file('LICENSE.txt')
+    manifest.attributes([
+            "Specification-Title"     : 'SparkAPI',
+            "Specification-Vendor"    : 'Lucko',
+            "Specification-Version"   : '1', // We are version 1 of ourselves
+            "Implementation-Title"    : 'SparkAPI',
+            "Implementation-Version"  : api_version,
+            "Implementation-Vendor"   : 'Lucko',
+            "Implementation-Timestamp": new Date().format("yyyy-MM-dd'T'HH:mm:ssZ")
+    ])
+}
+
 publishing {
-    //repositories {
-    //    maven {
-    //        url = 'https://oss.sonatype.org/content/repositories/snapshots'
-    //        credentials {
-    //            username = sonatypeUsername
-    //            password = sonatypePassword
-    //        }
-    //    }
-    //}
+    repositories {
+        maven {
+            url = 'https://oss.sonatype.org/content/repositories/snapshots'
+            credentials {
+                username = findProperty('sonatypeUsername') ?: ''
+                password = findProperty('sonatypePassword') ?: ''
+            }
+        }
+    }
     publications {
         mavenJava(MavenPublication) {
             from components.java
@@ -33,4 +53,4 @@ publishing {
             }
         }
     }
-}
+}
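
Finally, commit b52a58f adds a static accessor alongside the whenLoaded callback. A minimal sketch follows; the exact behavior of SparkProvider.get() before spark has loaded is not visible in this diff, so treat that part as an assumption:

import me.lucko.spark.api.Spark;
import me.lucko.spark.api.SparkProvider;
import me.lucko.spark.api.profiler.Profiler;

public final class StaticAccessSketch {
    public static Profiler profiler() {
        // Assumption: get() returns the loaded Spark instance, and fails if
        // called before spark has finished loading.
        final Spark spark = SparkProvider.get();
        return spark.profiler(12);
    }
}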