Implement standalone profiling agent
lucko committed Jan 11, 2025 · 1 parent 336102f · commit 76b3a79
Showing 13 changed files with 650 additions and 3 deletions.
1 change: 1 addition & 0 deletions settings.gradle
@@ -31,5 +31,6 @@ include (
        'spark-neoforge',
        'spark-paper',
        'spark-sponge',
+       'spark-standalone-agent',
        'spark-velocity',
)
2 changes: 1 addition & 1 deletion spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java
@@ -369,7 +369,7 @@ public CompletableFuture<Void> executeCommand(CommandSender sender, String[] args
        try {
            executeCommand0(sender, args);
            future.complete(null);
-       } catch (Exception e) {
+       } catch (Throwable e) {
            this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command", e);
            future.completeExceptionally(e);
        } finally {
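Note: the widened catch matters because the command body runs asynchronously and completes a CompletableFuture. An Error (for example a NoClassDefFoundError raised while profiler classes load lazily) is not an Exception, so the old catch would let it escape without ever completing the future, leaving callers blocked. A minimal sketch of the pattern, using hypothetical names rather than spark's actual classes:

    import java.util.concurrent.CompletableFuture;

    public class FutureCompletionExample {
        // Hypothetical task runner: completes the future on *any* failure,
        // including Errors, so callers never wait on a future that will
        // never be completed.
        static CompletableFuture<Void> run(Runnable task) {
            CompletableFuture<Void> future = new CompletableFuture<>();
            new Thread(() -> {
                try {
                    task.run();
                    future.complete(null);
                } catch (Throwable e) { // catching Exception alone would miss Errors
                    future.completeExceptionally(e);
                }
            }).start();
            return future;
        }

        public static void main(String[] args) {
            run(() -> { throw new NoClassDefFoundError("demo"); })
                    .exceptionally(e -> { System.out.println("caught: " + e); return null; })
                    .join();
        }
    }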
3 changes: 2 additions & 1 deletion spark-common/src/main/java/me/lucko/spark/common/platform/PlatformInfo.java
@@ -48,7 +48,8 @@ default Data toData() {
    enum Type {
        SERVER(PlatformMetadata.Type.SERVER),
        CLIENT(PlatformMetadata.Type.CLIENT),
-       PROXY(PlatformMetadata.Type.PROXY);
+       PROXY(PlatformMetadata.Type.PROXY),
+       STANDALONE(PlatformMetadata.Type.STANDALONE);

        private final PlatformMetadata.Type type;
5 changes: 4 additions & 1 deletion spark-common/src/main/java/me/lucko/spark/common/util/classfinder/InstrumentationClassFinder.java
@@ -57,7 +57,10 @@ private static Instrumentation loadInstrumentation(SparkPlugin plugin) {
    private final Map<String, Class<?>> classes = new HashMap<>();

    public InstrumentationClassFinder(SparkPlugin plugin) {
-       Instrumentation instrumentation = loadInstrumentation(plugin);
+       this(loadInstrumentation(plugin));
+   }
+
+   public InstrumentationClassFinder(Instrumentation instrumentation) {
        if (instrumentation == null) {
            return;
        }
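Note: the new overload decouples the class finder from self-attachment. The standalone agent already receives an Instrumentation instance directly from the JVM via premain/agentmain, so it can build the finder without going through loadInstrumentation(plugin). A minimal sketch of that usage (the import path is assumed from spark-common's layout; the agent class is hypothetical):

    import java.lang.instrument.Instrumentation;
    import me.lucko.spark.common.util.classfinder.InstrumentationClassFinder; // assumed package path

    public class AgentExample {
        // The JVM hands an Instrumentation instance straight to the agent,
        // so no dynamic self-attach is needed to obtain one.
        public static void agentmain(String agentArgs, Instrumentation instrumentation) {
            InstrumentationClassFinder finder = new InstrumentationClassFinder(instrumentation);
            // ... use the finder to resolve classes across loaded class loaders
        }
    }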
1 change: 1 addition & 0 deletions spark-common/src/main/proto/spark/spark.proto
@@ -21,6 +21,7 @@ message PlatformMetadata {
        SERVER = 0;
        CLIENT = 1;
        PROXY = 2;
+       STANDALONE = 3;
    }
}

62 changes: 62 additions & 0 deletions spark-standalone-agent/build.gradle
@@ -0,0 +1,62 @@
plugins {
    id 'net.kyori.blossom' version '1.3.0'
    id 'com.gradleup.shadow' version '8.3.0'
}

dependencies {
    implementation project(':spark-common')
    implementation('net.kyori:adventure-text-serializer-ansi:4.17.0') {
        exclude(module: 'adventure-bom')
        exclude(module: 'adventure-api')
        exclude(module: 'annotations')
    }
    implementation 'org.slf4j:slf4j-simple:2.0.16'
    implementation 'com.google.code.gson:gson:2.9.0'
    implementation 'com.google.guava:guava:31.1-jre'

    implementation 'org.jline:jline-remote-ssh:3.28.0'
    implementation 'org.apache.sshd:sshd-core:2.14.0'
}

tasks.withType(JavaCompile).configureEach {
    options.compilerArgs += ['--add-modules', 'jdk.attach']
    options.release = 11
}

blossom {
    replaceTokenIn('src/main/java/me/lucko/spark/standalone/StandaloneSparkPlugin.java')
    replaceToken '@version@', project.pluginVersion
}

jar {
    manifest {
        attributes(
                'Main-Class': 'me.lucko.spark.standalone.StandaloneSparkAgent',
                'Agent-Class': 'me.lucko.spark.standalone.StandaloneSparkAgent',
                'Premain-Class': 'me.lucko.spark.standalone.StandaloneSparkAgent'
        )
    }
}

shadowJar {
    archiveFileName = "spark-${project.pluginVersion}-standalone-agent.jar"

    relocate 'net.kyori.adventure', 'me.lucko.spark.lib.adventure'
    relocate 'net.kyori.examination', 'me.lucko.spark.lib.adventure.examination'
    relocate 'net.kyori.option', 'me.lucko.spark.lib.adventure.option'
    relocate 'net.bytebuddy', 'me.lucko.spark.lib.bytebuddy'
    relocate 'com.google.protobuf', 'me.lucko.spark.lib.protobuf'
    relocate 'org.objectweb.asm', 'me.lucko.spark.lib.asm'
    relocate 'one.profiler', 'me.lucko.spark.lib.asyncprofiler'
    relocate 'me.lucko.bytesocks.client', 'me.lucko.spark.lib.bytesocks'
    relocate 'org.java_websocket', 'me.lucko.spark.lib.bytesocks.ws'
    relocate 'com.google.gson', 'me.lucko.spark.lib.gson'
    relocate 'com.google.common', 'me.lucko.spark.lib.guava'

    project.applyExcludes(delegate)
}

artifacts {
    archives shadowJar
    shadow shadowJar
}
61 changes: 61 additions & 0 deletions spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneCommandSender.java
@@ -0,0 +1,61 @@
/*
* This file is part of spark.
*
* Copyright (c) lucko (Luck) <[email protected]>
* Copyright (c) contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package me.lucko.spark.standalone;

import me.lucko.spark.common.command.sender.AbstractCommandSender;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.serializer.ansi.ANSIComponentSerializer;

import java.util.UUID;

public class StandaloneCommandSender extends AbstractCommandSender<StandaloneCommandSender.Output> {
    public static final StandaloneCommandSender NO_OP = new StandaloneCommandSender(msg -> {});
    public static final StandaloneCommandSender SYSTEM_OUT = new StandaloneCommandSender(System.out::println);

    public StandaloneCommandSender(Output output) {
        super(output);
    }

    @Override
    public String getName() {
        return "Standalone";
    }

    @Override
    public UUID getUniqueId() {
        return null;
    }

    @Override
    public void sendMessage(Component message) {
        this.delegate.sendMessage(ANSIComponentSerializer.ansi().serialize(message));
    }

    @Override
    public boolean hasPermission(String permission) {
        return true;
    }

    public interface Output {
        void sendMessage(String message);
    }

}
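Note: Output is a single-method interface, so any String consumer can act as the sink for the ANSI-serialized component text, as the NO_OP and SYSTEM_OUT constants above show. A minimal usage sketch (hypothetical wiring, not part of the commit):

    import me.lucko.spark.standalone.StandaloneCommandSender;
    import net.kyori.adventure.text.Component;

    public class SenderExample {
        public static void main(String[] args) {
            // Route the serialized ANSI output through a lambda; here it just
            // prefixes each line before printing to stdout.
            StandaloneCommandSender sender = new StandaloneCommandSender(
                    line -> System.out.println("[spark] " + line));
            sender.sendMessage(Component.text("profiler ready"));
        }
    }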
56 changes: 56 additions & 0 deletions spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandalonePlatformInfo.java
@@ -0,0 +1,56 @@
/*
* This file is part of spark.
*
* Copyright (c) lucko (Luck) <[email protected]>
* Copyright (c) contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package me.lucko.spark.standalone;

import me.lucko.spark.common.platform.PlatformInfo;

public class StandalonePlatformInfo implements PlatformInfo {
    private final String version;

    public StandalonePlatformInfo(String version) {
        this.version = version;
    }

    @Override
    public Type getType() {
        return Type.STANDALONE;
    }

    @Override
    public String getName() {
        return "Standalone";
    }

    @Override
    public String getBrand() {
        return "Standalone";
    }

    @Override
    public String getVersion() {
        return this.version;
    }

    @Override
    public String getMinecraftVersion() {
        // not applicable when profiling an arbitrary JVM
        return null;
    }
}
98 changes: 98 additions & 0 deletions spark-standalone-agent/src/main/java/me/lucko/spark/standalone/StandaloneSparkAgent.java
@@ -0,0 +1,98 @@
/*
* This file is part of spark.
*
* Copyright (c) lucko (Luck) <[email protected]>
* Copyright (c) contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package me.lucko.spark.standalone;

import com.sun.tools.attach.VirtualMachine;
import com.sun.tools.attach.VirtualMachineDescriptor;

import java.lang.instrument.Instrumentation;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class StandaloneSparkAgent {

    // Entry point when the agent is run as a normal jar
    public static void main(String[] args) {
        if (args.length == 0) {
            System.err.println("Usage: java -jar spark-standalone-agent.jar <pid> [args...]");

            List<VirtualMachineDescriptor> vms = VirtualMachine.list();
            if (vms.isEmpty()) {
                return;
            }

            System.out.println("Current JVM processes:");
            for (VirtualMachineDescriptor vm : vms) {
                System.out.println("  pid=" + vm.id() + " (" + vm.displayName() + ")");
            }

            return;
        }

        try {
            VirtualMachine vm = VirtualMachine.attach(args[0]);
            String agentPath = StandaloneSparkAgent.class.getProtectionDomain().getCodeSource().getLocation().getPath();
            String arguments = String.join(",", Arrays.copyOfRange(args, 1, args.length));
            vm.loadAgent(agentPath, arguments);
            System.out.println("[spark] Agent loaded successfully.");
            vm.detach();
        } catch (Throwable e) {
            System.err.println("Failed to attach agent to process " + args[0]);
            e.printStackTrace(System.err);
        }
    }

    // Entry point when the agent is loaded via -javaagent
    public static void premain(String agentArgs, Instrumentation instrumentation) {
        System.out.println("[spark] Loading standalone agent... (premain)");
        init(agentArgs, instrumentation);
    }

    // Entry point when the agent is loaded via VirtualMachine#loadAgent
    public static void agentmain(String agentArgs, Instrumentation instrumentation) {
        System.out.println("[spark] Loading standalone agent... (agentmain)");
        init(agentArgs, instrumentation);
    }

    private static void init(String agentArgs, Instrumentation instrumentation) {
        try {
            // parse comma-separated "key=value" pairs; bare keys default to "true"
            Map<String, String> arguments = new HashMap<>();
            if (agentArgs == null) {
                agentArgs = "";
            }
            for (String arg : agentArgs.split(",")) {
                if (arg.contains("=")) {
                    String[] parts = arg.split("=", 2);
                    arguments.put(parts[0], parts[1]);
                } else {
                    arguments.put(arg, "true");
                }
            }
            new StandaloneSparkPlugin(instrumentation, arguments);
        } catch (Throwable e) {
            System.err.println("[spark] Loading failed :(");
            e.printStackTrace(System.err);
        }
    }

}
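Note: the manifest in build.gradle registers this one class as Main-Class, Premain-Class and Agent-Class, so the same jar can attach to a running JVM by pid, be loaded at startup via -javaagent, or be injected programmatically. A minimal sketch of the programmatic route, mirroring what main() does above (the pid, jar path and agent arguments are placeholders):

    import com.sun.tools.attach.VirtualMachine;

    public class AttachExample {
        public static void main(String[] args) throws Exception {
            // Placeholders: substitute a real pid and the built shadow jar path.
            VirtualMachine vm = VirtualMachine.attach("12345");
            // The argument string arrives in agentmain, where init() splits it
            // on ',' and treats bare keys as "=true".
            vm.loadAgent("/path/to/spark-standalone-agent.jar", "key=value,flag");
            vm.detach();
        }
    }

The com.sun.tools.attach API lives in the jdk.attach module and requires a JDK rather than a bare JRE, which is why the build passes --add-modules jdk.attach to the compiler.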