
Commit e0878fb

Add much better codestyle
1 parent 49b5302 commit e0878fb

91 files changed (+3161, -3216 lines)


conf/formatterConfig.xml

+362 (large diff not rendered by default)

jlama-cli/src/main/java/com/github/tjake/jlama/cli/JlamaCli.java

+4 -13

@@ -23,27 +23,18 @@
 import picocli.CommandLine;
 import picocli.CommandLine.*;
 
-
-@Command(
-        name = "jlama",
-        mixinStandardHelpOptions = true,
-        requiredOptionMarker = '*',
-        usageHelpAutoWidth = true,
-        sortOptions = true)
+@Command(name = "jlama", mixinStandardHelpOptions = true, requiredOptionMarker = '*', usageHelpAutoWidth = true, sortOptions = true)
 public class JlamaCli implements Runnable {
     static {
         System.setProperty("jdk.incubator.vector.VECTOR_ACCESS_OOB_CHECK", "0");
         TensorOperationsProvider.get();
     }
 
-    @Option(
-            names = {"-h", "--help"},
-            usageHelp = true,
-            hidden = true)
+    @Option(names = { "-h", "--help" }, usageHelp = true, hidden = true)
     boolean helpRequested = false;
 
     public static void main(String[] args) {
-        Logger root = (Logger)LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME);
+        Logger root = (Logger) LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME);
         root.setLevel(Level.INFO);
 
         CommandLine cli = new CommandLine(new JlamaCli());
@@ -57,7 +48,7 @@ public static void main(String[] args) {
 
         cli.setUsageHelpLongOptionsMaxWidth(256);
 
-        String[] pargs = args.length == 0 ? new String[] {"-h"} : args;
+        String[] pargs = args.length == 0 ? new String[] { "-h" } : args;
         cli.parseWithHandler(new RunLast(), pargs);
     }
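
For context on the condensed style the new formatter enforces, here is a minimal, self-contained picocli sketch written in the same single-line @Command/@Option form. The GreetCli class and its option are hypothetical examples for illustration only; they are not part of this commit.

import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;

// Hypothetical example class (not from this commit), formatted in the same single-line annotation style.
@Command(name = "greet", mixinStandardHelpOptions = true, description = "Prints a greeting")
public class GreetCli implements Runnable {

    @Option(names = { "-n", "--name" }, description = "Name to greet (default: ${DEFAULT-VALUE})", defaultValue = "world")
    String name;

    @Override
    public void run() {
        System.out.println("Hello, " + name);
    }

    public static void main(String[] args) {
        // execute() parses the arguments, runs the command, and returns an exit code.
        System.exit(new CommandLine(new GreetCli()).execute(args));
    }
}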

jlama-cli/src/main/java/com/github/tjake/jlama/cli/commands/ApiServiceCommand.java

+14 -19

@@ -30,19 +30,14 @@
 import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
 import picocli.CommandLine;
 
-@CommandLine.Command(
-        name = "restapi",
-        description = "Starts a openai compatible rest api for interacting with this model")
-@SpringBootApplication(scanBasePackages = {"com.github.tjake.jlama.net.openai", "com.github.tjake.jlama.cli.commands"})
+@CommandLine.Command(name = "restapi", description = "Starts a openai compatible rest api for interacting with this model")
+@SpringBootApplication(scanBasePackages = { "com.github.tjake.jlama.net.openai", "com.github.tjake.jlama.cli.commands" })
 @SpringBootConfiguration
 @Configuration
 public class ApiServiceCommand extends BaseCommand implements WebMvcConfigurer {
     private static final Logger logger = LoggerFactory.getLogger(ApiServiceCommand.class);
 
-    @CommandLine.Option(
-            names = {"-p", "--port"},
-            description = "http port (default: ${DEFAULT-VALUE})",
-            defaultValue = "8080")
+    @CommandLine.Option(names = { "-p", "--port" }, description = "http port (default: ${DEFAULT-VALUE})", defaultValue = "8080")
     int port = 8080;
 
     static volatile AbstractModel m;
@@ -61,21 +56,21 @@ public void addResourceHandlers(ResourceHandlerRegistry registry) {
     public void run() {
         try {
             m = loadModel(
-                    model,
-                    workingDirectory,
-                    workingMemoryType,
-                    workingQuantizationType,
-                    Optional.ofNullable(modelQuantization),
-                    Optional.ofNullable(threadCount));
+                model,
+                workingDirectory,
+                workingMemoryType,
+                workingQuantizationType,
+                Optional.ofNullable(modelQuantization),
+                Optional.ofNullable(threadCount)
+            );
 
             System.out.println("Chat UI: http://localhost:" + port);
             System.out.println("OpenAI Chat API: http://localhost:" + port + "/chat/completions");
 
-            new SpringApplicationBuilder(ApiServiceCommand.class)
-                    .lazyInitialization(true)
-                    .properties("server.port", "" + port, "logging.level.org.springframework.web", "info")
-                    .build()
-                    .run();
+            new SpringApplicationBuilder(ApiServiceCommand.class).lazyInitialization(true)
+                .properties("server.port", "" + port, "logging.level.org.springframework.web", "info")
+                .build()
+                .run();
         } catch (Exception e) {
             e.printStackTrace();
             System.exit(2);

jlama-cli/src/main/java/com/github/tjake/jlama/cli/commands/BaseCommand.java

+7 -17

@@ -20,30 +20,20 @@
 import picocli.CommandLine;
 
 public class BaseCommand extends SimpleBaseCommand {
-    @CommandLine.Option(
-            names = {"-d", "--working-directory"},
-            description = "Working directory for attention cache")
+    @CommandLine.Option(names = { "-d", "--working-directory" }, description = "Working directory for attention cache")
     protected File workingDirectory = null;
 
-    @CommandLine.Option(
-            names = {"-wm", "--working-dtype"},
-            description = "Working memory data type (default: ${DEFAULT-VALUE})",
-            defaultValue = "F32")
+    @CommandLine.Option(names = { "-wm",
+        "--working-dtype" }, description = "Working memory data type (default: ${DEFAULT-VALUE})", defaultValue = "F32")
     protected DType workingMemoryType = DType.F32;
 
-    @CommandLine.Option(
-            names = {"-wq", "--working-qtype"},
-            description = "Working memory quantization data type (default: ${DEFAULT-VALUE})",
-            defaultValue = "I8")
+    @CommandLine.Option(names = { "-wq",
+        "--working-qtype" }, description = "Working memory quantization data type (default: ${DEFAULT-VALUE})", defaultValue = "I8")
     protected DType workingQuantizationType = DType.I8;
 
-    @CommandLine.Option(
-            names = {"-tc", "--threads"},
-            description = "Number of threads to use (default: number of cores)")
+    @CommandLine.Option(names = { "-tc", "--threads" }, description = "Number of threads to use (default: number of cores)")
     protected Integer threadCount = null;
 
-    @CommandLine.Option(
-            names = {"-q", "--quantization"},
-            description = "Model quantization type")
+    @CommandLine.Option(names = { "-q", "--quantization" }, description = "Model quantization type")
     protected DType modelQuantization;
 }

jlama-cli/src/main/java/com/github/tjake/jlama/cli/commands/ChatCommand.java

+12 -18

@@ -34,33 +34,27 @@ public class ChatCommand extends BaseCommand {
     private static final AnsiFormat chatText = new AnsiFormat(Attribute.CYAN_TEXT());
     private static final AnsiFormat statsColor = new AnsiFormat(Attribute.BLUE_TEXT());
 
-    @Option(
-            names = {"-s", "--system-prompt"},
-            description = "Change the default system prompt for this model")
+    @Option(names = { "-s", "--system-prompt" }, description = "Change the default system prompt for this model")
     String systemPrompt = null;
 
-    @Option(
-            names = {"-t", "--temperature"},
-            description = "Temperature of response [0,1] (default: ${DEFAULT-VALUE})",
-            defaultValue = "0.6")
+    @Option(names = { "-t",
+        "--temperature" }, description = "Temperature of response [0,1] (default: ${DEFAULT-VALUE})", defaultValue = "0.6")
     protected Float temperature;
 
-    @Option(
-            names = {"--top-p"},
-            description =
-                    "Controls how many different words the model considers per token [0,1] (default: ${DEFAULT-VALUE})",
-            defaultValue = ".9")
+    @Option(names = {
+        "--top-p" }, description = "Controls how many different words the model considers per token [0,1] (default: ${DEFAULT-VALUE})", defaultValue = ".9")
     protected Float topp;
 
     @Override
     public void run() {
         AbstractModel m = loadModel(
-                model,
-                workingDirectory,
-                workingMemoryType,
-                workingQuantizationType,
-                Optional.ofNullable(modelQuantization),
-                Optional.ofNullable(threadCount));
+            model,
+            workingDirectory,
+            workingMemoryType,
+            workingQuantizationType,
+            Optional.ofNullable(modelQuantization),
+            Optional.ofNullable(threadCount)
+        );
 
         if (m.promptSupport().isEmpty()) {
             System.err.println("This model does not support chat prompting");

jlama-cli/src/main/java/com/github/tjake/jlama/cli/commands/ClusterCoordinatorCommand.java

+13 -23

@@ -18,27 +18,18 @@
 import com.github.tjake.jlama.net.Coordinator;
 import picocli.CommandLine;
 
-@CommandLine.Command(
-        name = "cluster-coordinator",
-        description = "Starts a distributed rest api for a model using cluster workers")
+@CommandLine.Command(name = "cluster-coordinator", description = "Starts a distributed rest api for a model using cluster workers")
 public class ClusterCoordinatorCommand extends BaseCommand {
 
-    @CommandLine.Option(
-            names = {"-w", "--worker-count"},
-            description = "signifies this instance is a coordinator",
-            required = true)
+    @CommandLine.Option(names = { "-w", "--worker-count" }, description = "signifies this instance is a coordinator", required = true)
     int workerCount = 1;
 
-    @CommandLine.Option(
-            names = {"-g", "--grpc-port"},
-            description = "grpc port to listen on (default: ${DEFAULT-VALUE})",
-            defaultValue = "9777")
+    @CommandLine.Option(names = { "-g",
+        "--grpc-port" }, description = "grpc port to listen on (default: ${DEFAULT-VALUE})", defaultValue = "9777")
     int grpcPort = 9777;
 
-    @CommandLine.Option(
-            names = {"-p", "--port"},
-            description = "http port to listen on (default: ${DEFAULT-VALUE})",
-            defaultValue = "8080")
+    @CommandLine.Option(names = { "-p",
+        "--port" }, description = "http port to listen on (default: ${DEFAULT-VALUE})", defaultValue = "8080")
     int port = 8080;
 
     @Override
@@ -47,13 +38,12 @@ public void run() {
         Coordinator c = new Coordinator(model, workingDirectory, grpcPort, workerCount);
 
         new Thread(() -> {
-                    try {
-                        c.start();
-                    } catch (Exception e) {
-                        e.printStackTrace();
-                    }
-                })
-                .start();
+            try {
+                c.start();
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }).start();
 
         /*UndertowJaxrsServer ut = new UndertowJaxrsServer();
         ut.deploy(new JlamaRestApi(c), APPLICATION_PATH);
@@ -62,7 +52,7 @@ public void run() {
             resource(new ClassPathResourceManager(ServeCommand.class.getClassLoader()))
                 .setDirectoryListingEnabled(true)
                 .addWelcomeFiles("index.html"));
-
+
         System.out.println("Chat UI: http://localhost:" + port + "/ui/index.html");
         ut.start(Undertow.builder().addHttpListener(port, "0.0.0.0"));*/
 
jlama-cli/src/main/java/com/github/tjake/jlama/cli/commands/ClusterWorkerCommand.java

+15 -22

@@ -19,44 +19,37 @@
 import java.util.Optional;
 import picocli.CommandLine;
 
-@CommandLine.Command(
-        name = "cluster-worker",
-        description = "Connects to a cluster coordinator to perform distributed inference")
+@CommandLine.Command(name = "cluster-worker", description = "Connects to a cluster coordinator to perform distributed inference")
 public class ClusterWorkerCommand extends BaseCommand {
 
     private static final Boolean useHostnameAsWorkerId = Boolean.getBoolean("jlama.use_hostname_as_workerid");
     private static final String HOSTNAME = System.getenv("HOSTNAME");
 
-    @CommandLine.Option(
-            names = {"-o", "--host"},
-            description = "hostname of coordinator",
-            required = true)
+    @CommandLine.Option(names = { "-o", "--host" }, description = "hostname of coordinator", required = true)
     String host;
 
-    @CommandLine.Option(
-            names = {"-g", "--grpc-port"},
-            description = "grpc port to listen on (default: ${DEFAULT-VALUE})",
-            defaultValue = "9777")
+    @CommandLine.Option(names = { "-g",
+        "--grpc-port" }, description = "grpc port to listen on (default: ${DEFAULT-VALUE})", defaultValue = "9777")
     int grpcPort = 9777;
 
-    @CommandLine.Option(
-            names = {"-w", "--worker-id"},
-            description = "consistent name to use when register this worker with the coordinator")
+    @CommandLine.Option(names = { "-w",
+        "--worker-id" }, description = "consistent name to use when register this worker with the coordinator")
     String workerId = useHostnameAsWorkerId ? HOSTNAME : null;
 
     @Override
     public void run() {
         try {
             if (workerId != null) System.out.println("Using " + workerId + " as worker id");
             Worker w = new Worker(
-                    model,
-                    host,
-                    grpcPort,
-                    workingDirectory,
-                    workingMemoryType,
-                    workingQuantizationType,
-                    Optional.ofNullable(modelQuantization),
-                    Optional.ofNullable(workerId));
+                model,
+                host,
+                grpcPort,
+                workingDirectory,
+                workingMemoryType,
+                workingQuantizationType,
+                Optional.ofNullable(modelQuantization),
+                Optional.ofNullable(workerId)
+            );
             w.run();
         } catch (Exception e) {
             e.printStackTrace();

jlama-cli/src/main/java/com/github/tjake/jlama/cli/commands/CompleteCommand.java

+7 -6

@@ -29,12 +29,13 @@ public class CompleteCommand extends ModelBaseCommand {
     @Override
     public void run() {
         AbstractModel m = loadModel(
-                model,
-                workingDirectory,
-                workingMemoryType,
-                workingQuantizationType,
-                Optional.ofNullable(modelQuantization),
-                Optional.ofNullable(threadCount));
+            model,
+            workingDirectory,
+            workingMemoryType,
+            workingQuantizationType,
+            Optional.ofNullable(modelQuantization),
+            Optional.ofNullable(threadCount)
+        );
         m.generate(UUID.randomUUID(), PromptContext.of(prompt), temperature, tokens, makeOutHandler());
     }
 }
