Skip to content

Commit 8f43b96

Browse files
authored Nov 20, 2024
Merge pull request #262 from johnoliver/otel-functions-2
Add function invocation telemetry
2 parents 02cbd46 + ac0f569 commit 8f43b96

File tree

16 files changed

+848
-155
lines changed

16 files changed

+848
-155
lines changed
 

‎aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIChatCompletion.java

+26-20
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@
5050
import com.microsoft.semantickernel.hooks.PreChatCompletionEvent;
5151
import com.microsoft.semantickernel.hooks.PreToolCallEvent;
5252
import com.microsoft.semantickernel.implementation.CollectionUtil;
53+
import com.microsoft.semantickernel.implementation.telemetry.ChatCompletionSpan;
5354
import com.microsoft.semantickernel.implementation.telemetry.SemanticKernelTelemetry;
5455
import com.microsoft.semantickernel.orchestration.FunctionResult;
5556
import com.microsoft.semantickernel.orchestration.FunctionResultMetadata;
@@ -69,7 +70,6 @@
6970
import com.microsoft.semantickernel.services.chatcompletion.message.ChatMessageContentType;
7071
import com.microsoft.semantickernel.services.chatcompletion.message.ChatMessageImageContent;
7172
import com.microsoft.semantickernel.services.openai.OpenAiServiceBuilder;
72-
import io.opentelemetry.api.trace.Span;
7373
import java.io.IOException;
7474
import java.util.ArrayList;
7575
import java.util.Arrays;
@@ -419,26 +419,32 @@ private Mono<ChatMessages> internalChatMessageContentsAsync(
419419
invocationContext)))
420420
.getOptions();
421421

422-
Span span = SemanticKernelTelemetry.startChatCompletionSpan(
423-
getModelId(),
424-
SemanticKernelTelemetry.OPEN_AI_PROVIDER,
425-
options.getMaxTokens(),
426-
options.getTemperature(),
427-
options.getTopP());
428-
return getClient()
429-
.getChatCompletionsWithResponse(getDeploymentName(), options,
430-
OpenAIRequestSettings.getRequestOptions())
431-
.flatMap(completionsResult -> {
432-
if (completionsResult.getStatusCode() >= 400) {
433-
SemanticKernelTelemetry.endSpanWithError(span);
434-
return Mono.error(new AIException(ErrorCodes.SERVICE_ERROR,
435-
"Request failed: " + completionsResult.getStatusCode()));
436-
}
437-
SemanticKernelTelemetry.endSpanWithUsage(span,
438-
completionsResult.getValue().getUsage());
422+
return Mono.deferContextual(contextView -> {
423+
ChatCompletionSpan span = ChatCompletionSpan.startChatCompletionSpan(
424+
SemanticKernelTelemetry.getTelemetry(invocationContext),
425+
contextView,
426+
getModelId(),
427+
SemanticKernelTelemetry.OPEN_AI_PROVIDER,
428+
options.getMaxTokens(),
429+
options.getTemperature(),
430+
options.getTopP());
431+
432+
return getClient()
433+
.getChatCompletionsWithResponse(getDeploymentName(), options,
434+
OpenAIRequestSettings.getRequestOptions())
435+
.contextWrite(span.getReactorContextModifier())
436+
.flatMap(completionsResult -> {
437+
if (completionsResult.getStatusCode() >= 400) {
438+
return Mono.error(new AIException(ErrorCodes.SERVICE_ERROR,
439+
"Request failed: " + completionsResult.getStatusCode()));
440+
}
439441

440-
return Mono.just(completionsResult.getValue());
441-
})
442+
return Mono.just(completionsResult.getValue());
443+
})
444+
.doOnError(span::endSpanWithError)
445+
.doOnSuccess(span::endSpanWithUsage)
446+
.doOnTerminate(span::close);
447+
})
442448
.flatMap(completions -> {
443449

444450
List<ChatResponseMessage> responseMessages = completions

‎aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/textcompletion/OpenAITextGenerationService.java

-11
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,6 @@
1414
import com.microsoft.semantickernel.services.StreamingTextContent;
1515
import com.microsoft.semantickernel.services.textcompletion.TextContent;
1616
import com.microsoft.semantickernel.services.textcompletion.TextGenerationService;
17-
import com.microsoft.semantickernel.implementation.telemetry.SemanticKernelTelemetry;
18-
import io.opentelemetry.api.trace.Span;
1917
import java.util.Collections;
2018
import java.util.HashMap;
2119
import java.util.List;
@@ -84,23 +82,14 @@ protected Mono<List<TextContent>> internalCompleteTextAsync(
8482

8583
CompletionsOptions completionsOptions = getCompletionsOptions(text, requestSettings);
8684

87-
Span span = SemanticKernelTelemetry.startTextCompletionSpan(
88-
getModelId(),
89-
SemanticKernelTelemetry.OPEN_AI_PROVIDER,
90-
completionsOptions.getMaxTokens(),
91-
completionsOptions.getTemperature(),
92-
completionsOptions.getTopP());
9385
return getClient()
9486
.getCompletionsWithResponse(getDeploymentName(), completionsOptions,
9587
OpenAIRequestSettings.getRequestOptions())
9688
.flatMap(completionsResult -> {
9789
if (completionsResult.getStatusCode() >= 400) {
98-
SemanticKernelTelemetry.endSpanWithError(span);
9990
return Mono.error(new AIException(ErrorCodes.SERVICE_ERROR,
10091
"Request failed: " + completionsResult.getStatusCode()));
10192
}
102-
SemanticKernelTelemetry.endSpanWithUsage(span,
103-
completionsResult.getValue().getUsage());
10493
return Mono.just(completionsResult.getValue());
10594
})
10695
.map(completions -> {

‎aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/OtelCaptureTest.java

-51
Original file line numberDiff line numberDiff line change
@@ -75,57 +75,6 @@ public static void shutdown() {
7575
otel.shutdown();
7676
}
7777

78-
@Test
79-
public void otelTextCaptureTest() {
80-
81-
OpenAIAsyncClient openAIAsyncClient = Mockito.mock(OpenAIAsyncClient.class);
82-
83-
CompletionsUsage completionsUsage = Mockito.mock(CompletionsUsage.class);
84-
Mockito.when(completionsUsage.getCompletionTokens()).thenReturn(22);
85-
Mockito.when(completionsUsage.getPromptTokens()).thenReturn(55);
86-
87-
Completions completions = Mockito.mock(Completions.class);
88-
Mockito.when(completions.getUsage()).thenReturn(completionsUsage);
89-
90-
Response<Completions> response = Mockito.mock(Response.class);
91-
Mockito.when(response.getStatusCode()).thenReturn(200);
92-
Mockito.when(response.getValue()).thenReturn(completions);
93-
94-
Mockito.when(openAIAsyncClient.getCompletionsWithResponse(
95-
Mockito.any(),
96-
Mockito.<CompletionsOptions>any(),
97-
Mockito.any())).thenAnswer(invocation -> Mono.just(response));
98-
99-
TextGenerationService client = OpenAITextGenerationService.builder()
100-
.withOpenAIAsyncClient(openAIAsyncClient)
101-
.withModelId("a-model")
102-
.build();
103-
104-
try {
105-
client.getTextContentsAsync(
106-
"foo",
107-
null,
108-
null).block();
109-
} catch (Exception e) {
110-
// Expect to fail
111-
}
112-
113-
Assertions.assertFalse(spans.isEmpty());
114-
Assertions.assertEquals("a-model",
115-
spans.get(0).getAttributes().get(AttributeKey.stringKey("gen_ai.request.model")));
116-
Assertions.assertEquals("text.completions",
117-
spans.get(0).getAttributes().get(AttributeKey.stringKey("gen_ai.operation.name")));
118-
Assertions.assertEquals("openai",
119-
spans.get(0).getAttributes().get(AttributeKey.stringKey("gen_ai.system")));
120-
Assertions.assertEquals(22,
121-
spans.get(0).getAttributes()
122-
.get(AttributeKey.longKey("gen_ai.response.completion_tokens")));
123-
Assertions.assertEquals(55,
124-
spans.get(0).getAttributes()
125-
.get(AttributeKey.longKey("gen_ai.response.prompt_tokens")));
126-
127-
}
128-
12978
@Test
13079
public void otelChatCaptureTest() {
13180
OpenAIAsyncClient openAIAsyncClient = Mockito.mock(OpenAIAsyncClient.class);

‎samples/semantickernel-concepts/semantickernel-syntax-examples/pom.xml

+6
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,11 @@
2626
</dependencyManagement>
2727

2828
<dependencies>
29+
<dependency>
30+
<groupId>io.opentelemetry.instrumentation</groupId>
31+
<artifactId>opentelemetry-reactor-3.1</artifactId>
32+
<version>2.9.0-alpha</version>
33+
</dependency>
2934
<dependency>
3035
<groupId>com.microsoft.semantic-kernel</groupId>
3136
<artifactId>semantickernel-api</artifactId>
@@ -165,6 +170,7 @@
165170
</executions>
166171
<configuration>
167172
<mainClass>com.microsoft.semantickernel.samples.syntaxexamples.${sample}</mainClass>
173+
<cleanupDaemonThreads>false</cleanupDaemonThreads>
168174
</configuration>
169175
</plugin>
170176
</plugins>
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,337 @@
1+
// Copyright (c) Microsoft. All rights reserved.
2+
package com.microsoft.semantickernel.samples.syntaxexamples.java;
3+
4+
import com.azure.ai.openai.OpenAIAsyncClient;
5+
import com.azure.ai.openai.OpenAIClientBuilder;
6+
import com.azure.core.credential.AzureKeyCredential;
7+
import com.azure.core.credential.KeyCredential;
8+
import com.microsoft.semantickernel.Kernel;
9+
import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
10+
import com.microsoft.semantickernel.exceptions.ConfigurationException;
11+
import com.microsoft.semantickernel.implementation.telemetry.SemanticKernelTelemetry;
12+
import com.microsoft.semantickernel.orchestration.InvocationContext;
13+
import com.microsoft.semantickernel.orchestration.InvocationReturnMode;
14+
import com.microsoft.semantickernel.orchestration.ToolCallBehavior;
15+
import com.microsoft.semantickernel.plugin.KernelPluginFactory;
16+
import com.microsoft.semantickernel.samples.syntaxexamples.functions.Example59_OpenAIFunctionCalling.PetPlugin;
17+
import com.microsoft.semantickernel.semanticfunctions.KernelFunctionArguments;
18+
import com.microsoft.semantickernel.semanticfunctions.annotations.DefineKernelFunction;
19+
import com.microsoft.semantickernel.semanticfunctions.annotations.KernelFunctionParameter;
20+
import com.microsoft.semantickernel.services.ServiceNotFoundException;
21+
import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
22+
import com.microsoft.semantickernel.services.chatcompletion.ChatHistory;
23+
import io.opentelemetry.api.GlobalOpenTelemetry;
24+
import io.opentelemetry.api.trace.Span;
25+
import io.opentelemetry.api.trace.SpanKind;
26+
import io.opentelemetry.api.trace.StatusCode;
27+
import io.opentelemetry.context.Scope;
28+
import java.io.IOException;
29+
import java.math.BigInteger;
30+
import java.nio.charset.StandardCharsets;
31+
import java.security.MessageDigest;
32+
import java.security.NoSuchAlgorithmException;
33+
import java.util.Locale;
34+
import reactor.core.publisher.Mono;
35+
36+
public class FunctionTelemetry_Example {
37+
/*
38+
* // Get the Application Insights agent from
39+
* https://github.com/microsoft/ApplicationInsights-Java, e.g:
40+
* ```
41+
* wget -O "/tmp/applicationinsights-agent-3.6.1.jar"
42+
* "https://github.com/microsoft/ApplicationInsights-Java/releases/download/3.6.1/applicationinsights-agent-3.6.1.jar"
43+
* ```
44+
*
45+
* // Get your application insights connection string from the Azure portal
46+
* ```
47+
* CLIENT_ENDPOINT="<ENDPOINT>" \
48+
* AZURE_CLIENT_KEY="<KEY>" \
49+
* APPLICATIONINSIGHTS_CONNECTION_STRING="<CONNECTION STRING>" \
50+
* MAVEN_OPTS="-javaagent:/tmp/applicationinsights-agent-3.6.1.jar" \
51+
* ../../../mvnw package exec:java -Dsample="java.FunctionTelemetry_Example"
52+
* ```
53+
*
54+
* If you open the Application Insights "Live metrics" view while running this example, you
55+
* should see the telemetry in real-time.
56+
* Otherwise within a few minutes, you should see the telemetry in the Application Insights ->
57+
* Investigate -> Transaction search ui in the Azure portal.
58+
*/
59+
60+
private static final String CLIENT_KEY = System.getenv("CLIENT_KEY");
61+
private static final String AZURE_CLIENT_KEY = System.getenv("AZURE_CLIENT_KEY");
62+
63+
// Only required if AZURE_CLIENT_KEY is set
64+
private static final String CLIENT_ENDPOINT = System.getenv("CLIENT_ENDPOINT");
65+
private static final String MODEL_ID = "gpt-4o";
66+
67+
public static void main(String[] args)
68+
throws ConfigurationException, IOException, NoSuchMethodException, InterruptedException {
69+
requestsWithSpanContext();
70+
testNestedCalls();
71+
requestsWithScope();
72+
73+
Thread.sleep(1000);
74+
}
75+
76+
private static void requestsWithSpanContext() throws IOException {
77+
Span fakeRequest = GlobalOpenTelemetry.getTracer("Custom")
78+
.spanBuilder("GET /requestsWithSpanContext")
79+
.setSpanKind(SpanKind.SERVER)
80+
.setAttribute("http.request.method", "GET")
81+
.setAttribute("url.path", "/requestsWithSpanContext")
82+
.setAttribute("url.scheme", "http")
83+
.startSpan();
84+
85+
// Pass span context to the telemetry object to correlate telemetry with the request
86+
SemanticKernelTelemetry telemetry = new SemanticKernelTelemetry(
87+
GlobalOpenTelemetry.getTracer("Custom"),
88+
fakeRequest.getSpanContext());
89+
90+
sequentialFunctionCalls(telemetry);
91+
92+
fakeRequest.setStatus(StatusCode.OK);
93+
fakeRequest.end();
94+
}
95+
96+
private static void requestsWithScope() throws IOException {
97+
Span fakeRequest = GlobalOpenTelemetry.getTracer("Custom")
98+
.spanBuilder("GET /requestsWithScope")
99+
.setSpanKind(SpanKind.SERVER)
100+
.setAttribute("http.request.method", "GET")
101+
.setAttribute("url.path", "/requestsWithScope")
102+
.setAttribute("url.scheme", "http")
103+
.startSpan();
104+
105+
// Pass span context to the telemetry object to correlate telemetry with the request
106+
SemanticKernelTelemetry telemetry = new SemanticKernelTelemetry();
107+
108+
try (Scope scope = fakeRequest.makeCurrent()) {
109+
sequentialFunctionCalls(telemetry);
110+
}
111+
112+
fakeRequest.setStatus(StatusCode.OK);
113+
fakeRequest.end();
114+
}
115+
116+
public static void sequentialFunctionCalls(SemanticKernelTelemetry telemetry) {
117+
118+
OpenAIAsyncClient client;
119+
120+
if (AZURE_CLIENT_KEY != null) {
121+
client = new OpenAIClientBuilder()
122+
.credential(new AzureKeyCredential(AZURE_CLIENT_KEY))
123+
.endpoint(CLIENT_ENDPOINT)
124+
.buildAsyncClient();
125+
126+
} else {
127+
client = new OpenAIClientBuilder()
128+
.credential(new KeyCredential(CLIENT_KEY))
129+
.buildAsyncClient();
130+
}
131+
132+
ChatCompletionService chat = OpenAIChatCompletion.builder()
133+
.withModelId(MODEL_ID)
134+
.withOpenAIAsyncClient(client)
135+
.build();
136+
137+
var plugin = KernelPluginFactory.createFromObject(new PetPlugin(), "PetPlugin");
138+
139+
var kernel = Kernel.builder()
140+
.withAIService(ChatCompletionService.class, chat)
141+
.withPlugin(plugin)
142+
.build();
143+
144+
var chatHistory = new ChatHistory();
145+
chatHistory.addUserMessage(
146+
"What is the name and type of the pet with id ca2fc6bc-1307-4da6-a009-d7bf88dec37b?");
147+
148+
var messages = chat.getChatMessageContentsAsync(
149+
chatHistory,
150+
kernel,
151+
InvocationContext.builder()
152+
.withToolCallBehavior(ToolCallBehavior.allowAllKernelFunctions(true))
153+
.withReturnMode(InvocationReturnMode.FULL_HISTORY)
154+
.withTelemetry(telemetry)
155+
.build())
156+
.block();
157+
158+
chatHistory = new ChatHistory(messages);
159+
160+
System.out.println(
161+
"THE NAME AND TYPE IS: " + chatHistory.getLastMessage().get().getContent());
162+
}
163+
164+
public static void testNestedCalls() {
165+
166+
OpenAIAsyncClient client;
167+
168+
if (AZURE_CLIENT_KEY != null) {
169+
client = new OpenAIClientBuilder()
170+
.credential(new AzureKeyCredential(AZURE_CLIENT_KEY))
171+
.endpoint(CLIENT_ENDPOINT)
172+
.buildAsyncClient();
173+
174+
} else {
175+
client = new OpenAIClientBuilder()
176+
.credential(new KeyCredential(CLIENT_KEY))
177+
.buildAsyncClient();
178+
}
179+
180+
ChatCompletionService chat = OpenAIChatCompletion.builder()
181+
.withModelId(MODEL_ID)
182+
.withOpenAIAsyncClient(client)
183+
.build();
184+
185+
var plugin = KernelPluginFactory.createFromObject(new TextAnalysisPlugin(),
186+
"TextAnalysisPlugin");
187+
188+
var kernel = Kernel.builder()
189+
.withAIService(ChatCompletionService.class, chat)
190+
.withPlugin(plugin)
191+
.build();
192+
193+
SemanticKernelTelemetry telemetry = new SemanticKernelTelemetry();
194+
195+
Span span = GlobalOpenTelemetry.getTracer("Test")
196+
.spanBuilder("testNestedCalls span")
197+
.setSpanKind(SpanKind.SERVER)
198+
.startSpan();
199+
200+
try (Scope scope = span.makeCurrent()) {
201+
String analysed = kernel
202+
.invokePromptAsync(
203+
"""
204+
Analyse the following text:
205+
Hello There
206+
""",
207+
KernelFunctionArguments.builder().build(),
208+
InvocationContext.builder()
209+
.withToolCallBehavior(ToolCallBehavior.allowAllKernelFunctions(true))
210+
.withReturnMode(InvocationReturnMode.NEW_MESSAGES_ONLY)
211+
.withTelemetry(telemetry)
212+
.build())
213+
.withResultType(String.class)
214+
.map(result -> {
215+
return result.getResult();
216+
})
217+
.block();
218+
System.out.println(analysed);
219+
} finally {
220+
span.end();
221+
}
222+
223+
}
224+
225+
public static class TextAnalysisPlugin {
226+
227+
@DefineKernelFunction(description = "Change all string chars to uppercase.", name = "Uppercase")
228+
public String uppercase(
229+
@KernelFunctionParameter(description = "Text to uppercase", name = "input") String text) {
230+
return text.toUpperCase(Locale.ROOT);
231+
}
232+
233+
@DefineKernelFunction(name = "sha256sum", description = "Calculates a sha256 of the input", returnType = "string")
234+
public Mono<String> sha256sum(
235+
@KernelFunctionParameter(name = "input", description = "The input to checksum", type = String.class) String input,
236+
Kernel kernel,
237+
SemanticKernelTelemetry telemetry) throws NoSuchAlgorithmException {
238+
MessageDigest digest = MessageDigest.getInstance("SHA-256");
239+
byte[] hash = digest.digest(input.getBytes(StandardCharsets.UTF_8));
240+
String hashStr = new BigInteger(1, hash).toString(16);
241+
242+
return kernel
243+
.invokePromptAsync(
244+
"""
245+
Uppercase the following text:
246+
=== BEGIN TEXT ===
247+
%s
248+
=== END TEXT ===
249+
""".formatted(hashStr)
250+
.stripIndent(),
251+
null,
252+
InvocationContext.builder()
253+
.withToolCallBehavior(ToolCallBehavior.allowAllKernelFunctions(true))
254+
.withReturnMode(InvocationReturnMode.NEW_MESSAGES_ONLY)
255+
.withTelemetry(telemetry)
256+
.build())
257+
.withResultType(String.class)
258+
.map(result -> {
259+
return result.getResult();
260+
});
261+
}
262+
263+
@DefineKernelFunction(name = "formatAnswer", description = "Formats an answer", returnType = "string")
264+
public Mono<String> formatAnswer(
265+
@KernelFunctionParameter(name = "input", description = "The input to format", type = String.class) String input,
266+
Kernel kernel,
267+
SemanticKernelTelemetry telemetry) throws ServiceNotFoundException {
268+
269+
return kernel
270+
.invokePromptAsync(
271+
"""
272+
Translate the following text into Italian:
273+
=== BEGIN TEXT ===
274+
%s
275+
=== END TEXT ===
276+
""".formatted(input)
277+
.stripIndent())
278+
.withResultType(String.class)
279+
.map(result -> {
280+
return result.getResult();
281+
});
282+
}
283+
284+
@DefineKernelFunction(name = "analyseInput", description = "Gives a text analysis of the input", returnType = "string")
285+
public Mono<String> analyseInput(
286+
@KernelFunctionParameter(name = "input", description = "The input to analyse", type = String.class) String input,
287+
Kernel kernel,
288+
SemanticKernelTelemetry telemetry) throws ServiceNotFoundException {
289+
290+
return kernel
291+
.invokePromptAsync(
292+
"""
293+
Calculating sha256sum of the following text:
294+
=== BEGIN TEXT ===
295+
%s
296+
=== END TEXT ===
297+
""".formatted(input)
298+
.stripIndent(),
299+
null,
300+
InvocationContext.builder()
301+
.withToolCallBehavior(ToolCallBehavior.allowAllKernelFunctions(true))
302+
.withReturnMode(InvocationReturnMode.NEW_MESSAGES_ONLY)
303+
.withTelemetry(telemetry)
304+
.build())
305+
.withResultType(String.class)
306+
.map(result -> {
307+
return result.getResult();
308+
})
309+
.flatMap(answer -> {
310+
return kernel
311+
.invokePromptAsync(
312+
"""
313+
Format the following text:
314+
=== BEGIN TEXT ===
315+
%s
316+
=== END TEXT ===
317+
""".formatted(answer)
318+
.stripIndent())
319+
.withInvocationContext(
320+
InvocationContext.builder()
321+
.withToolCallBehavior(
322+
ToolCallBehavior.allowAllKernelFunctions(true))
323+
.withReturnMode(InvocationReturnMode.NEW_MESSAGES_ONLY)
324+
.withTelemetry(telemetry)
325+
.build())
326+
.withArguments(null)
327+
.withTelemetry(telemetry)
328+
.withResultType(String.class);
329+
})
330+
.map(it -> {
331+
return it.getResult();
332+
});
333+
}
334+
335+
}
336+
337+
}

‎samples/semantickernel-concepts/semantickernel-syntax-examples/src/main/java/com/microsoft/semantickernel/samples/syntaxexamples/java/KernelFunctionYaml_Example.java

+15-6
Original file line numberDiff line numberDiff line change
@@ -10,13 +10,14 @@
1010
import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
1111
import com.microsoft.semantickernel.exceptions.ConfigurationException;
1212
import com.microsoft.semantickernel.implementation.EmbeddedResourceLoader;
13+
import com.microsoft.semantickernel.implementation.telemetry.SemanticKernelTelemetry;
1314
import com.microsoft.semantickernel.orchestration.FunctionResult;
1415
import com.microsoft.semantickernel.semanticfunctions.KernelFunction;
1516
import com.microsoft.semantickernel.semanticfunctions.KernelFunctionArguments;
1617
import com.microsoft.semantickernel.semanticfunctions.KernelFunctionYaml;
1718
import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
18-
import com.microsoft.semantickernel.services.textcompletion.TextGenerationService;
1919
import java.io.IOException;
20+
import javax.annotation.Nullable;
2021

2122
public class KernelFunctionYaml_Example {
2223

@@ -29,7 +30,10 @@ public class KernelFunctionYaml_Example {
2930
.getOrDefault("MODEL_ID", "gpt-35-turbo");
3031

3132
public static void main(String[] args) throws ConfigurationException, IOException {
33+
run(null);
34+
}
3235

36+
public static void run(@Nullable SemanticKernelTelemetry telemetry) throws IOException {
3337
OpenAIAsyncClient client;
3438

3539
if (AZURE_CLIENT_KEY != null) {
@@ -51,12 +55,13 @@ public static void main(String[] args) throws ConfigurationException, IOExceptio
5155
Builder kernelBuilder = Kernel.builder()
5256
.withAIService(ChatCompletionService.class, openAIChatCompletion);
5357

54-
semanticKernelTemplate(kernelBuilder.build());
55-
handlebarsTemplate(kernelBuilder.build());
56-
58+
semanticKernelTemplate(kernelBuilder.build(), telemetry);
59+
handlebarsTemplate(kernelBuilder.build(), telemetry);
5760
}
5861

59-
private static void handlebarsTemplate(Kernel kernel) throws IOException {
62+
private static void handlebarsTemplate(Kernel kernel,
63+
@Nullable SemanticKernelTelemetry telemetry)
64+
throws IOException {
6065
String yaml = EmbeddedResourceLoader.readFile("GenerateStoryHandlebars.yaml",
6166
KernelFunctionYaml_Example.class);
6267

@@ -69,12 +74,15 @@ private static void handlebarsTemplate(Kernel kernel) throws IOException {
6974
.withVariable("length", 5)
7075
.withVariable("topic", "dogs")
7176
.build())
77+
.withTelemetry(telemetry)
7278
.block();
7379

7480
System.out.println(result.getResult());
7581
}
7682

77-
private static void semanticKernelTemplate(Kernel kernel) throws IOException {
83+
private static void semanticKernelTemplate(Kernel kernel,
84+
@Nullable SemanticKernelTelemetry telemetry)
85+
throws IOException {
7886
String yaml = EmbeddedResourceLoader.readFile("GenerateStory.yaml",
7987
KernelFunctionYaml_Example.class);
8088

@@ -87,6 +95,7 @@ private static void semanticKernelTemplate(Kernel kernel) throws IOException {
8795
.withVariable("length", 5)
8896
.withVariable("topic", "cats")
8997
.build())
98+
.withTelemetry(telemetry)
9099
.block();
91100

92101
System.out.println(result.getResult());
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
{
2+
"connectionString": "InstrumentationKey=00000000-0000-0000-0000-0000-000000000000",
3+
"preview": {
4+
"processors": [
5+
{
6+
"type": "span",
7+
"include": {
8+
"matchType": "regexp",
9+
"spanNames": [
10+
".*"
11+
]
12+
}
13+
}
14+
]
15+
}
16+
}

‎semantickernel-api/pom.xml

+7-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
<?xml version="1.0" encoding="UTF-8"?>
22

3-
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
3+
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4+
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
45
<modelVersion>4.0.0</modelVersion>
56

67
<parent>
@@ -15,6 +16,11 @@
1516
<name>Semantic Kernel API</name>
1617
<description>Defines the public interface for the Semantic Kernel</description>
1718
<dependencies>
19+
<dependency>
20+
<groupId>io.opentelemetry.instrumentation</groupId>
21+
<artifactId>opentelemetry-reactor-3.1</artifactId>
22+
<version>2.9.0-alpha</version>
23+
</dependency>
1824
<dependency>
1925
<groupId>com.azure</groupId>
2026
<artifactId>azure-ai-openai</artifactId>
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,116 @@
1+
// Copyright (c) Microsoft. All rights reserved.
2+
package com.microsoft.semantickernel.implementation.telemetry;
3+
4+
import com.azure.ai.openai.models.ChatCompletions;
5+
import com.azure.ai.openai.models.CompletionsUsage;
6+
import io.opentelemetry.api.trace.Span;
7+
import io.opentelemetry.api.trace.SpanBuilder;
8+
import io.opentelemetry.api.trace.SpanKind;
9+
import io.opentelemetry.api.trace.StatusCode;
10+
import io.opentelemetry.context.Scope;
11+
import java.util.function.Function;
12+
import javax.annotation.Nullable;
13+
import reactor.util.context.Context;
14+
import reactor.util.context.ContextView;
15+
16+
public class ChatCompletionSpan extends SemanticKernelTelemetrySpan {
17+
18+
public ChatCompletionSpan(
19+
Span span,
20+
Function<Context, Context> reactorContextModifier,
21+
Scope spanScope,
22+
Scope contextScope) {
23+
super(span, reactorContextModifier, spanScope, contextScope);
24+
}
25+
26+
public static ChatCompletionSpan startChatCompletionSpan(
27+
SemanticKernelTelemetry telemetry,
28+
ContextView contextView,
29+
@Nullable String modelName,
30+
String modelProvider,
31+
@Nullable Integer maxTokens,
32+
@Nullable Double temperature,
33+
@Nullable Double topP) {
34+
return startCompletionSpan(
35+
telemetry,
36+
contextView,
37+
"chat.completions",
38+
modelName,
39+
modelProvider,
40+
maxTokens,
41+
temperature, topP);
42+
}
43+
44+
public ChatCompletionSpan startTextCompletionSpan(
45+
SemanticKernelTelemetry telemetry,
46+
ContextView contextView,
47+
@Nullable String modelName,
48+
String modelProvider,
49+
@Nullable Integer maxTokens,
50+
@Nullable Double temperature,
51+
@Nullable Double topP) {
52+
return startCompletionSpan(
53+
telemetry,
54+
contextView,
55+
"text.completions",
56+
modelName,
57+
modelProvider,
58+
maxTokens,
59+
temperature, topP);
60+
}
61+
62+
public static ChatCompletionSpan startCompletionSpan(
63+
SemanticKernelTelemetry telemetry,
64+
ContextView contextView,
65+
String operationName,
66+
@Nullable String modelName,
67+
String modelProvider,
68+
@Nullable Integer maxTokens,
69+
@Nullable Double temperature,
70+
@Nullable Double topP) {
71+
if (modelName == null) {
72+
modelName = "unknown";
73+
}
74+
75+
SpanBuilder builder = telemetry.spanBuilder(operationName + " " + modelName)
76+
.setSpanKind(SpanKind.CLIENT)
77+
.setAttribute("gen_ai.request.model", modelName)
78+
.setAttribute("gen_ai.operation.name", operationName)
79+
.setAttribute("gen_ai.system", modelProvider);
80+
81+
if (maxTokens != null) {
82+
builder.setAttribute("gen_ai.request.max_tokens", maxTokens);
83+
}
84+
if (temperature != null) {
85+
builder.setAttribute("gen_ai.request.temperature", temperature);
86+
}
87+
if (topP != null) {
88+
builder.setAttribute("gen_ai.request.top_p", topP);
89+
}
90+
91+
Span span = builder.startSpan();
92+
93+
return build(
94+
span,
95+
contextView,
96+
(contextModifier, spanScope, contextScope) -> new ChatCompletionSpan(
97+
span,
98+
contextModifier,
99+
spanScope,
100+
contextScope));
101+
}
102+
103+
public void endSpanWithUsage(ChatCompletions chatCompletions) {
104+
CompletionsUsage usage = chatCompletions.getUsage();
105+
getSpan().setStatus(StatusCode.OK);
106+
getSpan()
107+
.setAttribute("gen_ai.response.completion_tokens", usage.getCompletionTokens());
108+
getSpan().setAttribute("gen_ai.response.prompt_tokens", usage.getPromptTokens());
109+
close();
110+
}
111+
112+
public void endSpanWithError(Throwable throwable) {
113+
getSpan().setStatus(StatusCode.ERROR, throwable.getMessage());
114+
close();
115+
}
116+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
// Copyright (c) Microsoft. All rights reserved.
2+
package com.microsoft.semantickernel.implementation.telemetry;
3+
4+
import com.microsoft.semantickernel.orchestration.FunctionResult;
5+
import com.microsoft.semantickernel.semanticfunctions.KernelFunctionArguments;
6+
import io.opentelemetry.api.trace.Span;
7+
import io.opentelemetry.api.trace.SpanBuilder;
8+
import io.opentelemetry.api.trace.SpanKind;
9+
import io.opentelemetry.api.trace.StatusCode;
10+
import io.opentelemetry.context.Scope;
11+
import java.util.function.Function;
12+
import reactor.util.context.Context;
13+
import reactor.util.context.ContextView;
14+
15+
public class FunctionSpan extends SemanticKernelTelemetrySpan {
16+
17+
public FunctionSpan(
18+
Span span,
19+
Function<Context, Context> reactorContextModifier,
20+
Scope spanScope,
21+
Scope contextScope) {
22+
super(span, reactorContextModifier, spanScope, contextScope);
23+
}
24+
25+
public static FunctionSpan build(
26+
SemanticKernelTelemetry telemetry,
27+
ContextView contextView,
28+
String pluginName,
29+
String name,
30+
KernelFunctionArguments arguments) {
31+
32+
SpanBuilder builder = telemetry.spanBuilder(
33+
String.format("function_invocation %s-%s", pluginName, name))
34+
.setSpanKind(SpanKind.INTERNAL)
35+
.setAttribute("semantic_kernel.function.invocation.name", name)
36+
.setAttribute("semantic_kernel.function.invocation.plugin_name", pluginName);
37+
38+
Span span = builder.startSpan();
39+
40+
return build(
41+
span,
42+
contextView,
43+
(contextModifier, spanScope, contextScope) -> new FunctionSpan(
44+
span,
45+
contextModifier,
46+
spanScope,
47+
contextScope));
48+
}
49+
50+
public <T> void onFunctionSuccess(FunctionResult<T> result) {
51+
try {
52+
getSpan().setStatus(StatusCode.OK);
53+
} finally {
54+
close();
55+
}
56+
}
57+
58+
public void onFunctionError(Throwable error) {
59+
try {
60+
getSpan().setStatus(StatusCode.ERROR, error.getMessage());
61+
getSpan().recordException(error);
62+
} finally {
63+
close();
64+
}
65+
}
66+
}
Original file line numberDiff line numberDiff line change
@@ -1,79 +1,54 @@
11
// Copyright (c) Microsoft. All rights reserved.
22
package com.microsoft.semantickernel.implementation.telemetry;
33

4-
import com.azure.ai.openai.models.CompletionsUsage;
4+
import com.microsoft.semantickernel.orchestration.InvocationContext;
55
import io.opentelemetry.api.GlobalOpenTelemetry;
6-
import io.opentelemetry.api.OpenTelemetry;
7-
import io.opentelemetry.api.trace.Span;
86
import io.opentelemetry.api.trace.SpanBuilder;
9-
import io.opentelemetry.api.trace.StatusCode;
7+
import io.opentelemetry.api.trace.SpanContext;
8+
import io.opentelemetry.api.trace.Tracer;
109
import javax.annotation.Nullable;
1110

1211
public class SemanticKernelTelemetry {
1312

1413
public static final String OPEN_AI_PROVIDER = "openai";
1514

16-
public static Span startChatCompletionSpan(
17-
@Nullable String modelName,
18-
String modelProvider,
19-
@Nullable Integer maxTokens,
20-
@Nullable Double temperature,
21-
@Nullable Double topP) {
22-
return startCompletionSpan("chat.completions", modelName, modelProvider, maxTokens,
23-
temperature, topP);
24-
}
15+
private final Tracer tracer;
2516

26-
public static Span startTextCompletionSpan(
27-
@Nullable String modelName,
28-
String modelProvider,
29-
@Nullable Integer maxTokens,
30-
@Nullable Double temperature,
31-
@Nullable Double topP) {
32-
return startCompletionSpan("text.completions", modelName, modelProvider, maxTokens,
33-
temperature, topP);
34-
}
17+
@Nullable
18+
private final SpanContext spanContext;
3519

36-
private static Span startCompletionSpan(
37-
String operationName,
38-
@Nullable String modelName,
39-
String modelProvider,
40-
@Nullable Integer maxTokens,
41-
@Nullable Double temperature,
42-
@Nullable Double topP) {
43-
OpenTelemetry otel = GlobalOpenTelemetry.get();
20+
public SemanticKernelTelemetry(
21+
Tracer tracer,
22+
@Nullable SpanContext spanContext) {
4423

45-
if (modelName == null) {
46-
modelName = "unknown";
47-
}
48-
SpanBuilder builder = otel
49-
.getTracer("SemanticKernel")
50-
.spanBuilder(operationName + " " + modelName)
51-
.setAttribute("gen_ai.request.model", modelName)
52-
.setAttribute("gen_ai.operation.name", operationName)
53-
.setAttribute("gen_ai.system", modelProvider);
24+
this.tracer = tracer;
25+
this.spanContext = spanContext;
26+
}
5427

55-
if (maxTokens != null) {
56-
builder.setAttribute("gen_ai.request.max_tokens", maxTokens);
57-
}
58-
if (temperature != null) {
59-
builder.setAttribute("gen_ai.request.temperature", temperature);
60-
}
61-
if (topP != null) {
62-
builder.setAttribute("gen_ai.request.top_p", topP);
63-
}
28+
public SemanticKernelTelemetry() {
29+
this(
30+
GlobalOpenTelemetry.getTracer("SemanticKernel"),
31+
null);
32+
}
6433

65-
return builder.startSpan();
34+
public static SemanticKernelTelemetry getTelemetry(
35+
@Nullable InvocationContext invocationContext) {
36+
if (invocationContext != null) {
37+
return invocationContext.getTelemetry();
38+
}
39+
return new SemanticKernelTelemetry();
6640
}
6741

68-
public static void endSpanWithUsage(Span span, CompletionsUsage usage) {
69-
span.setStatus(StatusCode.OK);
70-
span.setAttribute("gen_ai.response.completion_tokens", usage.getCompletionTokens());
71-
span.setAttribute("gen_ai.response.prompt_tokens", usage.getPromptTokens());
72-
span.end();
42+
private Tracer getTracer() {
43+
return tracer;
7344
}
7445

75-
public static void endSpanWithError(Span span) {
76-
span.setStatus(StatusCode.ERROR);
77-
span.end();
46+
public SpanBuilder spanBuilder(String operationName) {
47+
SpanBuilder sb = tracer.spanBuilder(operationName);
48+
49+
if (spanContext != null) {
50+
sb.addLink(spanContext);
51+
}
52+
return sb;
7853
}
7954
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,131 @@
1+
// Copyright (c) Microsoft. All rights reserved.
2+
package com.microsoft.semantickernel.implementation.telemetry;
3+
4+
import io.opentelemetry.api.trace.Span;
5+
import io.opentelemetry.context.Context;
6+
import io.opentelemetry.context.Scope;
7+
import io.opentelemetry.instrumentation.reactor.v3_1.ContextPropagationOperator;
8+
import java.io.Closeable;
9+
import java.time.Duration;
10+
import java.util.concurrent.atomic.AtomicBoolean;
11+
import java.util.function.Function;
12+
import org.slf4j.Logger;
13+
import reactor.core.Disposable;
14+
import reactor.core.publisher.Mono;
15+
import reactor.util.context.ContextView;
16+
17+
public abstract class SemanticKernelTelemetrySpan implements Closeable {
18+
19+
private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(
20+
SemanticKernelTelemetrySpan.class);
21+
22+
private static final long SPAN_TIMEOUT_MS = Long.parseLong((String) System.getProperties()
23+
.getOrDefault("semantickernel.telemetry.span_timeout", "120000"));
24+
25+
private final Span span;
26+
private final Function<reactor.util.context.Context, reactor.util.context.Context> reactorContextModifier;
27+
private final Scope spanScope;
28+
private final Scope contextScope;
29+
private final AtomicBoolean closed = new AtomicBoolean(false);
30+
31+
// Timeout to close the span if it was not closed within the specified time to avoid memory leaks
32+
private final Disposable watchdog;
33+
34+
// This is a finalizer guardian to ensure that the span is closed if it was not closed explicitly
35+
@SuppressWarnings("unused")
36+
private final Object finalizerGuardian = new Object() {
37+
@Override
38+
protected void finalize() {
39+
if (closed.get() == false) {
40+
LOGGER.warn("Span was not closed");
41+
close();
42+
}
43+
}
44+
};
45+
46+
public SemanticKernelTelemetrySpan(Span span,
47+
Function<reactor.util.context.Context, reactor.util.context.Context> reactorContextModifier,
48+
Scope spanScope, Scope contextScope) {
49+
this.span = span;
50+
this.reactorContextModifier = reactorContextModifier;
51+
this.spanScope = spanScope;
52+
this.contextScope = contextScope;
53+
54+
watchdog = Mono.just(1)
55+
.delay(Duration.ofMillis(SPAN_TIMEOUT_MS))
56+
.subscribe(i -> {
57+
if (closed.get() == false) {
58+
LOGGER.warn("Span was not closed, timing out");
59+
close();
60+
}
61+
});
62+
}
63+
64+
public interface SpanConstructor<T extends SemanticKernelTelemetrySpan> {
65+
66+
public T build(
67+
Function<reactor.util.context.Context, reactor.util.context.Context> contextModifier,
68+
Scope spanScope,
69+
Scope contextScope);
70+
}
71+
72+
// Does need to be closed but as we are doing this in a reactive app, cant enforce the try with resources
73+
@SuppressWarnings("MustBeClosedChecker")
74+
public static <T extends SemanticKernelTelemetrySpan> T build(
75+
Span span,
76+
ContextView contextView,
77+
SpanConstructor<T> builder) {
78+
LOGGER.trace("Starting Span: {}", span);
79+
80+
Context currentOtelContext = ContextPropagationOperator
81+
.getOpenTelemetryContextFromContextView(
82+
contextView,
83+
Context.current());
84+
85+
Context otelContext = span.storeInContext(currentOtelContext);
86+
Scope contextScope = otelContext.makeCurrent();
87+
Scope spanScope = span.makeCurrent();
88+
89+
Function<reactor.util.context.Context, reactor.util.context.Context> reactorContextModifier = ctx -> {
90+
return ContextPropagationOperator.storeOpenTelemetryContext(ctx, otelContext);
91+
};
92+
93+
return builder.build(reactorContextModifier, spanScope, contextScope);
94+
}
95+
96+
public Function<reactor.util.context.Context, reactor.util.context.Context> getReactorContextModifier() {
97+
return reactorContextModifier;
98+
}
99+
100+
public void close() {
101+
if (closed.compareAndSet(false, true)) {
102+
LOGGER.trace("Closing span: {}", span);
103+
if (span.isRecording()) {
104+
try {
105+
span.end();
106+
} catch (Exception e) {
107+
LOGGER.error("Error closing span", e);
108+
}
109+
}
110+
if (contextScope != null) {
111+
try {
112+
contextScope.close();
113+
} catch (Exception e) {
114+
LOGGER.error("Error closing context scope", e);
115+
}
116+
}
117+
if (spanScope != null) {
118+
try {
119+
spanScope.close();
120+
} catch (Exception e) {
121+
LOGGER.error("Error closing span scope", e);
122+
}
123+
}
124+
watchdog.dispose();
125+
}
126+
}
127+
128+
public Span getSpan() {
129+
return span;
130+
}
131+
}

‎semantickernel-api/src/main/java/com/microsoft/semantickernel/orchestration/FunctionInvocation.java

+21-1
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
import com.microsoft.semantickernel.hooks.KernelHook;
1212
import com.microsoft.semantickernel.hooks.KernelHooks;
1313
import com.microsoft.semantickernel.hooks.KernelHooks.UnmodifiableKernelHooks;
14+
import com.microsoft.semantickernel.implementation.telemetry.SemanticKernelTelemetry;
1415
import com.microsoft.semantickernel.localization.SemanticKernelResources;
1516
import com.microsoft.semantickernel.semanticfunctions.KernelFunction;
1617
import com.microsoft.semantickernel.semanticfunctions.KernelFunctionArguments;
@@ -47,6 +48,8 @@ public class FunctionInvocation<T> extends Mono<FunctionResult<T>> {
4748
protected PromptExecutionSettings promptExecutionSettings;
4849
@Nullable
4950
protected ToolCallBehavior toolCallBehavior;
51+
@Nullable
52+
protected SemanticKernelTelemetry telemetry;
5053

5154
private boolean isSubscribed = false;
5255

@@ -312,6 +315,17 @@ public FunctionInvocation<T> withTypes(ContextVariableTypes contextVariableTypes
312315
return this;
313316
}
314317

318+
/**
319+
* Supply a tracer to the function invocation.
320+
*
321+
* @param tracer The tracer to supply to the function invocation.
322+
* @return this {@code FunctionInvocation} for fluent chaining.
323+
*/
324+
public FunctionInvocation<T> withTelemetry(SemanticKernelTelemetry telemetry) {
325+
this.telemetry = telemetry;
326+
return this;
327+
}
328+
315329
/**
316330
* Use an invocation context variable to supply the types, tool call behavior, prompt execution
317331
* settings, and kernel hooks to the function invocation.
@@ -329,6 +343,7 @@ public FunctionInvocation<T> withInvocationContext(
329343
withToolCallBehavior(invocationContext.getToolCallBehavior());
330344
withPromptExecutionSettings(invocationContext.getPromptExecutionSettings());
331345
addKernelHooks(invocationContext.getKernelHooks());
346+
withTelemetry(invocationContext.getTelemetry());
332347
return this;
333348
}
334349

@@ -356,6 +371,10 @@ public void subscribe(CoreSubscriber<? super FunctionResult<T>> coreSubscriber)
356371
function.getPluginName(), function.getName());
357372
}
358373

374+
if (telemetry == null) {
375+
telemetry = new SemanticKernelTelemetry();
376+
}
377+
359378
isSubscribed = true;
360379

361380
performSubscribe(
@@ -369,7 +388,8 @@ public void subscribe(CoreSubscriber<? super FunctionResult<T>> coreSubscriber)
369388
promptExecutionSettings,
370389
toolCallBehavior,
371390
contextVariableTypes,
372-
InvocationReturnMode.NEW_MESSAGES_ONLY));
391+
InvocationReturnMode.NEW_MESSAGES_ONLY,
392+
telemetry));
373393
}
374394

375395
}

‎semantickernel-api/src/main/java/com/microsoft/semantickernel/orchestration/InvocationContext.java

+31-3
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
import com.microsoft.semantickernel.contextvariables.ContextVariableTypes;
77
import com.microsoft.semantickernel.hooks.KernelHooks;
88
import com.microsoft.semantickernel.hooks.KernelHooks.UnmodifiableKernelHooks;
9+
import com.microsoft.semantickernel.implementation.telemetry.SemanticKernelTelemetry;
910
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
1011
import javax.annotation.Nullable;
1112

@@ -24,6 +25,7 @@ public class InvocationContext {
2425
private final ToolCallBehavior toolCallBehavior;
2526
private final ContextVariableTypes contextVariableTypes;
2627
private final InvocationReturnMode invocationReturnMode;
28+
private final SemanticKernelTelemetry telemetry;
2729

2830
/**
2931
* Create a new instance of InvocationContext.
@@ -38,7 +40,8 @@ protected InvocationContext(
3840
@Nullable PromptExecutionSettings promptExecutionSettings,
3941
@Nullable ToolCallBehavior toolCallBehavior,
4042
@Nullable ContextVariableTypes contextVariableTypes,
41-
InvocationReturnMode invocationReturnMode) {
43+
InvocationReturnMode invocationReturnMode,
44+
SemanticKernelTelemetry telemetry) {
4245
this.hooks = unmodifiableClone(hooks);
4346
this.promptExecutionSettings = promptExecutionSettings;
4447
this.toolCallBehavior = toolCallBehavior;
@@ -48,6 +51,7 @@ protected InvocationContext(
4851
} else {
4952
this.contextVariableTypes = new ContextVariableTypes(contextVariableTypes);
5053
}
54+
this.telemetry = telemetry;
5155
}
5256

5357
/**
@@ -59,6 +63,7 @@ protected InvocationContext() {
5963
this.toolCallBehavior = null;
6064
this.contextVariableTypes = new ContextVariableTypes();
6165
this.invocationReturnMode = InvocationReturnMode.NEW_MESSAGES_ONLY;
66+
this.telemetry = null;
6267
}
6368

6469
/**
@@ -73,12 +78,14 @@ protected InvocationContext(@Nullable InvocationContext context) {
7378
this.toolCallBehavior = null;
7479
this.contextVariableTypes = new ContextVariableTypes();
7580
this.invocationReturnMode = InvocationReturnMode.NEW_MESSAGES_ONLY;
81+
this.telemetry = null;
7682
} else {
7783
this.hooks = context.hooks;
7884
this.promptExecutionSettings = context.promptExecutionSettings;
7985
this.toolCallBehavior = context.toolCallBehavior;
8086
this.contextVariableTypes = context.contextVariableTypes;
8187
this.invocationReturnMode = context.invocationReturnMode;
88+
this.telemetry = context.telemetry;
8289
}
8390
}
8491

@@ -114,7 +121,8 @@ public static Builder copy(InvocationContext context) {
114121
.withKernelHooks(context.getKernelHooks())
115122
.withContextVariableConverter(context.contextVariableTypes)
116123
.withPromptExecutionSettings(context.getPromptExecutionSettings())
117-
.withToolCallBehavior(context.getToolCallBehavior());
124+
.withToolCallBehavior(context.getToolCallBehavior())
125+
.withTelemetry(context.getTelemetry());
118126
}
119127

120128
/**
@@ -166,6 +174,10 @@ public InvocationReturnMode returnMode() {
166174
return invocationReturnMode;
167175
}
168176

177+
public SemanticKernelTelemetry getTelemetry() {
178+
return telemetry;
179+
}
180+
169181
/**
170182
* Builder for {@link InvocationContext}.
171183
*/
@@ -179,6 +191,8 @@ public static class Builder implements SemanticKernelBuilder<InvocationContext>
179191
@Nullable
180192
private ToolCallBehavior toolCallBehavior;
181193
private InvocationReturnMode invocationReturnMode = InvocationReturnMode.NEW_MESSAGES_ONLY;
194+
@Nullable
195+
private SemanticKernelTelemetry telemetry;
182196

183197
/**
184198
* Add kernel hooks to the builder.
@@ -252,10 +266,24 @@ public Builder withReturnMode(InvocationReturnMode invocationReturnMode) {
252266
return this;
253267
}
254268

269+
/**
270+
* Add a tracer to the builder.
271+
*
272+
* @param tracer the tracer to add.
273+
* @return this {@link Builder}
274+
*/
275+
public Builder withTelemetry(@Nullable SemanticKernelTelemetry telemetry) {
276+
this.telemetry = telemetry;
277+
return this;
278+
}
279+
255280
@Override
256281
public InvocationContext build() {
282+
if (telemetry == null) {
283+
telemetry = new SemanticKernelTelemetry();
284+
}
257285
return new InvocationContext(hooks, promptExecutionSettings, toolCallBehavior,
258-
contextVariableTypes, invocationReturnMode);
286+
contextVariableTypes, invocationReturnMode, telemetry);
259287
}
260288
}
261289

‎semantickernel-api/src/main/java/com/microsoft/semantickernel/semanticfunctions/KernelFunctionFromMethod.java

+26-3
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@
1414
import com.microsoft.semantickernel.hooks.FunctionInvokedEvent;
1515
import com.microsoft.semantickernel.hooks.FunctionInvokingEvent;
1616
import com.microsoft.semantickernel.hooks.KernelHooks;
17+
import com.microsoft.semantickernel.implementation.telemetry.FunctionSpan;
18+
import com.microsoft.semantickernel.implementation.telemetry.SemanticKernelTelemetry;
1719
import com.microsoft.semantickernel.localization.SemanticKernelResources;
1820
import com.microsoft.semantickernel.orchestration.FunctionResult;
1921
import com.microsoft.semantickernel.orchestration.InvocationContext;
@@ -150,9 +152,10 @@ private static MethodDetails getMethodDetails(
150152

151153
/**
152154
* Gets the function from the method.
153-
* @param method the method to invoke
155+
*
156+
* @param method the method to invoke
154157
* @param instance the instance to invoke the method on
155-
* @param <T> the return type of the function
158+
* @param <T> the return type of the function
156159
* @return the function representing the method
157160
*/
158161
@SuppressWarnings("unchecked")
@@ -367,6 +370,8 @@ private static Object getArgumentValue(
367370

368371
if (Kernel.class.isAssignableFrom(targetArgType)) {
369372
return kernel;
373+
} else if (SemanticKernelTelemetry.class.isAssignableFrom(targetArgType)) {
374+
return invocationContext.getTelemetry();
370375
}
371376

372377
String variableName = getGetVariableName(parameter);
@@ -692,6 +697,7 @@ private static InputVariable toKernelParameterMetadata(Parameter parameter) {
692697

693698
/**
694699
* Gets the constants from an enum type.
700+
*
695701
* @param type the type to get the enum constants from
696702
* @return a list of the enum constants or {@code null} if the type is not an enum
697703
*/
@@ -726,11 +732,27 @@ public Mono<FunctionResult<T>> invokeAsync(
726732
@Nullable KernelFunctionArguments arguments,
727733
@Nullable ContextVariableType<T> variableType,
728734
@Nullable InvocationContext invocationContext) {
729-
return function.invokeAsync(kernel, this, arguments, variableType, invocationContext);
735+
736+
return Mono.deferContextual(contextView -> {
737+
FunctionSpan span = FunctionSpan.build(
738+
SemanticKernelTelemetry.getTelemetry(invocationContext),
739+
contextView,
740+
this.getPluginName(),
741+
this.getName(),
742+
arguments);
743+
744+
return function
745+
.invokeAsync(kernel, this, arguments, variableType, invocationContext)
746+
.contextWrite(span.getReactorContextModifier())
747+
.doOnSuccess(span::onFunctionSuccess)
748+
.doOnError(span::onFunctionError)
749+
.doOnTerminate(span::close);
750+
});
730751
}
731752

732753
/**
733754
* Concrete implementation of the abstract method in KernelFunction.
755+
*
734756
* @param <T> the return type of the function
735757
*/
736758
public interface ImplementationFunc<T> {
@@ -775,6 +797,7 @@ default FunctionResult<T> invoke(
775797

776798
/**
777799
* A builder for {@link KernelFunction}.
800+
*
778801
* @param <T> the return type of the function
779802
*/
780803
public static class Builder<T> {

‎semantickernel-api/src/main/java/com/microsoft/semantickernel/semanticfunctions/KernelFunctionFromPrompt.java

+18-2
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,8 @@
1010
import com.microsoft.semantickernel.hooks.KernelHooks;
1111
import com.microsoft.semantickernel.hooks.PromptRenderedEvent;
1212
import com.microsoft.semantickernel.hooks.PromptRenderingEvent;
13+
import com.microsoft.semantickernel.implementation.telemetry.FunctionSpan;
14+
import com.microsoft.semantickernel.implementation.telemetry.SemanticKernelTelemetry;
1315
import com.microsoft.semantickernel.localization.SemanticKernelResources;
1416
import com.microsoft.semantickernel.orchestration.FunctionResult;
1517
import com.microsoft.semantickernel.orchestration.InvocationContext;
@@ -274,8 +276,22 @@ public Mono<FunctionResult<T>> invokeAsync(
274276
@Nullable KernelFunctionArguments arguments,
275277
@Nullable ContextVariableType<T> variableType,
276278
@Nullable InvocationContext invocationContext) {
277-
return invokeInternalAsync(kernel, arguments, variableType, invocationContext)
278-
.takeLast(1).single();
279+
return Mono.deferContextual(contextView -> {
280+
FunctionSpan span = FunctionSpan.build(
281+
SemanticKernelTelemetry.getTelemetry(invocationContext),
282+
contextView,
283+
this.getPluginName(),
284+
this.getName(),
285+
arguments);
286+
287+
return invokeInternalAsync(kernel, arguments, variableType, invocationContext)
288+
.contextWrite(span.getReactorContextModifier())
289+
.takeLast(1)
290+
.single()
291+
.doOnSuccess(span::onFunctionSuccess)
292+
.doOnError(span::onFunctionError)
293+
.doOnTerminate(span::close);
294+
});
279295
}
280296

281297
/**

0 commit comments

Comments
 (0)
Please sign in to comment.