From 496fd6dde09b194d60fc08fa9c6b38f6d16755cc Mon Sep 17 00:00:00 2001
From: Daymon
Date: Wed, 8 Oct 2025 15:23:52 -0500
Subject: [PATCH 01/15] Fix minor bug in nanosecond conversion

---
 FirebaseAI/CHANGELOG.md                       |  4 +++
 .../Types/Internal/ProtoDuration.swift        | 36 ++++++++++++++++++-
 2 files changed, 39 insertions(+), 1 deletion(-)

diff --git a/FirebaseAI/CHANGELOG.md b/FirebaseAI/CHANGELOG.md
index 06f5f30908d..c32f89d1183 100644
--- a/FirebaseAI/CHANGELOG.md
+++ b/FirebaseAI/CHANGELOG.md
@@ -1,3 +1,7 @@
+# Unreleased
+- [fixed] Fixed minor translation issue for nanosecond conversion when receiving
+  `LiveServerGoingAwayNotice`. (#????)
+
 # 12.4.0
 - [feature] Added support for the URL context tool, which allows the model to access content
   from provided public web URLs to inform and enhance its responses. (#15221)
diff --git a/FirebaseAI/Sources/Types/Internal/ProtoDuration.swift b/FirebaseAI/Sources/Types/Internal/ProtoDuration.swift
index 1dac21d6429..b55075747b8 100644
--- a/FirebaseAI/Sources/Types/Internal/ProtoDuration.swift
+++ b/FirebaseAI/Sources/Types/Internal/ProtoDuration.swift
@@ -107,6 +107,40 @@ extension ProtoDuration: Decodable {
     }
 
     self.seconds = secs
-    self.nanos = nanos
+    self.nanos = fractionalSecondsToNanoseconds(nanos, digits: nanoseconds.count)
   }
 }
+
+/// Cached powers of 10 for quickly mapping fractional seconds.
+private let pow10: [Int32] = [
+  1, 10, 100, 1_000, 10_000, 100_000,
+  1_000_000, 10_000_000, 100_000_000, 1_000_000_000
+]
+
+/// Converts a fractional second representing a nanosecond to a valid nanosecond value.
+///
+/// ```swift
+/// // 0.123456
+/// XCTAssertEqual(
+///   fractionalSecondsToNanoseconds(123456, digits: 6),
+///   123456000
+/// )
+///
+/// // 0.000123456
+/// XCTAssertEqual(
+///   fractionalSecondsToNanoseconds(123456, digits: 9),
+///   123456
+/// )
+///
+/// // 0.123456789
+/// XCTAssertEqual(
+///   fractionalSecondsToNanoseconds(123456789, digits: 9),
+///   123456789
+/// )
+/// ```
+private func fractionalSecondsToNanoseconds(_ value: Int32, digits: Int) -> Int32 {
+  precondition(digits >= 0 && digits <= 9, "A nanosecond value must fit within 0..9 digits")
+  precondition(value >= 0, "A nanosecond value must not be negative")
+
+  return Int32(truncatingIfNeeded: value) &* pow10[9 - digits]
+}

From 6096752d9cd5bb268bb51ab98643d83d8ad1a59f Mon Sep 17 00:00:00 2001
From: Daymon
Date: Wed, 8 Oct 2025 15:24:04 -0500
Subject: [PATCH 02/15] Add live snippets

---
 .../Tests/Unit/Snippets/LiveSnippets.swift    | 244 ++++++++++++++++++
 1 file changed, 244 insertions(+)
 create mode 100644 FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift

diff --git a/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift b/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift
new file mode 100644
index 00000000000..03c06367136
--- /dev/null
+++ b/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift
@@ -0,0 +1,244 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +import FirebaseAI +import FirebaseCore +import XCTest + +// These snippet tests are intentionally skipped in CI jobs; see the README file in this directory +// for instructions on running them manually. + +@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *) +@available(watchOS, unavailable) +final class LiveSnippets: XCTestCase { + override func setUpWithError() throws { + try FirebaseApp.configureDefaultAppForSnippets() + } + + override func tearDown() async throws { + await FirebaseApp.deleteDefaultAppForSnippets() + } + + func sendAudioReceiveAudio() async throws { + // Initialize the Vertex AI Gemini API backend service + // Set the location to `us-central1` (the flash-live model is only supported in that location) + // Create a `LiveGenerativeModel` instance with the flash-live model (only model that supports the Live API) + let model = FirebaseAI.firebaseAI(backend: .vertexAI(location: "us-central1")).liveModel( + modelName: "gemini-2.0-flash-exp", + // Configure the model to respond with audio + generationConfig: LiveGenerationConfig( + responseModalities: [.audio] + ) + ) + + do { + let session = try await model.connect() + + // Load the audio file, or tap a microphone + guard let audioFile = NSDataAsset(name: "audio.pcm") else { + fatalError("Failed to load audio file") + } + + // Provide the audio data + await session.sendAudioRealtime(audioFile.data) + + var outputText = "" + for try await message in session.responses { + if case let .content(content) = message.payload { + content.modelTurn?.parts.forEach { part in + if let part = part as? InlineDataPart, part.mimeType.starts(with: "audio/pcm") { + // Handle 16bit pcm audio data at 24khz + playAudio(part.data) + } + } + // Optional: if you don't require to send more requests. + if content.isTurnComplete { + await session.close() + } + } + } + } catch { + fatalError(error.localizedDescription) + } + } + + func sendAudioReceiveText() async throws { + // Initialize the Vertex AI Gemini API backend service + // Set the location to `us-central1` (the flash-live model is only supported in that location) + // Create a `LiveGenerativeModel` instance with the flash-live model (only model that supports the Live API) + let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( + modelName: "gemini-live-2.5-flash-preview", + // Configure the model to respond with text + generationConfig: LiveGenerationConfig( + responseModalities: [.text] + ) + ) + + do { + let session = try await model.connect() + + // Load the audio file, or tap a microphone + guard let audioFile = NSDataAsset(name: "audio.pcm") else { + fatalError("Failed to load audio file") + } + + // Provide the audio data + await session.sendAudioRealtime(audioFile.data) + + var outputText = "" + for try await message in session.responses { + if case let .content(content) = message.payload { + content.modelTurn?.parts.forEach { part in + if let part = part as? TextPart { + outputText += part.text + } + } + // Optional: if you don't require to send more requests. + if content.isTurnComplete { + await session.close() + } + } + } + + // Output received from the server. 
+ print(outputText) + } catch { + fatalError(error.localizedDescription) + } + } + + func sendTextReceiveAudio() async throws { + // Initialize the Gemini Developer API backend service + // Create a `LiveModel` instance with the flash-live model (only model that supports the Live API) + let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( + modelName: "gemini-live-2.5-flash-preview", + // Configure the model to respond with audio + generationConfig: LiveGenerationConfig( + responseModalities: [.audio] + ) + ) + + do { + let session = try await model.connect() + + // Provide a text prompt + let text = "tell a short story" + + await session.sendTextRealtime(text) + + var outputText = "" + for try await message in session.responses { + if case let .content(content) = message.payload { + content.modelTurn?.parts.forEach { part in + if let part = part as? InlineDataPart, part.mimeType.starts(with: "audio/pcm") { + // Handle 16bit pcm audio data at 24khz + playAudio(part.data) + } + } + // Optional: if you don't require to send more requests. + if content.isTurnComplete { + await session.close() + } + } + } + } catch { + fatalError(error.localizedDescription) + } + } + + func sendTextReceiveText() async throws { + // Initialize the Gemini Developer API backend service + // Create a `LiveModel` instance with the flash-live model (only model that supports the Live API) + let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( + modelName: "gemini-live-2.5-flash-preview", + // Configure the model to respond with audio + generationConfig: LiveGenerationConfig( + responseModalities: [.audio] + ) + ) + + do { + let session = try await model.connect() + + // Provide a text prompt + let text = "tell a short story" + + await session.sendTextRealtime(text) + + var outputText = "" + for try await message in session.responses { + if case let .content(content) = message.payload { + content.modelTurn?.parts.forEach { part in + if let part = part as? InlineDataPart, part.mimeType.starts(with: "audio/pcm") { + // Handle 16bit pcm audio data at 24khz + playAudio(part.data) + } + } + // Optional: if you don't require to send more requests. + if content.isTurnComplete { + await session.close() + } + } + } + } catch { + fatalError(error.localizedDescription) + } + } + + func changeVoiceAndLanguage() { + let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( + modelName: "gemini-live-2.5-flash-preview", + // Configure the model to use a specific voice for its audio response + generationConfig: LiveGenerationConfig( + responseModalities: [.audio], + speech: SpeechConfig(voiceName: "Fenrir") + ) + ) + } + + func modelParameters() { + // ... + + // Set parameter values in a `LiveGenerationConfig` (example values shown here) + let config = LiveGenerationConfig( + temperature: 0.9, + topP: 0.1, + topK: 16, + maxOutputTokens: 200, + responseModalities: [.audio], + speech: SpeechConfig(voiceName: "Fenrir"), + ) + + // Initialize the Vertex AI Gemini API backend service + // Specify the config as part of creating the `LiveGenerativeModel` instance + let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( + modelName: "gemini-live-2.5-flash-preview", + generationConfig: config + ) + + // ... 
+ } + + func systemInstructions() { + // Specify the system instructions as part of creating the `LiveGenerativeModel` instance + let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( + modelName: "gemini-live-2.5-flash-preview", + systemInstruction: ModelContent(role: "system", parts: "You are a cat. Your name is Neko.") + ) + } + + private func playAudio(_ data: Data) { + // Use AVAudioPlayerNode or something akin to play back audio + } +} From cb7796c52d45976a1eb34615666cf19898987361 Mon Sep 17 00:00:00 2001 From: Daymon Date: Wed, 8 Oct 2025 15:24:26 -0500 Subject: [PATCH 03/15] Add tests for VoiceConfig encoding --- .../Unit/Types/Live/VoiceConfigTests.swift | 62 +++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 FirebaseAI/Tests/Unit/Types/Live/VoiceConfigTests.swift diff --git a/FirebaseAI/Tests/Unit/Types/Live/VoiceConfigTests.swift b/FirebaseAI/Tests/Unit/Types/Live/VoiceConfigTests.swift new file mode 100644 index 00000000000..707c0088bdc --- /dev/null +++ b/FirebaseAI/Tests/Unit/Types/Live/VoiceConfigTests.swift @@ -0,0 +1,62 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import XCTest + +@testable import FirebaseAI + +@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, *) +@available(watchOS, unavailable) +final class VoiceConfigTests: XCTestCase { + let encoder = JSONEncoder() + + override func setUp() { + super.setUp() + encoder.outputFormatting = [.prettyPrinted, .sortedKeys, .withoutEscapingSlashes] + } + + func testEncodeVoiceConfig_prebuiltVoice() throws { + let voice = VoiceConfig.prebuiltVoiceConfig( + PrebuiltVoiceConfig(voiceName: "Zephyr") + ) + + let jsonData = try encoder.encode(voice) + + let json = try XCTUnwrap(String(data: jsonData, encoding: .utf8)) + XCTAssertEqual(json, """ + { + "prebuiltVoiceConfig" : { + "voiceName" : "Zephyr" + } + } + """) + } + + func testEncodeVoiceConfig_customVoice() throws { + let voice = VoiceConfig.customVoiceConfig( + CustomVoiceConfig(customVoiceSample: Data(repeating: 5, count: 5)) + ) + + let jsonData = try encoder.encode(voice) + + let json = try XCTUnwrap(String(data: jsonData, encoding: .utf8)) + XCTAssertEqual(json, """ + { + "customVoiceConfig" : { + "customVoiceSample" : "BQUFBQU=" + } + } + """) + } +} From 72b135c2cd34fba9173050a6dc26221d0b7c55dc Mon Sep 17 00:00:00 2001 From: Daymon Date: Wed, 8 Oct 2025 15:24:42 -0500 Subject: [PATCH 04/15] Add tests for BidiGenerateContentServerMessage decoding --- ...idiGenerateContentServerMessageTests.swift | 189 ++++++++++++++++++ 1 file changed, 189 insertions(+) create mode 100644 FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift diff --git a/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift b/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift new file mode 100644 index 00000000000..9a5d28b704c --- /dev/null +++ b/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift @@ -0,0 +1,189 @@ 
+// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import XCTest + +@testable import FirebaseAI + +final class BidiGenerateContentServerMessageTests: XCTestCase { + let decoder = JSONDecoder() + + func testDecodeBidiGenerateContentServerMessage_setupComplete() throws { + let json = """ + { + "setupComplete" : {} + } + """ + let jsonData = try XCTUnwrap(json.data(using: .utf8)) + + let serverMessage = try decoder.decode(BidiGenerateContentServerMessage.self, from: jsonData) + guard case .setupComplete(_) = serverMessage.messageType else { + XCTFail("Decoded message is not a setupComplete message.") + return + } + } + + func testDecodeBidiGenerateContentServerMessage_serverContent() throws { + let json = """ + { + "serverContent" : { + "modelTurn" : { + "parts" : [ + { + "inlineData" : { + "data" : "BQUFBQU=", + "mimeType" : "audio/pcm" + } + } + ], + "role" : "model" + }, + "turnComplete": true, + "groundingMetadata": { + "webSearchQueries": ["query1", "query2"], + "groundingChunks": [ + { "web": { "uri": "uri1", "title": "title1" } } + ], + "groundingSupports": [ + { "segment": { "endIndex": 10, "text": "text" }, "groundingChunkIndices": [0] } + ], + "searchEntryPoint": { "renderedContent": "html" } + }, + "inputTranscription": { + "text": "What day of the week is it?" + }, + "outputTranscription": { + "text": "Today is friday" + } + } + } + """ + let jsonData = try XCTUnwrap(json.data(using: .utf8)) + + let serverMessage = try decoder.decode(BidiGenerateContentServerMessage.self, from: jsonData) + guard case let .serverContent(serverContent) = serverMessage.messageType else { + XCTFail("Decoded message is not a serverContent message.") + return + } + + XCTAssertEqual(serverContent.turnComplete, true) + XCTAssertNil(serverContent.interrupted) + XCTAssertNil(serverContent.generationComplete) + + let modelTurn = try XCTUnwrap(serverContent.modelTurn) + XCTAssertEqual(modelTurn.role, "model") + XCTAssertEqual(modelTurn.parts.count, 1) + let part = try XCTUnwrap(modelTurn.parts.first as? 
InlineDataPart) + XCTAssertEqual(part.data, Data(repeating: 5, count: 5)) + XCTAssertEqual(part.mimeType, "audio/pcm") + + let metadata = try XCTUnwrap(serverContent.groundingMetadata) + XCTAssertEqual(metadata.webSearchQueries, ["query1", "query2"]) + XCTAssertEqual(metadata.groundingChunks.count, 1) + let groundingChunk = try XCTUnwrap(metadata.groundingChunks.first) + let webChunk = try XCTUnwrap(groundingChunk.web) + XCTAssertEqual(webChunk.uri, "uri1") + XCTAssertEqual(metadata.groundingSupports.count, 1) + let groundingSupport = try XCTUnwrap(metadata.groundingSupports.first) + XCTAssertEqual(groundingSupport.segment.startIndex, 0) + XCTAssertEqual(groundingSupport.segment.partIndex, 0) + XCTAssertEqual(groundingSupport.segment.endIndex, 10) + XCTAssertEqual(groundingSupport.segment.text, "text") + let searchEntryPoint = try XCTUnwrap(metadata.searchEntryPoint) + XCTAssertEqual(searchEntryPoint.renderedContent, "html") + + let inputTranscription = try XCTUnwrap(serverContent.inputTranscription) + XCTAssertEqual(inputTranscription.text, "What day of the week is it?") + + let outputTranscription = try XCTUnwrap(serverContent.outputTranscription) + XCTAssertEqual(outputTranscription.text, "Today is friday") + } + + func testDecodeBidiGenerateContentServerMessage_toolCall() throws { + let json = """ + { + "toolCall" : { + "functionCalls" : [ + { + "name": "changeBackgroundColor", + "id": "functionCall-12345-67890", + "args" : { + "color": "#F54927" + } + } + ] + } + } + """ + let jsonData = try XCTUnwrap(json.data(using: .utf8)) + + let serverMessage = try decoder.decode(BidiGenerateContentServerMessage.self, from: jsonData) + guard case let .toolCall(toolCall) = serverMessage.messageType else { + XCTFail("Decoded message is not a toolCall message.") + return + } + + let functionCalls = try XCTUnwrap(toolCall.functionCalls) + XCTAssertEqual(functionCalls.count, 1) + let functionCall = try XCTUnwrap(functionCalls.first) + XCTAssertEqual(functionCall.name, "changeBackgroundColor") + XCTAssertEqual(functionCall.id, "functionCall-12345-67890") + let args = try XCTUnwrap(functionCall.args) + guard case let .string(color) = args["color"] else { + XCTFail("Missing color argument") + return + } + XCTAssertEqual(color, "#F54927") + } + + func testDecodeBidiGenerateContentServerMessage_toolCallCancellation() throws { + let json = """ + { + "toolCallCancellation" : { + "ids" : ["functionCall-12345-67890"] + } + } + """ + let jsonData = try XCTUnwrap(json.data(using: .utf8)) + + let serverMessage = try decoder.decode(BidiGenerateContentServerMessage.self, from: jsonData) + guard case let .toolCallCancellation(toolCallCancellation) = serverMessage.messageType else { + XCTFail("Decoded message is not a toolCallCancellation message.") + return + } + + let ids = try XCTUnwrap(toolCallCancellation.ids) + XCTAssertEqual(ids, ["functionCall-12345-67890"]) + } + + func testDecodeBidiGenerateContentServerMessage_goAway() throws { + let json = """ + { + "goAway" : { + "timeLeft": "1.23456789s" + } + } + """ + let jsonData = try XCTUnwrap(json.data(using: .utf8)) + + let serverMessage = try decoder.decode(BidiGenerateContentServerMessage.self, from: jsonData) + guard case let .goAway(goAway) = serverMessage.messageType else { + XCTFail("Decoded message is not a goAway message.") + return + } + + XCTAssertEqual(goAway.timeLeft?.seconds, 1) + XCTAssertEqual(goAway.timeLeft?.nanos, 234567890) + } +} From 4cb3054af507eb5e60a2853041b4c42a68d1544a Mon Sep 17 00:00:00 2001 From: Daymon Date: Wed, 8 Oct 2025 15:24:51 
-0500 Subject: [PATCH 05/15] Add tests for ProtoDuration decoding --- .../Unit/TestUtilities/XCTExtensions.swift | 30 ++++++ .../Tests/Unit/Types/ProtoDurationTests.swift | 99 +++++++++++++++++++ 2 files changed, 129 insertions(+) create mode 100644 FirebaseAI/Tests/Unit/TestUtilities/XCTExtensions.swift create mode 100644 FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift diff --git a/FirebaseAI/Tests/Unit/TestUtilities/XCTExtensions.swift b/FirebaseAI/Tests/Unit/TestUtilities/XCTExtensions.swift new file mode 100644 index 00000000000..33ef11de6a7 --- /dev/null +++ b/FirebaseAI/Tests/Unit/TestUtilities/XCTExtensions.swift @@ -0,0 +1,30 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import XCTest + +/// Asserts that a string contains another string. +/// +/// ```swift +/// XCTAssertContains("my name is", "name") +/// ``` +/// +/// - Parameters: +/// - string: The source string that should contain the other. +/// - contains: The string that should be contained in the source string. +func XCTAssertContains(_ string: String, _ contains: String) { + if !string.contains(contains) { + XCTFail("(\"\(string)\") does not contain (\"\(contains)\")") + } +} diff --git a/FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift b/FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift new file mode 100644 index 00000000000..dd73fdca3dc --- /dev/null +++ b/FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift @@ -0,0 +1,99 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import XCTest + +@testable import FirebaseAI + +final class ProtoDurationTests: XCTestCase { + let decoder = JSONDecoder() + + private func decodeProtoDuration(_ jsonString: String) throws -> ProtoDuration { + let escapedString = "\"\(jsonString)\"" + let jsonData = try XCTUnwrap(escapedString.data(using: .utf8)) + + return try decoder.decode(ProtoDuration.self, from: jsonData) + } + + private func expectDecodeFailure(_ jsonString: String) throws -> DecodingError.Context? { + do { + let _ = try decodeProtoDuration(jsonString) + XCTFail("Expected decoding to fail") + return nil + } catch { + let decodingError = try XCTUnwrap(error as? 
DecodingError) + guard case let .dataCorrupted(dataCorrupted) = decodingError else { + XCTFail("Error was not a data corrupted error") + return nil + } + + return dataCorrupted + } + } + + func testDecodeProtoDuration_standardDuration() throws { + let duration = try decodeProtoDuration("120.000000123s") + XCTAssertEqual(duration.seconds, 120) + XCTAssertEqual(duration.nanos, 123) + + XCTAssertEqual(duration.timeInterval, 120.000000123) + } + + func testDecodeProtoDuration_withoutNanoseconds() throws { + let duration = try decodeProtoDuration("120s") + XCTAssertEqual(duration.seconds, 120) + XCTAssertEqual(duration.nanos, 0) + + XCTAssertEqual(duration.timeInterval, 120) + } + + func testDecodeProtoDuration_maxNanosecondDigits() throws { + let duration = try decodeProtoDuration("15.123456789s") + XCTAssertEqual(duration.seconds, 15) + XCTAssertEqual(duration.nanos, 123456789) + + XCTAssertEqual(duration.timeInterval, 15.123456789) + } + + func testDecodeProtoDuration_withMilliseconds() throws { + let duration = try decodeProtoDuration("15.123s") + XCTAssertEqual(duration.seconds, 15) + XCTAssertEqual(duration.nanos, 123000000) + + XCTAssertEqual(duration.timeInterval, 15.123000000) + } + + func testDecodeProtoDuration_invalidSeconds() throws { + guard let error = try expectDecodeFailure("invalid.123s") else { return } + XCTAssertContains(error.debugDescription, "Invalid proto duration seconds") + } + + func testDecodeProtoDuration_invalidNanoseconds() throws { + guard let error = try expectDecodeFailure("123.invalid") else { return } + XCTAssertContains(error.debugDescription, "Invalid proto duration nanoseconds") + } + + func testDecodeProtoDuration_tooManyDecimals() throws { + guard let error = try expectDecodeFailure("123.45.67") else { return } + XCTAssertContains(error.debugDescription, "Invalid proto duration string") + } + + func testDecodeProtoDuration_withoutSuffix() throws { + let duration = try decodeProtoDuration("123.456") + XCTAssertEqual(duration.seconds, 123) + XCTAssertEqual(duration.nanos, 456000000) + + XCTAssertEqual(duration.timeInterval, 123.456) + } +} From a1636c93fb3fa2e59b7408440a651c073a1de13b Mon Sep 17 00:00:00 2001 From: Daymon Date: Wed, 8 Oct 2025 15:25:55 -0500 Subject: [PATCH 06/15] formatting --- FirebaseAI/CHANGELOG.md | 2 +- .../Sources/Types/Internal/ProtoDuration.swift | 4 ++-- FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift | 12 ++++++++---- .../Live/BidiGenerateContentServerMessageTests.swift | 4 ++-- FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift | 6 +++--- 5 files changed, 16 insertions(+), 12 deletions(-) diff --git a/FirebaseAI/CHANGELOG.md b/FirebaseAI/CHANGELOG.md index c32f89d1183..c7d39a4dd2e 100644 --- a/FirebaseAI/CHANGELOG.md +++ b/FirebaseAI/CHANGELOG.md @@ -1,6 +1,6 @@ # Unreleased - [fixed] Fixed minor translation issue for nanosecond conversion when receiving - `LiveServerGoingAwayNotice`. (#????) + `LiveServerGoingAwayNotice`. (#????) # 12.4.0 - [feature] Added support for the URL context tool, which allows the model to access content diff --git a/FirebaseAI/Sources/Types/Internal/ProtoDuration.swift b/FirebaseAI/Sources/Types/Internal/ProtoDuration.swift index b55075747b8..d4af71b0346 100644 --- a/FirebaseAI/Sources/Types/Internal/ProtoDuration.swift +++ b/FirebaseAI/Sources/Types/Internal/ProtoDuration.swift @@ -113,8 +113,8 @@ extension ProtoDuration: Decodable { /// Cached powers of 10 for quickly mapping fractional seconds. 
private let pow10: [Int32] = [ - 1, 10, 100, 1_000, 10_000, 100_000, - 1_000_000, 10_000_000, 100_000_000, 1_000_000_000 + 1, 10, 100, 1000, 10000, 100_000, + 1_000_000, 10_000_000, 100_000_000, 1_000_000_000, ] /// Converts a fractional second representing a nanosecond to a valid nanosecond value. diff --git a/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift b/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift index 03c06367136..f2dc67f74dc 100644 --- a/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift +++ b/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift @@ -33,7 +33,8 @@ final class LiveSnippets: XCTestCase { func sendAudioReceiveAudio() async throws { // Initialize the Vertex AI Gemini API backend service // Set the location to `us-central1` (the flash-live model is only supported in that location) - // Create a `LiveGenerativeModel` instance with the flash-live model (only model that supports the Live API) + // Create a `LiveGenerativeModel` instance with the flash-live model (only model that supports + // the Live API) let model = FirebaseAI.firebaseAI(backend: .vertexAI(location: "us-central1")).liveModel( modelName: "gemini-2.0-flash-exp", // Configure the model to respond with audio @@ -76,7 +77,8 @@ final class LiveSnippets: XCTestCase { func sendAudioReceiveText() async throws { // Initialize the Vertex AI Gemini API backend service // Set the location to `us-central1` (the flash-live model is only supported in that location) - // Create a `LiveGenerativeModel` instance with the flash-live model (only model that supports the Live API) + // Create a `LiveGenerativeModel` instance with the flash-live model (only model that supports + // the Live API) let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( modelName: "gemini-live-2.5-flash-preview", // Configure the model to respond with text @@ -120,7 +122,8 @@ final class LiveSnippets: XCTestCase { func sendTextReceiveAudio() async throws { // Initialize the Gemini Developer API backend service - // Create a `LiveModel` instance with the flash-live model (only model that supports the Live API) + // Create a `LiveModel` instance with the flash-live model (only model that supports the Live + // API) let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( modelName: "gemini-live-2.5-flash-preview", // Configure the model to respond with audio @@ -159,7 +162,8 @@ final class LiveSnippets: XCTestCase { func sendTextReceiveText() async throws { // Initialize the Gemini Developer API backend service - // Create a `LiveModel` instance with the flash-live model (only model that supports the Live API) + // Create a `LiveModel` instance with the flash-live model (only model that supports the Live + // API) let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( modelName: "gemini-live-2.5-flash-preview", // Configure the model to respond with audio diff --git a/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift b/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift index 9a5d28b704c..2646e980421 100644 --- a/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift +++ b/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift @@ -28,7 +28,7 @@ final class BidiGenerateContentServerMessageTests: XCTestCase { let jsonData = try XCTUnwrap(json.data(using: .utf8)) let serverMessage = try decoder.decode(BidiGenerateContentServerMessage.self, from: jsonData) - guard case .setupComplete(_) = serverMessage.messageType else { + 
guard case .setupComplete = serverMessage.messageType else { XCTFail("Decoded message is not a setupComplete message.") return } @@ -184,6 +184,6 @@ final class BidiGenerateContentServerMessageTests: XCTestCase { } XCTAssertEqual(goAway.timeLeft?.seconds, 1) - XCTAssertEqual(goAway.timeLeft?.nanos, 234567890) + XCTAssertEqual(goAway.timeLeft?.nanos, 234_567_890) } } diff --git a/FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift b/FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift index dd73fdca3dc..bddb50eef1d 100644 --- a/FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift +++ b/FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift @@ -61,7 +61,7 @@ final class ProtoDurationTests: XCTestCase { func testDecodeProtoDuration_maxNanosecondDigits() throws { let duration = try decodeProtoDuration("15.123456789s") XCTAssertEqual(duration.seconds, 15) - XCTAssertEqual(duration.nanos, 123456789) + XCTAssertEqual(duration.nanos, 123_456_789) XCTAssertEqual(duration.timeInterval, 15.123456789) } @@ -69,7 +69,7 @@ final class ProtoDurationTests: XCTestCase { func testDecodeProtoDuration_withMilliseconds() throws { let duration = try decodeProtoDuration("15.123s") XCTAssertEqual(duration.seconds, 15) - XCTAssertEqual(duration.nanos, 123000000) + XCTAssertEqual(duration.nanos, 123_000_000) XCTAssertEqual(duration.timeInterval, 15.123000000) } @@ -92,7 +92,7 @@ final class ProtoDurationTests: XCTestCase { func testDecodeProtoDuration_withoutSuffix() throws { let duration = try decodeProtoDuration("123.456") XCTAssertEqual(duration.seconds, 123) - XCTAssertEqual(duration.nanos, 456000000) + XCTAssertEqual(duration.nanos, 456_000_000) XCTAssertEqual(duration.timeInterval, 123.456) } From ba35be4e3d0cfb3434b5e85bc3c1685b0d4fc0ab Mon Sep 17 00:00:00 2001 From: Daymon Date: Wed, 8 Oct 2025 15:31:57 -0500 Subject: [PATCH 07/15] Add PR number to changelog entry --- FirebaseAI/CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/FirebaseAI/CHANGELOG.md b/FirebaseAI/CHANGELOG.md index c7d39a4dd2e..b7999b9fa4a 100644 --- a/FirebaseAI/CHANGELOG.md +++ b/FirebaseAI/CHANGELOG.md @@ -1,6 +1,6 @@ # Unreleased - [fixed] Fixed minor translation issue for nanosecond conversion when receiving - `LiveServerGoingAwayNotice`. (#????) + `LiveServerGoingAwayNotice`. 
(#15396) # 12.4.0 - [feature] Added support for the URL context tool, which allows the model to access content From 7d998a31c1d06bc754b1c38c4039ff6b3139942c Mon Sep 17 00:00:00 2001 From: Daymon Date: Thu, 9 Oct 2025 14:52:20 -0500 Subject: [PATCH 08/15] Add missing available marker --- .../Unit/Types/Live/BidiGenerateContentServerMessageTests.swift | 2 ++ 1 file changed, 2 insertions(+) diff --git a/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift b/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift index 2646e980421..710f7510ff0 100644 --- a/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift +++ b/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift @@ -16,6 +16,8 @@ import XCTest @testable import FirebaseAI +@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, *) +@available(watchOS, unavailable) final class BidiGenerateContentServerMessageTests: XCTestCase { let decoder = JSONDecoder() From 6f5038ae75387add22aff10eecbce583d2997bd0 Mon Sep 17 00:00:00 2001 From: Daymon Date: Thu, 9 Oct 2025 14:57:00 -0500 Subject: [PATCH 09/15] Fix warnings --- .../Tests/Unit/Snippets/LiveSnippets.swift | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift b/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift index f2dc67f74dc..8c61da91d50 100644 --- a/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift +++ b/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift @@ -54,7 +54,6 @@ final class LiveSnippets: XCTestCase { // Provide the audio data await session.sendAudioRealtime(audioFile.data) - var outputText = "" for try await message in session.responses { if case let .content(content) = message.payload { content.modelTurn?.parts.forEach { part in @@ -140,7 +139,6 @@ final class LiveSnippets: XCTestCase { await session.sendTextRealtime(text) - var outputText = "" for try await message in session.responses { if case let .content(content) = message.payload { content.modelTurn?.parts.forEach { part in @@ -180,7 +178,6 @@ final class LiveSnippets: XCTestCase { await session.sendTextRealtime(text) - var outputText = "" for try await message in session.responses { if case let .content(content) = message.payload { content.modelTurn?.parts.forEach { part in @@ -209,6 +206,9 @@ final class LiveSnippets: XCTestCase { speech: SpeechConfig(voiceName: "Fenrir") ) ) + + // Not part of snippet + silenceWarning(model) } func modelParameters() { @@ -232,6 +232,9 @@ final class LiveSnippets: XCTestCase { ) // ... + + // Not part of snippet + silenceWarning(model) } func systemInstructions() { @@ -240,9 +243,17 @@ final class LiveSnippets: XCTestCase { modelName: "gemini-live-2.5-flash-preview", systemInstruction: ModelContent(role: "system", parts: "You are a cat. Your name is Neko.") ) + + // Not part of snippet + silenceWarning(model) } private func playAudio(_ data: Data) { // Use AVAudioPlayerNode or something akin to play back audio } + + /// This function only exists to silence the "unused value" warnings. + /// + /// This allows us to ensure the snippets match devsite. 
+ private func silenceWarning(_ model: LiveGenerativeModel) {} } From c29c5c925fb82ae69e8942c139d7a192d3ae6c13 Mon Sep 17 00:00:00 2001 From: Daymon Date: Thu, 9 Oct 2025 15:03:53 -0500 Subject: [PATCH 10/15] Delete LiveSnippets.swift --- .../Tests/Unit/Snippets/LiveSnippets.swift | 259 ------------------ 1 file changed, 259 deletions(-) delete mode 100644 FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift diff --git a/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift b/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift deleted file mode 100644 index 8c61da91d50..00000000000 --- a/FirebaseAI/Tests/Unit/Snippets/LiveSnippets.swift +++ /dev/null @@ -1,259 +0,0 @@ -// Copyright 2025 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import FirebaseAI -import FirebaseCore -import XCTest - -// These snippet tests are intentionally skipped in CI jobs; see the README file in this directory -// for instructions on running them manually. - -@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *) -@available(watchOS, unavailable) -final class LiveSnippets: XCTestCase { - override func setUpWithError() throws { - try FirebaseApp.configureDefaultAppForSnippets() - } - - override func tearDown() async throws { - await FirebaseApp.deleteDefaultAppForSnippets() - } - - func sendAudioReceiveAudio() async throws { - // Initialize the Vertex AI Gemini API backend service - // Set the location to `us-central1` (the flash-live model is only supported in that location) - // Create a `LiveGenerativeModel` instance with the flash-live model (only model that supports - // the Live API) - let model = FirebaseAI.firebaseAI(backend: .vertexAI(location: "us-central1")).liveModel( - modelName: "gemini-2.0-flash-exp", - // Configure the model to respond with audio - generationConfig: LiveGenerationConfig( - responseModalities: [.audio] - ) - ) - - do { - let session = try await model.connect() - - // Load the audio file, or tap a microphone - guard let audioFile = NSDataAsset(name: "audio.pcm") else { - fatalError("Failed to load audio file") - } - - // Provide the audio data - await session.sendAudioRealtime(audioFile.data) - - for try await message in session.responses { - if case let .content(content) = message.payload { - content.modelTurn?.parts.forEach { part in - if let part = part as? InlineDataPart, part.mimeType.starts(with: "audio/pcm") { - // Handle 16bit pcm audio data at 24khz - playAudio(part.data) - } - } - // Optional: if you don't require to send more requests. 
- if content.isTurnComplete { - await session.close() - } - } - } - } catch { - fatalError(error.localizedDescription) - } - } - - func sendAudioReceiveText() async throws { - // Initialize the Vertex AI Gemini API backend service - // Set the location to `us-central1` (the flash-live model is only supported in that location) - // Create a `LiveGenerativeModel` instance with the flash-live model (only model that supports - // the Live API) - let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( - modelName: "gemini-live-2.5-flash-preview", - // Configure the model to respond with text - generationConfig: LiveGenerationConfig( - responseModalities: [.text] - ) - ) - - do { - let session = try await model.connect() - - // Load the audio file, or tap a microphone - guard let audioFile = NSDataAsset(name: "audio.pcm") else { - fatalError("Failed to load audio file") - } - - // Provide the audio data - await session.sendAudioRealtime(audioFile.data) - - var outputText = "" - for try await message in session.responses { - if case let .content(content) = message.payload { - content.modelTurn?.parts.forEach { part in - if let part = part as? TextPart { - outputText += part.text - } - } - // Optional: if you don't require to send more requests. - if content.isTurnComplete { - await session.close() - } - } - } - - // Output received from the server. - print(outputText) - } catch { - fatalError(error.localizedDescription) - } - } - - func sendTextReceiveAudio() async throws { - // Initialize the Gemini Developer API backend service - // Create a `LiveModel` instance with the flash-live model (only model that supports the Live - // API) - let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( - modelName: "gemini-live-2.5-flash-preview", - // Configure the model to respond with audio - generationConfig: LiveGenerationConfig( - responseModalities: [.audio] - ) - ) - - do { - let session = try await model.connect() - - // Provide a text prompt - let text = "tell a short story" - - await session.sendTextRealtime(text) - - for try await message in session.responses { - if case let .content(content) = message.payload { - content.modelTurn?.parts.forEach { part in - if let part = part as? InlineDataPart, part.mimeType.starts(with: "audio/pcm") { - // Handle 16bit pcm audio data at 24khz - playAudio(part.data) - } - } - // Optional: if you don't require to send more requests. - if content.isTurnComplete { - await session.close() - } - } - } - } catch { - fatalError(error.localizedDescription) - } - } - - func sendTextReceiveText() async throws { - // Initialize the Gemini Developer API backend service - // Create a `LiveModel` instance with the flash-live model (only model that supports the Live - // API) - let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( - modelName: "gemini-live-2.5-flash-preview", - // Configure the model to respond with audio - generationConfig: LiveGenerationConfig( - responseModalities: [.audio] - ) - ) - - do { - let session = try await model.connect() - - // Provide a text prompt - let text = "tell a short story" - - await session.sendTextRealtime(text) - - for try await message in session.responses { - if case let .content(content) = message.payload { - content.modelTurn?.parts.forEach { part in - if let part = part as? InlineDataPart, part.mimeType.starts(with: "audio/pcm") { - // Handle 16bit pcm audio data at 24khz - playAudio(part.data) - } - } - // Optional: if you don't require to send more requests. 
- if content.isTurnComplete { - await session.close() - } - } - } - } catch { - fatalError(error.localizedDescription) - } - } - - func changeVoiceAndLanguage() { - let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( - modelName: "gemini-live-2.5-flash-preview", - // Configure the model to use a specific voice for its audio response - generationConfig: LiveGenerationConfig( - responseModalities: [.audio], - speech: SpeechConfig(voiceName: "Fenrir") - ) - ) - - // Not part of snippet - silenceWarning(model) - } - - func modelParameters() { - // ... - - // Set parameter values in a `LiveGenerationConfig` (example values shown here) - let config = LiveGenerationConfig( - temperature: 0.9, - topP: 0.1, - topK: 16, - maxOutputTokens: 200, - responseModalities: [.audio], - speech: SpeechConfig(voiceName: "Fenrir"), - ) - - // Initialize the Vertex AI Gemini API backend service - // Specify the config as part of creating the `LiveGenerativeModel` instance - let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( - modelName: "gemini-live-2.5-flash-preview", - generationConfig: config - ) - - // ... - - // Not part of snippet - silenceWarning(model) - } - - func systemInstructions() { - // Specify the system instructions as part of creating the `LiveGenerativeModel` instance - let model = FirebaseAI.firebaseAI(backend: .googleAI()).liveModel( - modelName: "gemini-live-2.5-flash-preview", - systemInstruction: ModelContent(role: "system", parts: "You are a cat. Your name is Neko.") - ) - - // Not part of snippet - silenceWarning(model) - } - - private func playAudio(_ data: Data) { - // Use AVAudioPlayerNode or something akin to play back audio - } - - /// This function only exists to silence the "unused value" warnings. - /// - /// This allows us to ensure the snippets match devsite. 
- private func silenceWarning(_ model: LiveGenerativeModel) {} -} From 9acee670d330501656c3c03cbe20a30d42ce523d Mon Sep 17 00:00:00 2001 From: Daymon Date: Fri, 10 Oct 2025 17:03:27 -0500 Subject: [PATCH 11/15] Add integration tests --- .../project.pbxproj | 6 + .../hello.dataset/Contents.json | 12 + .../Assets.xcassets/hello.dataset/hello.wav | Bin 0 -> 27164 bytes .../Tests/TestApp/Sources/Constants.swift | 2 + .../Tests/Integration/LiveSessionTests.swift | 481 ++++++++++++++++++ .../Tests/Utilities/InstanceConfig.swift | 15 + 6 files changed, 516 insertions(+) create mode 100644 FirebaseAI/Tests/TestApp/Resources/Assets.xcassets/hello.dataset/Contents.json create mode 100644 FirebaseAI/Tests/TestApp/Resources/Assets.xcassets/hello.dataset/hello.wav create mode 100644 FirebaseAI/Tests/TestApp/Tests/Integration/LiveSessionTests.swift diff --git a/FirebaseAI/Tests/TestApp/FirebaseAITestApp.xcodeproj/project.pbxproj b/FirebaseAI/Tests/TestApp/FirebaseAITestApp.xcodeproj/project.pbxproj index fc62b25f132..8b1b80e54d8 100644 --- a/FirebaseAI/Tests/TestApp/FirebaseAITestApp.xcodeproj/project.pbxproj +++ b/FirebaseAI/Tests/TestApp/FirebaseAITestApp.xcodeproj/project.pbxproj @@ -7,6 +7,8 @@ objects = { /* Begin PBXBuildFile section */ + 0E460FAB2E9858E4007E26A6 /* LiveSessionTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0E460FAA2E9858E4007E26A6 /* LiveSessionTests.swift */; }; + 0EC8BAE22E98784E0075A4E0 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 868A7C532CCC26B500E449DD /* Assets.xcassets */; }; 862218812D04E098007ED2D4 /* IntegrationTestUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = 862218802D04E08D007ED2D4 /* IntegrationTestUtils.swift */; }; 864F8F712D4980DD0002EA7E /* ImagenIntegrationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 864F8F702D4980D60002EA7E /* ImagenIntegrationTests.swift */; }; 8661385C2CC943DD00F4B78E /* TestApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8661385B2CC943DD00F4B78E /* TestApp.swift */; }; @@ -42,6 +44,7 @@ /* End PBXContainerItemProxy section */ /* Begin PBXFileReference section */ + 0E460FAA2E9858E4007E26A6 /* LiveSessionTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LiveSessionTests.swift; sourceTree = ""; }; 862218802D04E08D007ED2D4 /* IntegrationTestUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IntegrationTestUtils.swift; sourceTree = ""; }; 864F8F702D4980D60002EA7E /* ImagenIntegrationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImagenIntegrationTests.swift; sourceTree = ""; }; 866138582CC943DD00F4B78E /* FirebaseAITestApp-SPM.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "FirebaseAITestApp-SPM.app"; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -141,6 +144,7 @@ 868A7C572CCC27AF00E449DD /* Integration */ = { isa = PBXGroup; children = ( + 0E460FAA2E9858E4007E26A6 /* LiveSessionTests.swift */, DEF0BB502DA9B7400093E9F4 /* SchemaTests.swift */, DEF0BB4E2DA74F460093E9F4 /* TestHelpers.swift */, 8689CDCB2D7F8BCF00BF426B /* CountTokensIntegrationTests.swift */, @@ -271,6 +275,7 @@ isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( + 0EC8BAE22E98784E0075A4E0 /* Assets.xcassets in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -295,6 +300,7 @@ files = ( 8689CDCC2D7F8BD700BF426B /* CountTokensIntegrationTests.swift in Sources */, 86D77E042D7B6C9D003D155D /* InstanceConfig.swift in Sources */, 
+ 0E460FAB2E9858E4007E26A6 /* LiveSessionTests.swift in Sources */, DEF0BB512DA9B7450093E9F4 /* SchemaTests.swift in Sources */, DEF0BB4F2DA74F680093E9F4 /* TestHelpers.swift in Sources */, 868A7C4F2CCC229F00E449DD /* Credentials.swift in Sources */, diff --git a/FirebaseAI/Tests/TestApp/Resources/Assets.xcassets/hello.dataset/Contents.json b/FirebaseAI/Tests/TestApp/Resources/Assets.xcassets/hello.dataset/Contents.json new file mode 100644 index 00000000000..7e31b8c1616 --- /dev/null +++ b/FirebaseAI/Tests/TestApp/Resources/Assets.xcassets/hello.dataset/Contents.json @@ -0,0 +1,12 @@ +{ + "data" : [ + { + "filename" : "hello.wav", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/FirebaseAI/Tests/TestApp/Resources/Assets.xcassets/hello.dataset/hello.wav b/FirebaseAI/Tests/TestApp/Resources/Assets.xcassets/hello.dataset/hello.wav new file mode 100644 index 0000000000000000000000000000000000000000..c065afa21c3b722c357b7d8f2d004392c8dcf12e GIT binary patch literal 27164 zcmXV11GF8<)9>-j^)vHg+cq}b*iJUy*xA@l_8)I-+qP}KJo}pC{wg`&xo7UXZ>Fca z@T=gq0STD6KCjZn{~Jz5SQJEe#XAw6-A{{Nfd`n@Qj1@B)m@{KhD5k z@CW=6f5j>IHNK5+;OF=Uc3>S_PzL1ECZf-w$57I-@jdJ~|JAQrJ5=fIm!@e}+Q zKZN)0;`cZo?m6L;j=#aP&p7*ct7I7CDSm<9!kuKemkaH@aM^JI4q^iN;G2See^F+H z%L*gWa5o?OaR6vcfzN0B9cX=xU;gg*C(gn$aHj#KE*LWn?!LkAV1&o`4sdY;U&jxC zvbQjD3iObLQ{nwjaOX8pko%hgAO0ml1vn`Hdf!0HC-^2ljrZf7cq{%7{+@>aKY%<^ zaVE|KYF*H>0O-$z@xJ0$FxC^``3pSHgC2g-@CwHG3IAmPEq6hl_u%sse!YQqDL}>d z|8MgNp8tC9Bi#QAt#hH@GRvB>jGxYcvWoB`ULXwfLvwZ>6d0Re$(^~zVl$@T4O#@p{INJ8omHLy~Ounv~&0({CxqE{rtPHbf7yGerLjW25|Z2HzmK2m<)G+ zjq*!AnZVy;=<5Oe{e^?C@W}yw-5>!waFz*ur9;bKzkb56AHeBb82u~!bpXCR@SO+G z?C?eas8#^sCcuv$sLY0ul7I7*41DIp+s{B}4}g+n(0Lw=_y#2O0I11@elWag1}h|y z1bwBzXumL>0`I^3Eyq{z+Xbl7erp237^%OXeE_I-^1Vcz!3$MSiol3-*#RY-e6%o3Hbd2 zJiGyEYB&b0zYt17#ZWa=AJs$E;lF6;(E{{;1_VC=Z9IpTA)t!~zG6`#jF<%DmiX;! z6rlbW((O1BMo9(qeFB>;1-+FAeMf@c6G8J{(8(!$93R2QfQxH@uutHFN}yWMMh9s# zFlt5kpG98*YY%|OZ!qowya{gyd%Fy>6HpaY8?{AsfHo3*Q#h&wZ(yM9CTQaw$nG5= zXAeFLdasHafoyzW{o&AW1j+#b2_7mvt4XzAGc@I+h1m1EFAo3SxtD#naFAXF} zfF>fKHwo~Z0=PZ_Eg$05xGSCqXexsmgGOvX!4NbEwFH^8K|Ro)C>gkEk5}M>crlEz zA8)~D0L1~2?Il2V1Hjua84J*I9?&iTZv#;m)DCbHj_!dDzJhFiS&I@!%4}&Hvf$W3068alxCV{p#Fa>W!gW+u#j2{EE zn!y4dgPmr>2!n7dJo-1+e&BorUWMzU&*%v13}aM7xgh^Y)D(IuhlYawF5-#MHV1qq z1qj*?68H}Nt1kVSbwi?Db z1v0?EeKw489Q5o5TlxgPsV4M%1TeQ6*Tt>DM?A*+z$@kf75jj`RKQ3h&`f)vm@_3;pp-fNJZ1ANt`-@opGEPo-q2s#Im%TCbOIotxgTP$!b!l)a7 z;s8ivGFlGOehc&*2dbvP?C$|>-Gp(!!u1Jw`bA&m-#(~3=qn0!LJQC)(DpAc{u!tp zh?o8L?-(aSRQUq@&2_+w0KT&yjMo9|+64Wl02k%Jv+f1Wb;3RIIFQD5u%hm0D(VY} zO#lr}L&@k7+5!|!hI{2e@?XF%Yl4S!;<>mp@Q{lufE6|bymp4x6Tn0C1l%r$78PMG zx&bxfEIytqk!^ckh%`Kv;xIlK~IB$ zs@6c&DfAw#2Yngf&0mpxCZI;dzh>Ra;Gw3Y?dUAJhjLH`TyaDs@gCl|hdx5c{~BhV z@j&4upr$EU{Yc=KLsTcF+>t!AUw#4(oj6nfEYk5CLR$f#C~EY zafUcYY$jR~l?aZo5Qji^PKe!}g2$-|))I%4a09TzZg?U1uJ(Ybfw(cQj*DX+7s5^9 z-flqsR`5+n!M~43$51*dPIMubLC>3rF+?4rFcD5X#r8aa;4BX}~296@d+)5r=`UCK)JCU+AHQETkcrm6Yz zU(y({h3F8g$ZOQ*cn)!cEJWGKM&u2`Ni-y15DNIu*1APKEeYb5(B4pMv8gmj-k_XT zuW6Eg9jzwE(i#(B%Wx~$?@VW=0u#^tq?gjER6FVb*^!(=wxbr&ADEHcBEFkpg~?%E z82K~yldV>8c(U}JalHuq>5B3m(bI5s=g90)zV*yK{wQs*c1&mxch{O{d}!*-ToPA? 
z?}jqBattp$!|N*FU#GV)EY`<=PJCK6vkFR8r0<#-nOs0z7Mp_m>UCm@LMBhf+GbF*Y-camF|Nr& z(|7oL3O`8WY=v2V5D(bSf0@jdDKz{i{C*qTna{Ovk`<>qW8nD!a#aZ8VfdziIs>X+ zz)Gs)nN`tI5XjD_sQvcRa~66?N47Wc2IpWu060SSCa_Gm|*iz6~M#mn0ARw&g)+#jMa%8QWd{XlfdiNgF>Sk7L!JdQS&Lf2L_m4UP$ z(nb+6*B}eI0&zy!U?f>;0CBSiIcEUXMgn~+MDB`McqHd3e=kNu68jJ5+7RM+2tMy2 zx+?tha{TuUZ63$IkJ6j+fmEcj7p>&NqnDUgm2)ECsuFT^^S%d-7tpFgYY#>9kNID{ zRr^KQ#%-injtCk_4<&r7<9Q8^X zo@|_r1t@g?|Dp?JiI0dE8tpa0lZQ|vj3tLHU?e`3&&g!2H>g}x-Iz?a89|RByOK);zG`=1V<*IUT2~Ex6xDLFwCC2vPt*V}A`0GNX=*PHl2=sJ; z&zIpyjYWDQi2=xY3^Yz>#I=iR^c$#E*u!~lI-6UI+|hcwfWtI-#g@&i}>?N;@f4a zjlX$6fb-|H|A_eV96o}HSyj1Kd1@IvFoG-V&_WC3Ru|u>4b|nK@Bhyh`GfeHO)k8I z6dw|`Y^t*0K%A_IT!M)xPw=in+Ao6E+Hlzbir(N;<%SZjxkt3V%^8yDQkK3;pin&x zLOsJnq3<6c9-at9;`_u!)tkcZS2+1siBsyy6W54Z_lPq`Xz4!jTK%G)XmXIgRhG?$ z<3Fe+=2EZrW1KREifSj7)CtZ$!?hlwlNxv4=DZWgCXL$bGOgOg!rMqnJ-y^Ex%L9> z?x!X71PJwnkMn$!$!jO|z#7{9gn5(Y@uH#3f1!tr)0 z_@CkI2wDFBPc%6Tuj+XZXVCEl`c^pbj3!voLW}Z zbX%zDmLQj-(0qYbACdFaGf;k`t+QNp4auLwGX9~}o5+4Ylw9MC+x)&1X>CQ)mpJn` z`uc_Mw<4q8(Tb`fv$*yFwbof|MsmFYihMT5*+?)Q**>PNOwLq(cM1Joqn-N!c`Hoy zhy4CKygsIX^-P~#NOlkQw2$+j()v?)S5FoyhGzAwA@vNF+qCo;+q%V37Svql+RaGg z-`LApUg`-a$7nMZ3hvPREjaz1b595AF7;F#^#q&UP@ String { + switch config.apiConfig.service { + case .vertexAI: + ModelNames.gemini2FlashLivePreview + case .googleAI: + ModelNames.gemini2_5_FlashLivePreview + } + } + + @Test(arguments: InstanceConfig.liveConfigs) + func sendTextRealtime_receiveText(_ config: InstanceConfig) async throws { + let model = FirebaseAI.componentInstance(config).liveModel( + modelName: modelForBackend(config), + generationConfig: textConfig, + systemInstruction: systemInstructions.yesOrNo + ) + + let session = try await model.connect() + await session.sendTextRealtime("Does five plus five equal ten?") + + let text = try await session.collectNextTextResponse() + + await session.close() + let modelResponse = text + .trimmingCharacters(in: .whitespacesAndNewlines) + .trimmingCharacters(in: .punctuationCharacters) + .lowercased() + + #expect(modelResponse == "yes") + } + + @Test(arguments: InstanceConfig.liveConfigs) + func sendTextRealtime_receiveAudioOutputTranscripts(_ config: InstanceConfig) async throws { + let model = FirebaseAI.componentInstance(config).liveModel( + modelName: modelForBackend(config), + generationConfig: audioConfig, + systemInstruction: systemInstructions.yesOrNo + ) + + let session = try await model.connect() + await session.sendTextRealtime("Does five plus five equal ten?") + + let text = try await session.collectNextAudioOutputTranscript() + + await session.close() + let modelResponse = text + .trimmingCharacters(in: .whitespacesAndNewlines) + .trimmingCharacters(in: .punctuationCharacters) + .lowercased() + + #expect(modelResponse == "yes") + } + + @Test(arguments: InstanceConfig.liveConfigs) + func sendAudioRealtime_receiveAudioOutputTranscripts(_ config: InstanceConfig) async throws { + let model = FirebaseAI.componentInstance(config).liveModel( + modelName: modelForBackend(config), + generationConfig: audioConfig, + systemInstruction: systemInstructions.helloGoodbye + ) + + let session = try await model.connect() + + guard let audioFile = NSDataAsset(name: "hello") else { + Issue.record("Missing audio file 'hello.wav' in Assets") + return + } + await session.sendAudioRealtime(audioFile.data) + await session.sendAudioRealtime(Data(repeating: 0, count: audioFile.data.count)) + + let text = try await session.collectNextAudioOutputTranscript() + + await session.close() + let modelResponse = text + .trimmingCharacters(in: 
.whitespacesAndNewlines) + .trimmingCharacters(in: .punctuationCharacters) + .lowercased() + + #expect(modelResponse == "goodbye") + } + + @Test(arguments: InstanceConfig.liveConfigs) + func sendAudioRealtime_receiveText(_ config: InstanceConfig) async throws { + let model = FirebaseAI.componentInstance(config).liveModel( + modelName: modelForBackend(config), + generationConfig: textConfig, + systemInstruction: systemInstructions.helloGoodbye + ) + + let session = try await model.connect() + + guard let audioFile = NSDataAsset(name: "hello") else { + Issue.record("Missing audio file 'hello.wav' in Assets") + return + } + await session.sendAudioRealtime(audioFile.data) + await session.sendAudioRealtime(Data(repeating: 0, count: audioFile.data.count)) + + let text = try await session.collectNextTextResponse() + + await session.close() + let modelResponse = text + .trimmingCharacters(in: .whitespacesAndNewlines) + .trimmingCharacters(in: .punctuationCharacters) + .lowercased() + + #expect(modelResponse == "goodbye") + } + + @Test(arguments: InstanceConfig.liveConfigs) + func realtime_functionCalling(_ config: InstanceConfig) async throws { + let model = FirebaseAI.componentInstance(config).liveModel( + modelName: modelForBackend(config), + generationConfig: textConfig, + tools: tools, + systemInstruction: systemInstructions.lastNames + ) + + let session = try await model.connect() + await session.sendTextRealtime("Alex") + + guard let toolCall = try await session.collectNextToolCall() else { + return + } + + let functionCalls = try #require(toolCall.functionCalls) + + #expect(functionCalls.count == 1) + let functionCall = try #require(functionCalls.first) + + #expect(functionCall.name == "getLastName") + guard let response = getLastName(args: functionCall.args) else { + return + } + await session.sendFunctionResponses([ + FunctionResponsePart( + name: functionCall.name, + response: ["lastName": .string(response)], + functionId: functionCall.functionId + ), + ]) + + var text = try await session.collectNextTextResponse() + if text.isEmpty { + // The model sometimes sends an empty text response first + text = try await session.collectNextTextResponse() + } + + await session.close() + let modelResponse = text + .trimmingCharacters(in: .whitespacesAndNewlines) + .trimmingCharacters(in: .punctuationCharacters) + .lowercased() + + #expect(modelResponse == "smith") + } + + @Test(arguments: InstanceConfig.liveConfigs.filter { + // TODO: (b/XXXX) Remove when vertex adds support + switch $0.apiConfig.service { + case .googleAI: + true + case .vertexAI: + false + } + }) + func realtime_functionCalling_cancellation(_ config: InstanceConfig) async throws { + // TODO: (b/XXXX) Remove when vertex adds support + guard case .googleAI = config.apiConfig.service else { + Issue.record("Vertex does not currently support function ids or function cancellation.") + return + } + + let model = FirebaseAI.componentInstance(config).liveModel( + modelName: modelForBackend(config), + generationConfig: textConfig, + tools: tools, + systemInstruction: systemInstructions.lastNames + ) + + let session = try await model.connect() + await session.sendTextRealtime("Alex") + + guard let toolCall = try await session.collectNextToolCall() else { + return + } + + let functionCalls = try #require(toolCall.functionCalls) + + #expect(functionCalls.count == 1) + let functionCall = try #require(functionCalls.first) + let id = try #require(functionCall.functionId) + + await session.sendTextRealtime("Actually, I don't care about the last 
name of Alex anymore.")
+
+    for try await cancellation in session.responsesOf(LiveServerToolCallCancellation.self) {
+      #expect(cancellation.ids == [id])
+      break
+    }
+
+    await session.close()
+  }
+
+  // Getting a limited use token adds too much overhead; we can't interrupt the model in time
+  @Test(
+    arguments: InstanceConfig.liveConfigs.filter { !$0.useLimitedUseAppCheckTokens }
+  )
+  func realtime_interruption(_ config: InstanceConfig) async throws {
+    let model = FirebaseAI.componentInstance(config).liveModel(
+      modelName: modelForBackend(config),
+      generationConfig: audioConfig
+    )
+
+    let session = try await model.connect()
+
+    guard let audioFile = NSDataAsset(name: "hello") else {
+      Issue.record("Missing audio file 'hello.wav' in Assets")
+      return
+    }
+    await session.sendAudioRealtime(audioFile.data)
+    await session.sendAudioRealtime(Data(repeating: 0, count: audioFile.data.count))
+
+    // Wait a second to allow the model to start generating (and cause a proper interruption)
+    try await Task.sleep(nanoseconds: OneSecondInNanoseconds)
+    await session.sendAudioRealtime(audioFile.data)
+    await session.sendAudioRealtime(Data(repeating: 0, count: audioFile.data.count))
+
+    for try await content in session.responsesOf(LiveServerContent.self) {
+      if content.wasInterrupted {
+        break
+      }
+
+      if content.isTurnComplete {
+        Issue.record("The model never sent an interrupted message.")
+        return
+      }
+    }
+  }
+
+  @Test(arguments: InstanceConfig.liveConfigs)
+  func incremental_works(_ config: InstanceConfig) async throws {
+    let model = FirebaseAI.componentInstance(config).liveModel(
+      modelName: modelForBackend(config),
+      generationConfig: textConfig,
+      systemInstruction: systemInstructions.yesOrNo
+    )
+
+    let session = try await model.connect()
+    await session.sendContent("Does five plus")
+    await session.sendContent(" five equal ten?", turnComplete: true)
+
+    let text = try await session.collectNextTextResponse()
+
+    await session.close()
+    let modelResponse = text
+      .trimmingCharacters(in: .whitespacesAndNewlines)
+      .trimmingCharacters(in: .punctuationCharacters)
+      .lowercased()
+
+    #expect(modelResponse == "yes")
+  }
+
+  private func getLastName(args: JSONObject) -> String? {
+    guard case let .string(firstName) = args["firstName"] else {
+      Issue.record("Missing 'firstName' argument: \(String(describing: args))")
+      return nil
+    }
+
+    switch firstName {
+    case "Alex": return "Smith"
+    case "Bob": return "Johnson"
+    default:
+      Issue.record("Unsupported 'firstName': \(firstName)")
+      return nil
+    }
+  }
+}
+
+private extension LiveSession {
+  /// Collects the text that the model sends for the next turn.
+  ///
+  /// Listens for `LiveServerContent` messages from the model,
+  /// incrementally keeping track of any `TextPart`s it sends. Once
+  /// the model signals that its turn is complete, the function returns
+  /// the collected `TextPart`s concatenated into a single string.
+  func collectNextTextResponse() async throws -> String {
+    var text = ""
+
+    for try await content in responsesOf(LiveServerContent.self) {
+      text += content.modelTurn?.allText() ?? ""
+
+      if content.isTurnComplete {
+        break
+      }
+    }
+
+    return text
+  }
+
+  /// Collects the audio output transcripts that the model sends for the next turn.
+  ///
+  /// Listens for `LiveServerContent` messages from the model,
+  /// incrementally keeping track of any `LiveAudioTranscription`s it sends.
+  /// Once the model signals that its turn is complete, the function returns
+  /// the collected `LiveAudioTranscription`s concatenated into a single string.
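+  ///
+  /// For example, a minimal usage sketch, assuming `session` is an open `LiveSession`:
+  /// ```swift
+  /// let transcript = try await session.collectNextAudioOutputTranscript()
+  /// #expect(!transcript.isEmpty)
+  /// ```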
+  func collectNextAudioOutputTranscript() async throws -> String {
+    var text = ""
+
+    for try await content in responsesOf(LiveServerContent.self) {
+      text += content.outputAudioText()
+
+      if content.isTurnComplete {
+        break
+      }
+    }
+
+    return text
+  }
+
+  /// Waits for the next `LiveServerToolCall` message from the model and returns it.
+  ///
+  /// If the model instead sends `LiveServerContent`, the function keeps track of
+  /// any messages it sends (either via `LiveAudioTranscription` or `TextPart`) and
+  /// records an issue describing them.
+  ///
+  /// This is useful when testing function calling, as the model sometimes sends an error
+  /// message, does something unexpected, or asks for clarification. Logging the message
+  /// (instead of just timing out) makes such situations easier to debug.
+  func collectNextToolCall() async throws -> LiveServerToolCall? {
+    var error = ""
+    for try await response in responses {
+      switch response.payload {
+      case let .toolCall(toolCall):
+        return toolCall
+      case let .content(content):
+        if let text = content.modelTurn?.allText() {
+          error += text
+        } else {
+          error += content.outputAudioText()
+        }
+
+        if content.isTurnComplete {
+          Issue.record("The model didn't send a tool call. Text received: \(error)")
+          return nil
+        }
+      default:
+        continue
+      }
+    }
+    Issue.record("Failed to receive any responses")
+    return nil
+  }
+
+  /// Filters responses from the model to a certain type.
+  ///
+  /// Useful when you only expect (or care about) certain types.
+  ///
+  /// ```swift
+  /// for try await content in session.responsesOf(LiveServerContent.self) {
+  ///   // ...
+  /// }
+  /// ```
+  ///
+  /// This is equivalent to manually doing:
+  /// ```swift
+  /// for try await response in session.responses {
+  ///   if case let .content(content) = response.payload {
+  ///     // ...
+  ///   }
+  /// }
+  /// ```
+  func responsesOf<T>(_: T.Type) -> AsyncCompactMapSequence<AsyncThrowingStream<LiveServerMessage, Error>, T> {
+    responses.compactMap { response in
+      switch response.payload {
+      case let .content(content):
+        if let casted = content as? T {
+          return casted
+        }
+      case let .toolCall(toolCall):
+        if let casted = toolCall as? T {
+          return casted
+        }
+      case let .toolCallCancellation(cancellation):
+        if let casted = cancellation as? T {
+          return casted
+        }
+      case let .goingAwayNotice(goingAway):
+        if let casted = goingAway as? T {
+          return casted
+        }
+      }
+      return nil
+    }
+  }
+}
+
+private extension ModelContent {
+  /// The text from all parts, concatenated into a single string.
+  ///
+  /// If this doesn't contain any `TextPart`s, an empty
+  /// string will be returned instead.
+  func allText() -> String {
+    parts.compactMap { ($0 as? TextPart)?.text }.joined()
+  }
+}
+
+extension LiveServerContent {
+  /// Text of the output `LiveAudioTranscription`, or an empty string if it's missing.
+  func outputAudioText() -> String {
+    outputAudioTranscription?.text ??
"" + } +} diff --git a/FirebaseAI/Tests/TestApp/Tests/Utilities/InstanceConfig.swift b/FirebaseAI/Tests/TestApp/Tests/Utilities/InstanceConfig.swift index bf9d32c6e0d..4a91b00456d 100644 --- a/FirebaseAI/Tests/TestApp/Tests/Utilities/InstanceConfig.swift +++ b/FirebaseAI/Tests/TestApp/Tests/Utilities/InstanceConfig.swift @@ -26,6 +26,13 @@ struct InstanceConfig: Equatable, Encodable { version: .v1beta ) ) + static let vertexAI_v1beta_appCheckLimitedUse = InstanceConfig( + useLimitedUseAppCheckTokens: true, + apiConfig: APIConfig( + service: .vertexAI(endpoint: .firebaseProxyProd, location: "us-central1"), + version: .v1beta + ) + ) static let vertexAI_v1beta_global = InstanceConfig( apiConfig: APIConfig( service: .vertexAI(endpoint: .firebaseProxyProd, location: "global"), @@ -76,6 +83,14 @@ struct InstanceConfig: Equatable, Encodable { // googleAI_v1beta_freeTier_bypassProxy, ] + static let liveConfigs = [ + vertexAI_v1beta, + vertexAI_v1beta_appCheckLimitedUse, + googleAI_v1beta, + googleAI_v1beta_appCheckLimitedUse, + googleAI_v1beta_freeTier, + ] + static let vertexAI_v1beta_appCheckNotConfigured = InstanceConfig( appName: FirebaseAppNames.appCheckNotConfigured, apiConfig: APIConfig( From b73d18843274afd148afb5179a0e459c13a4b630 Mon Sep 17 00:00:00 2001 From: Daymon Date: Fri, 10 Oct 2025 17:37:32 -0500 Subject: [PATCH 12/15] Remove unit tests --- .../Unit/TestUtilities/XCTExtensions.swift | 30 --- ...idiGenerateContentServerMessageTests.swift | 191 ------------------ .../Unit/Types/Live/VoiceConfigTests.swift | 62 ------ .../Tests/Unit/Types/ProtoDurationTests.swift | 99 --------- 4 files changed, 382 deletions(-) delete mode 100644 FirebaseAI/Tests/Unit/TestUtilities/XCTExtensions.swift delete mode 100644 FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift delete mode 100644 FirebaseAI/Tests/Unit/Types/Live/VoiceConfigTests.swift delete mode 100644 FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift diff --git a/FirebaseAI/Tests/Unit/TestUtilities/XCTExtensions.swift b/FirebaseAI/Tests/Unit/TestUtilities/XCTExtensions.swift deleted file mode 100644 index 33ef11de6a7..00000000000 --- a/FirebaseAI/Tests/Unit/TestUtilities/XCTExtensions.swift +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2025 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import XCTest - -/// Asserts that a string contains another string. -/// -/// ```swift -/// XCTAssertContains("my name is", "name") -/// ``` -/// -/// - Parameters: -/// - string: The source string that should contain the other. -/// - contains: The string that should be contained in the source string. 
-func XCTAssertContains(_ string: String, _ contains: String) { - if !string.contains(contains) { - XCTFail("(\"\(string)\") does not contain (\"\(contains)\")") - } -} diff --git a/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift b/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift deleted file mode 100644 index 710f7510ff0..00000000000 --- a/FirebaseAI/Tests/Unit/Types/Live/BidiGenerateContentServerMessageTests.swift +++ /dev/null @@ -1,191 +0,0 @@ -// Copyright 2025 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import XCTest - -@testable import FirebaseAI - -@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, *) -@available(watchOS, unavailable) -final class BidiGenerateContentServerMessageTests: XCTestCase { - let decoder = JSONDecoder() - - func testDecodeBidiGenerateContentServerMessage_setupComplete() throws { - let json = """ - { - "setupComplete" : {} - } - """ - let jsonData = try XCTUnwrap(json.data(using: .utf8)) - - let serverMessage = try decoder.decode(BidiGenerateContentServerMessage.self, from: jsonData) - guard case .setupComplete = serverMessage.messageType else { - XCTFail("Decoded message is not a setupComplete message.") - return - } - } - - func testDecodeBidiGenerateContentServerMessage_serverContent() throws { - let json = """ - { - "serverContent" : { - "modelTurn" : { - "parts" : [ - { - "inlineData" : { - "data" : "BQUFBQU=", - "mimeType" : "audio/pcm" - } - } - ], - "role" : "model" - }, - "turnComplete": true, - "groundingMetadata": { - "webSearchQueries": ["query1", "query2"], - "groundingChunks": [ - { "web": { "uri": "uri1", "title": "title1" } } - ], - "groundingSupports": [ - { "segment": { "endIndex": 10, "text": "text" }, "groundingChunkIndices": [0] } - ], - "searchEntryPoint": { "renderedContent": "html" } - }, - "inputTranscription": { - "text": "What day of the week is it?" - }, - "outputTranscription": { - "text": "Today is friday" - } - } - } - """ - let jsonData = try XCTUnwrap(json.data(using: .utf8)) - - let serverMessage = try decoder.decode(BidiGenerateContentServerMessage.self, from: jsonData) - guard case let .serverContent(serverContent) = serverMessage.messageType else { - XCTFail("Decoded message is not a serverContent message.") - return - } - - XCTAssertEqual(serverContent.turnComplete, true) - XCTAssertNil(serverContent.interrupted) - XCTAssertNil(serverContent.generationComplete) - - let modelTurn = try XCTUnwrap(serverContent.modelTurn) - XCTAssertEqual(modelTurn.role, "model") - XCTAssertEqual(modelTurn.parts.count, 1) - let part = try XCTUnwrap(modelTurn.parts.first as? 
InlineDataPart) - XCTAssertEqual(part.data, Data(repeating: 5, count: 5)) - XCTAssertEqual(part.mimeType, "audio/pcm") - - let metadata = try XCTUnwrap(serverContent.groundingMetadata) - XCTAssertEqual(metadata.webSearchQueries, ["query1", "query2"]) - XCTAssertEqual(metadata.groundingChunks.count, 1) - let groundingChunk = try XCTUnwrap(metadata.groundingChunks.first) - let webChunk = try XCTUnwrap(groundingChunk.web) - XCTAssertEqual(webChunk.uri, "uri1") - XCTAssertEqual(metadata.groundingSupports.count, 1) - let groundingSupport = try XCTUnwrap(metadata.groundingSupports.first) - XCTAssertEqual(groundingSupport.segment.startIndex, 0) - XCTAssertEqual(groundingSupport.segment.partIndex, 0) - XCTAssertEqual(groundingSupport.segment.endIndex, 10) - XCTAssertEqual(groundingSupport.segment.text, "text") - let searchEntryPoint = try XCTUnwrap(metadata.searchEntryPoint) - XCTAssertEqual(searchEntryPoint.renderedContent, "html") - - let inputTranscription = try XCTUnwrap(serverContent.inputTranscription) - XCTAssertEqual(inputTranscription.text, "What day of the week is it?") - - let outputTranscription = try XCTUnwrap(serverContent.outputTranscription) - XCTAssertEqual(outputTranscription.text, "Today is friday") - } - - func testDecodeBidiGenerateContentServerMessage_toolCall() throws { - let json = """ - { - "toolCall" : { - "functionCalls" : [ - { - "name": "changeBackgroundColor", - "id": "functionCall-12345-67890", - "args" : { - "color": "#F54927" - } - } - ] - } - } - """ - let jsonData = try XCTUnwrap(json.data(using: .utf8)) - - let serverMessage = try decoder.decode(BidiGenerateContentServerMessage.self, from: jsonData) - guard case let .toolCall(toolCall) = serverMessage.messageType else { - XCTFail("Decoded message is not a toolCall message.") - return - } - - let functionCalls = try XCTUnwrap(toolCall.functionCalls) - XCTAssertEqual(functionCalls.count, 1) - let functionCall = try XCTUnwrap(functionCalls.first) - XCTAssertEqual(functionCall.name, "changeBackgroundColor") - XCTAssertEqual(functionCall.id, "functionCall-12345-67890") - let args = try XCTUnwrap(functionCall.args) - guard case let .string(color) = args["color"] else { - XCTFail("Missing color argument") - return - } - XCTAssertEqual(color, "#F54927") - } - - func testDecodeBidiGenerateContentServerMessage_toolCallCancellation() throws { - let json = """ - { - "toolCallCancellation" : { - "ids" : ["functionCall-12345-67890"] - } - } - """ - let jsonData = try XCTUnwrap(json.data(using: .utf8)) - - let serverMessage = try decoder.decode(BidiGenerateContentServerMessage.self, from: jsonData) - guard case let .toolCallCancellation(toolCallCancellation) = serverMessage.messageType else { - XCTFail("Decoded message is not a toolCallCancellation message.") - return - } - - let ids = try XCTUnwrap(toolCallCancellation.ids) - XCTAssertEqual(ids, ["functionCall-12345-67890"]) - } - - func testDecodeBidiGenerateContentServerMessage_goAway() throws { - let json = """ - { - "goAway" : { - "timeLeft": "1.23456789s" - } - } - """ - let jsonData = try XCTUnwrap(json.data(using: .utf8)) - - let serverMessage = try decoder.decode(BidiGenerateContentServerMessage.self, from: jsonData) - guard case let .goAway(goAway) = serverMessage.messageType else { - XCTFail("Decoded message is not a goAway message.") - return - } - - XCTAssertEqual(goAway.timeLeft?.seconds, 1) - XCTAssertEqual(goAway.timeLeft?.nanos, 234_567_890) - } -} diff --git a/FirebaseAI/Tests/Unit/Types/Live/VoiceConfigTests.swift 
b/FirebaseAI/Tests/Unit/Types/Live/VoiceConfigTests.swift deleted file mode 100644 index 707c0088bdc..00000000000 --- a/FirebaseAI/Tests/Unit/Types/Live/VoiceConfigTests.swift +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2025 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import XCTest - -@testable import FirebaseAI - -@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, *) -@available(watchOS, unavailable) -final class VoiceConfigTests: XCTestCase { - let encoder = JSONEncoder() - - override func setUp() { - super.setUp() - encoder.outputFormatting = [.prettyPrinted, .sortedKeys, .withoutEscapingSlashes] - } - - func testEncodeVoiceConfig_prebuiltVoice() throws { - let voice = VoiceConfig.prebuiltVoiceConfig( - PrebuiltVoiceConfig(voiceName: "Zephyr") - ) - - let jsonData = try encoder.encode(voice) - - let json = try XCTUnwrap(String(data: jsonData, encoding: .utf8)) - XCTAssertEqual(json, """ - { - "prebuiltVoiceConfig" : { - "voiceName" : "Zephyr" - } - } - """) - } - - func testEncodeVoiceConfig_customVoice() throws { - let voice = VoiceConfig.customVoiceConfig( - CustomVoiceConfig(customVoiceSample: Data(repeating: 5, count: 5)) - ) - - let jsonData = try encoder.encode(voice) - - let json = try XCTUnwrap(String(data: jsonData, encoding: .utf8)) - XCTAssertEqual(json, """ - { - "customVoiceConfig" : { - "customVoiceSample" : "BQUFBQU=" - } - } - """) - } -} diff --git a/FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift b/FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift deleted file mode 100644 index bddb50eef1d..00000000000 --- a/FirebaseAI/Tests/Unit/Types/ProtoDurationTests.swift +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2025 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import XCTest - -@testable import FirebaseAI - -final class ProtoDurationTests: XCTestCase { - let decoder = JSONDecoder() - - private func decodeProtoDuration(_ jsonString: String) throws -> ProtoDuration { - let escapedString = "\"\(jsonString)\"" - let jsonData = try XCTUnwrap(escapedString.data(using: .utf8)) - - return try decoder.decode(ProtoDuration.self, from: jsonData) - } - - private func expectDecodeFailure(_ jsonString: String) throws -> DecodingError.Context? { - do { - let _ = try decodeProtoDuration(jsonString) - XCTFail("Expected decoding to fail") - return nil - } catch { - let decodingError = try XCTUnwrap(error as? 
DecodingError) - guard case let .dataCorrupted(dataCorrupted) = decodingError else { - XCTFail("Error was not a data corrupted error") - return nil - } - - return dataCorrupted - } - } - - func testDecodeProtoDuration_standardDuration() throws { - let duration = try decodeProtoDuration("120.000000123s") - XCTAssertEqual(duration.seconds, 120) - XCTAssertEqual(duration.nanos, 123) - - XCTAssertEqual(duration.timeInterval, 120.000000123) - } - - func testDecodeProtoDuration_withoutNanoseconds() throws { - let duration = try decodeProtoDuration("120s") - XCTAssertEqual(duration.seconds, 120) - XCTAssertEqual(duration.nanos, 0) - - XCTAssertEqual(duration.timeInterval, 120) - } - - func testDecodeProtoDuration_maxNanosecondDigits() throws { - let duration = try decodeProtoDuration("15.123456789s") - XCTAssertEqual(duration.seconds, 15) - XCTAssertEqual(duration.nanos, 123_456_789) - - XCTAssertEqual(duration.timeInterval, 15.123456789) - } - - func testDecodeProtoDuration_withMilliseconds() throws { - let duration = try decodeProtoDuration("15.123s") - XCTAssertEqual(duration.seconds, 15) - XCTAssertEqual(duration.nanos, 123_000_000) - - XCTAssertEqual(duration.timeInterval, 15.123000000) - } - - func testDecodeProtoDuration_invalidSeconds() throws { - guard let error = try expectDecodeFailure("invalid.123s") else { return } - XCTAssertContains(error.debugDescription, "Invalid proto duration seconds") - } - - func testDecodeProtoDuration_invalidNanoseconds() throws { - guard let error = try expectDecodeFailure("123.invalid") else { return } - XCTAssertContains(error.debugDescription, "Invalid proto duration nanoseconds") - } - - func testDecodeProtoDuration_tooManyDecimals() throws { - guard let error = try expectDecodeFailure("123.45.67") else { return } - XCTAssertContains(error.debugDescription, "Invalid proto duration string") - } - - func testDecodeProtoDuration_withoutSuffix() throws { - let duration = try decodeProtoDuration("123.456") - XCTAssertEqual(duration.seconds, 123) - XCTAssertEqual(duration.nanos, 456_000_000) - - XCTAssertEqual(duration.timeInterval, 123.456) - } -} From 9d2dfa11cf5bac7ea91e93048db491cf8df574cf Mon Sep 17 00:00:00 2001 From: Daymon Date: Fri, 10 Oct 2025 17:38:06 -0500 Subject: [PATCH 13/15] Revert ProtoDuration change --- .../Types/Internal/ProtoDuration.swift | 36 +------------------ 1 file changed, 1 insertion(+), 35 deletions(-) diff --git a/FirebaseAI/Sources/Types/Internal/ProtoDuration.swift b/FirebaseAI/Sources/Types/Internal/ProtoDuration.swift index d4af71b0346..1dac21d6429 100644 --- a/FirebaseAI/Sources/Types/Internal/ProtoDuration.swift +++ b/FirebaseAI/Sources/Types/Internal/ProtoDuration.swift @@ -107,40 +107,6 @@ extension ProtoDuration: Decodable { } self.seconds = secs - self.nanos = fractionalSecondsToNanoseconds(nanos, digits: nanoseconds.count) + self.nanos = nanos } } - -/// Cached powers of 10 for quickly mapping fractional seconds. -private let pow10: [Int32] = [ - 1, 10, 100, 1000, 10000, 100_000, - 1_000_000, 10_000_000, 100_000_000, 1_000_000_000, -] - -/// Converts a fractional second representing a nanosecond to a valid nanosecond value. 
-/// -/// ```swift -/// // 0.123456 -/// XCTAssertEqual( -/// fractionalSecondsToNanoseconds(123456, 6), -/// 123456000 -/// ) -/// -/// // 0.000123456 -/// XCTAssertEqual( -/// fractionalSecondsToNanoseconds(123456, 9), -/// 123456 -/// ) -/// -/// // 0.123456789 -/// XCTAssertEqual( -/// fractionalSecondsToNanoseconds(123456789, 9), -/// 123456789 -/// ) -/// ``` -private func fractionalSecondsToNanoseconds(_ value: Int32, digits: Int) -> Int32 { - precondition(digits >= 0 && digits <= 9, "A nanosecond value must fit within 0..9 digits") - precondition(value >= 0, "A nanosecond value must be positive") - - return Int32(truncatingIfNeeded: value) &* pow10[9 - digits] -} From 1e3c02e7675f034dd0eec254deaccac017f741f7 Mon Sep 17 00:00:00 2001 From: Daymon Date: Fri, 10 Oct 2025 17:38:26 -0500 Subject: [PATCH 14/15] Remove changelog entry --- FirebaseAI/CHANGELOG.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/FirebaseAI/CHANGELOG.md b/FirebaseAI/CHANGELOG.md index b7999b9fa4a..06f5f30908d 100644 --- a/FirebaseAI/CHANGELOG.md +++ b/FirebaseAI/CHANGELOG.md @@ -1,7 +1,3 @@ -# Unreleased -- [fixed] Fixed minor translation issue for nanosecond conversion when receiving - `LiveServerGoingAwayNotice`. (#15396) - # 12.4.0 - [feature] Added support for the URL context tool, which allows the model to access content from provided public web URLs to inform and enhance its responses. (#15221) From 96730a75218b4ddcf824b5a5bc23ecf5df93cad7 Mon Sep 17 00:00:00 2001 From: Daymon Date: Fri, 10 Oct 2025 17:45:54 -0500 Subject: [PATCH 15/15] Add bug link --- .../Tests/TestApp/Tests/Integration/LiveSessionTests.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/FirebaseAI/Tests/TestApp/Tests/Integration/LiveSessionTests.swift b/FirebaseAI/Tests/TestApp/Tests/Integration/LiveSessionTests.swift index 119ee638dc6..599d98c0d06 100644 --- a/FirebaseAI/Tests/TestApp/Tests/Integration/LiveSessionTests.swift +++ b/FirebaseAI/Tests/TestApp/Tests/Integration/LiveSessionTests.swift @@ -222,7 +222,7 @@ struct LiveSessionTests { } @Test(arguments: InstanceConfig.liveConfigs.filter { - // TODO: (b/XXXX) Remove when vertex adds support + // TODO: (b/450982184) Remove when vertex adds support switch $0.apiConfig.service { case .googleAI: true @@ -231,7 +231,7 @@ struct LiveSessionTests { } }) func realtime_functionCalling_cancellation(_ config: InstanceConfig) async throws { - // TODO: (b/XXXX) Remove when vertex adds support + // TODO: (b/450982184) Remove when vertex adds support guard case .googleAI = config.apiConfig.service else { Issue.record("Vertex does not currently support function ids or function cancellation.") return