Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 28 additions & 0 deletions docs/content/docs/video-output.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,34 @@ const videoOutput = VisionCamera.createVideoOutput({
> [!WARNING]
> Enabling Audio requires microphone permission.

#### Custom iOS Audio Session Control

On iOS, `AVCaptureSession` normally configures the app's `AVAudioSession` automatically when audio recording is active — which can override any audio session category or mode your app has configured itself.

If you only want to keep already-playing audio alive while recording, enable [`allowBackgroundAudioPlayback`](/api/react-native-vision-camera/interfaces/CameraProps#allowbackgroundaudioplayback):

```tsx
const camera = useCamera({
isActive: true,
device: device,
outputs: [videoOutput],
allowBackgroundAudioPlayback: true,
})
```

If you need full control over the app's `AVAudioSession` category or mode, disable [`automaticallyConfiguresApplicationAudioSession`](/api/react-native-vision-camera/interfaces/CameraProps#automaticallyconfiguresapplicationaudiosession) and configure `AVAudioSession` yourself using your preferred native integration:

```tsx
const camera = useCamera({
isActive: true,
device: device,
outputs: [videoOutput],
automaticallyConfiguresApplicationAudioSession: false,
})
```

This is especially useful for FaceTime-style experiences where you want to use a custom `AVAudioSession` mode such as `videoChat`, `voiceChat`, or `measurement` through a library like `react-native-volume-manager`.

#### Persistent Video Outputs

By default, an active recording will be automatically stopped when the input [`CameraDevice`](/api/react-native-vision-camera/hybrid-objects/CameraDevice) changes (e.g. when the user flips the Camera from front to back).
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,10 @@ class AudioSession {
private let queue: DispatchQueue
private let delegate: AudioFrameDelegate

init() throws {
init(
automaticallyConfiguresApplicationAudioSession: Bool,
allowBackgroundAudioPlayback: Bool?
) throws {
logger.log("Initializing AudioSession...")
// 1. Create AVCaptureSession
self.audioSession = AVCaptureSession()
Expand All @@ -25,6 +28,12 @@ class AudioSession {
let audioSession = self.audioSession
defer { audioSession.commitConfiguration() }

updateConfiguration(
automaticallyConfiguresApplicationAudioSession:
automaticallyConfiguresApplicationAudioSession,
allowBackgroundAudioPlayback: allowBackgroundAudioPlayback
)

// 3. Create audio input
guard let microphone = AVCaptureDevice.default(for: .audio) else {
throw RuntimeError.error(withMessage: "No microphone available!")
Expand Down Expand Up @@ -58,6 +67,18 @@ class AudioSession {
output.setSampleBufferDelegate(delegate, queue: queue)
}

/// Applies the session-wide audio flags to the underlying `AVCaptureSession`.
///
/// - Parameters:
///   - automaticallyConfiguresApplicationAudioSession: Whether the capture
///     session should manage the app's `AVAudioSession` on its own.
///   - allowBackgroundAudioPlayback: If `true`, audio that is already playing
///     keeps playing while recording. `nil` is treated as `false`.
func updateConfiguration(
  automaticallyConfiguresApplicationAudioSession: Bool,
  allowBackgroundAudioPlayback: Bool?
) {
  audioSession.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresApplicationAudioSession

  // Mixing with other audio is an iOS 18+ API, and only takes effect while
  // the capture session is managing the AVAudioSession itself.
  guard #available(iOS 18.0, *) else { return }
  let shouldMixWithOthers = automaticallyConfiguresApplicationAudioSession && (allowBackgroundAudioPlayback ?? false)
  audioSession.configuresApplicationAudioSessionToMixWithOthers = shouldMixWithOthers
}

/// Registers a callback that the delegate invokes for every captured audio
/// sample buffer, together with its presentation timestamp.
func setOnFrameListener(onFrame: @escaping (CMSampleBuffer, CMTime) -> Void) {
delegate.onFrame = onFrame
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,11 @@ class HybridCameraSession: HybridCameraSessionSpec {
)
}

let automaticallyConfiguresApplicationAudioSession =
config?.automaticallyConfiguresApplicationAudioSession ?? true
self.session.automaticallyConfiguresApplicationAudioSession =
automaticallyConfiguresApplicationAudioSession

// Remove all unwanted inputs and add all new inputs
try self.updateInputs(connections)
// Remove all unwanted outputs and add all new outputs
Expand All @@ -79,7 +84,7 @@ class HybridCameraSession: HybridCameraSessionSpec {
switch outputConfiguration.output {
case let output as any NativeCameraOutput:
// Configure AVCaptureOutput
output.configure(config: outputConfiguration)
output.configure(config: outputConfiguration, sessionConfig: config)
case let previewOutput as any NativePreviewViewOutput:
// Configure AVCaptureVideoPreviewLayer
previewOutput.configure(config: outputConfiguration)
Expand All @@ -100,10 +105,10 @@ class HybridCameraSession: HybridCameraSessionSpec {
self.session.automaticallyConfiguresCaptureDeviceForWideColor = !hasCustomDynamicRangeConstraint

// Background Audio Playback
if let allowBackgroundAudioPlayback = config?.allowBackgroundAudioPlayback {
if #available(iOS 18.0, *) {
self.session.configuresApplicationAudioSessionToMixWithOthers = allowBackgroundAudioPlayback
}
if #available(iOS 18.0, *) {
self.session.configuresApplicationAudioSessionToMixWithOthers =
automaticallyConfiguresApplicationAudioSession
&& (config?.allowBackgroundAudioPlayback ?? false)
}

// Return CameraControllers per connection to adjust camera settings (focus, etc)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ class HybridCameraVideoFrameOutput: HybridCameraVideoOutputSpec, NativeCameraOut
private let queue: DispatchQueue
private let videoQueue: DispatchQueue
private var audioSession: AudioSession? = nil
private var sessionConfiguration: CameraSessionConfiguration? = nil
let mediaType: MediaType = .video
let requiresAudioInput: Bool = false
let requiresDepthFormat: Bool = false
Expand Down Expand Up @@ -85,6 +86,22 @@ class HybridCameraVideoFrameOutput: HybridCameraVideoOutputSpec, NativeCameraOut
try? connection.setOrientation(outputOrientation)
}

/// Remembers the session-wide configuration, performs the regular per-output
/// configuration, and — if audio capture is already running — re-applies the
/// session-wide audio flags to the existing `AudioSession`.
///
/// - Parameters:
///   - config: The per-output configuration to apply.
///   - sessionConfig: The session-wide configuration, or `nil` if none was
///     provided (defaults are used in that case).
func configure(
  config: CameraOutputConfiguration,
  sessionConfig: CameraSessionConfiguration?
) {
  sessionConfiguration = sessionConfig
  configure(config: config)

  // Push the (possibly changed) session-wide audio settings down to a
  // live AudioSession; no-op when audio hasn't been started yet.
  audioSession?.updateConfiguration(
    automaticallyConfiguresApplicationAudioSession: sessionConfig?.automaticallyConfiguresApplicationAudioSession ?? true,
    allowBackgroundAudioPlayback: sessionConfig?.allowBackgroundAudioPlayback
  )
}

func getSupportedVideoCodecs() throws -> [VideoCodec] {
guard output.connection(with: .video) != nil else {
throw RuntimeError.error(
Expand Down Expand Up @@ -212,7 +229,11 @@ class HybridCameraVideoFrameOutput: HybridCameraVideoOutputSpec, NativeCameraOut
return audioSession
}
// 1. Create session
let audioSession = try AudioSession()
let audioSession = try AudioSession(
automaticallyConfiguresApplicationAudioSession:
sessionConfiguration?.automaticallyConfiguresApplicationAudioSession ?? true,
allowBackgroundAudioPlayback: sessionConfiguration?.allowBackgroundAudioPlayback
)
// 2. Add on frame listener
audioSession.setOnFrameListener { [weak self] buffer, timestamp in
guard let self else { return }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,4 +56,25 @@ public protocol NativeCameraOutput: AnyObject, ResolutionNegotionParticipant {
* such as orientation or mirroring settings in here.
*/
func configure(config: CameraOutputConfiguration)
/**
* Called whenever the `CameraOutputConfiguration` or
* session-wide configuration might change.
*
* Outputs that need session-wide settings can override
* this method. The default implementation delegates to
* `configure(config:)`.
*/
func configure(
config: CameraOutputConfiguration,
sessionConfig: CameraSessionConfiguration?
)
}

public extension NativeCameraOutput {
/// Default implementation for outputs that don't need session-wide
/// settings: simply forwards to the per-output `configure(config:)`.
func configure(
config: CameraOutputConfiguration,
sessionConfig: CameraSessionConfiguration?
) {
self.configure(config: config)
}
}

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import { useMemoizedArray } from './useMemoizedArray'
import { useStableCallback } from './useStableCallback'

interface Config {
automaticallyConfiguresApplicationAudioSession?: boolean
mirrorMode?: MirrorMode
allowBackgroundAudioPlayback?: boolean
constraints?: Constraint[]
Expand All @@ -32,6 +33,7 @@ export function useCameraController(
device: CameraDevice | undefined,
outputs: CameraOutput[],
{
automaticallyConfiguresApplicationAudioSession,
mirrorMode = 'auto',
constraints = [],
onSessionConfigSelected,
Expand Down Expand Up @@ -98,7 +100,11 @@ export function useCameraController(
onSessionConfigSelected: stableOnSessionConfigSelected,
},
],
{ allowBackgroundAudioPlayback: allowBackgroundAudioPlayback },
{
automaticallyConfiguresApplicationAudioSession:
automaticallyConfiguresApplicationAudioSession,
allowBackgroundAudioPlayback: allowBackgroundAudioPlayback,
},
)
if (isCanceled) {
controllers.forEach((c) => {
Expand All @@ -116,6 +122,7 @@ export function useCameraController(
}
}, [
device,
automaticallyConfiguresApplicationAudioSession,
mirrorMode,
session,
allowBackgroundAudioPlayback,
Expand Down
29 changes: 29 additions & 0 deletions packages/react-native-vision-camera/src/hooks/useCamera.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,30 @@ export interface CameraProps {
// Session Configuration
isActive: boolean
enableMultiCamSupport?: boolean
/**
* Whether the underlying iOS `AVCaptureSession`s should automatically
* configure the application's `AVAudioSession` when audio recording is enabled.
*
* Disable this if you want to manage `AVAudioSession` yourself,
* for example to set a custom `videoChat` or `measurement` mode via
* another library.
*
* @default true
* @platform iOS
*/
automaticallyConfiguresApplicationAudioSession?: boolean
/**
* If enabled, audio that is already playing can continue while a recording
* is in progress instead of being interrupted by the Camera.
*
* This only applies when
* {@linkcode CameraProps.automaticallyConfiguresApplicationAudioSession}
* is enabled.
*
* @default false
* @platform iOS
*/
allowBackgroundAudioPlayback?: boolean

// Connection Configuration
device: CameraDevice | CameraPosition
Expand Down Expand Up @@ -100,6 +124,8 @@ function defaultOnErrorHandler(error: Error) {
export function useCamera({
isActive,
enableMultiCamSupport = false,
automaticallyConfiguresApplicationAudioSession,
allowBackgroundAudioPlayback,
device,
outputs = [],
constraints,
Expand Down Expand Up @@ -153,6 +179,9 @@ export function useCamera({

// 4. Configure the session with the input + outputs to create a `CameraController`
const controller = useCameraController(session, input, outputs, {
automaticallyConfiguresApplicationAudioSession:
automaticallyConfiguresApplicationAudioSession,
allowBackgroundAudioPlayback: allowBackgroundAudioPlayback,
mirrorMode: mirrorMode,
onConfigured: onConfigured,
getInitialExposureBias: getInitialExposureBias,
Expand Down
Loading