import Foundation
import ScreenCaptureKit
import AVFoundation
import UniformTypeIdentifiers
import OSLog
@MainActor
class ScreenRecorder: NSObject, ObservableObject, SCContentSharingPickerObserver, SCRecordingOutputDelegate {
    private let logger = Logger()

    // MARK: - State Properties

    /// True while the capture stream is running.
    @Published var isRunning = false
    /// True while stream content is being written to disk.
    @Published var isRecordingStream = false
    /// Whether system-audio capture is enabled in the stream configuration.
    @Published var isAudioCaptureEnabled = true
    /// Whether microphone capture is enabled in the stream configuration.
    @Published var isMicCaptureEnabled = false

    // The object that manages the SCStream.
    private let captureEngine = CaptureEngine()
    private var recordingOutput: SCRecordingOutput?
    // The user's content selection from the picker.
    private var contentFilter: SCContentFilter?

    // MARK: - Capture Control

    /// Starts the screen capture session.
    ///
    /// Requires that the user has already selected content via `presentPicker()`.
    /// Suspends for the lifetime of the stream: the `for try await` loop below
    /// runs until the stream ends or fails.
    func start() async {
        guard !isRunning else { return }
        // Ensure we have a content filter from the picker before starting.
        guard let filter = self.contentFilter else {
            logger.error("No content selected. Present the picker to the user first.")
            return
        }
        do {
            // Initialize the recording output to save the stream to a file.
            try initRecordingOutput()
            guard let recordingOutput = self.recordingOutput else {
                logger.error("Failed to start: SCRecordingOutput is not initialized.")
                return
            }
            let config = streamConfiguration
            // Update the running state.
            isRunning = true
            // Start the stream and await new frames.
            for try await _ in captureEngine.startCapture(configuration: config, filter: filter, recordingOutput: recordingOutput) {
                // This loop runs for each new frame. You can process frames here if needed.
            }
            // Bug fix: the original only cleared `isRunning` on error, leaving the
            // UI stuck in the "recording" state when the stream ended normally.
            isRunning = false
        } catch {
            logger.error("Failed to start capture: \(error.localizedDescription)")
            isRunning = false
        }
    }

    /// Stops the screen capture session and finalizes the recording file.
    func stop() async {
        guard isRunning else { return }
        // Bug fix: detach the recording output while the stream is still alive
        // so the movie file is finalized, then tear down the stream itself.
        // (The original stopped the stream first, then removed the output from
        // an already-dead stream.)
        try? stopRecordingOutput()
        await captureEngine.stopCapture()
        isRunning = false
    }

    // MARK: - Content Sharing Picker

    /// Presents the system's content sharing picker to the user.
    func presentPicker() {
        // Bug fix: the picker must be activated before it can be presented;
        // without `isActive = true` the present call is a no-op.
        SCContentSharingPicker.shared.isActive = true
        SCContentSharingPicker.shared.add(self)
        if let stream = captureEngine.stream {
            SCContentSharingPicker.shared.present(for: stream)
        } else {
            SCContentSharingPicker.shared.present()
        }
    }

    /// A system callback for when the user selects content in the picker.
    nonisolated func contentSharingPicker(_ picker: SCContentSharingPicker, didUpdateWith filter: SCContentFilter, for stream: SCStream?) {
        Task { @MainActor in
            // Save the selected filter. The `start()` method will use this.
            self.contentFilter = filter
            // If the stream is already running, update it with the new selection.
            if self.isRunning {
                await self.captureEngine.update(configuration: self.streamConfiguration, filter: filter)
            }
        }
        logger.info("Picker updated with filter: \(filter)")
    }

    /// A system callback for when the user dismisses the picker without choosing.
    nonisolated func contentSharingPicker(_ picker: SCContentSharingPicker, didCancelFor stream: SCStream?) {
        logger.info("Picker canceled.")
    }

    // MARK: - Recording Output Setup

    /// Initializes the `SCRecordingOutput` to configure where the video is saved.
    ///
    /// Bug fix: the original referenced an `outputURL` that was defined nowhere
    /// in this file; the destination is now a timestamped file in the user's
    /// temporary directory, computed fresh for each recording.
    private func initRecordingOutput() throws {
        let outputURL = FileManager.default.temporaryDirectory
            .appendingPathComponent("Recording-\(Int(Date().timeIntervalSince1970)).mp4")
        // Configure the recording output.
        let recordingConfig = SCRecordingOutputConfiguration()
        recordingConfig.outputURL = outputURL
        recordingConfig.outputFileType = .mp4
        recordingConfig.videoCodecType = .hevc // H.265 for better compression.
        let output = SCRecordingOutput(configuration: recordingConfig, delegate: self)
        self.recordingOutput = output
        logger.log("Recording output initialized at: \(outputURL.path)")
    }

    /// Starts the recording output process.
    ///
    /// NOTE(review): `SCScreenRecordingError` is not declared in this file —
    /// presumably defined elsewhere in the project; verify it exists.
    private func startRecordingOutput() async throws {
        guard let recordingOutput = self.recordingOutput else { throw SCScreenRecordingError.setupFailed }
        // The recording output is added to the stream when `startCapture` is called.
        // You can manually call the delegate method for logging or state updates.
        recordingOutputDidStartRecording(recordingOutput)
    }

    /// Stops the recording output and finalizes the video file.
    ///
    /// - Throws: Any error from removing the output from the live stream.
    ///   (The original swallowed this with `try?` despite being `throws`.)
    private func stopRecordingOutput() throws {
        // Always drop the reference, even if removal fails.
        defer { self.recordingOutput = nil }
        guard let recordingOutput = self.recordingOutput else { return }
        logger.log("Stopping recording output.")
        try captureEngine.stopRecordingOutputForStream(recordingOutput)
        recordingOutputDidFinishRecording(recordingOutput)
    }

    // MARK: - Stream Configuration

    /// Creates the stream configuration based on the current state.
    private var streamConfiguration: SCStreamConfiguration {
        let streamConfig = SCStreamConfiguration()
        // Configure audio capture.
        streamConfig.capturesAudio = isAudioCaptureEnabled
        streamConfig.captureMicrophone = isMicCaptureEnabled
        // Set a standard frame rate (e.g., 60 fps).
        streamConfig.minimumFrameInterval = CMTime(value: 1, timescale: 60)
        // Use a pixel format and color space compatible with most displays.
        streamConfig.pixelFormat = kCVPixelFormatType_32BGRA
        streamConfig.colorSpaceName = CGColorSpace.sRGB
        // Set queue depth for smoother capture at the cost of memory.
        streamConfig.queueDepth = 8
        return streamConfig
    }

    // MARK: - SCRecordingOutputDelegate

    nonisolated func recordingOutputDidStartRecording(_ recordingOutput: SCRecordingOutput) {
        logger.log("Recording started.")
    }

    nonisolated func recordingOutputDidFinishRecording(_ recordingOutput: SCRecordingOutput) {
        logger.log("Recording finished.")
    }

    /// Surfaces recording failures (disk full, interrupted stream, etc.) —
    /// the original implemented no failure callback, hiding these errors.
    nonisolated func recordingOutput(_ recordingOutput: SCRecordingOutput, didFailWithError error: Error) {
        logger.error("Recording failed: \(error.localizedDescription)")
    }
}
/// Owns the lifecycle of an `SCStream`: creation, frame delivery, live
/// reconfiguration, and teardown.
class CaptureEngine {
    /// The active stream, or `nil` when capture is stopped.
    var stream: SCStream?
    /// Continuation driving the async sequence returned by `startCapture`.
    private var continuation: AsyncThrowingStream<Void, Error>.Continuation?

    /// Creates an `SCStream` for `filter`, attaches `recordingOutput`, and
    /// starts capturing.
    ///
    /// Bug fix: the original body contained only comments and no `return`,
    /// which does not compile for a non-void function.
    ///
    /// - Returns: A stream that finishes when capture stops, or throws when
    ///   setup or capture fails.
    func startCapture(configuration: SCStreamConfiguration, filter: SCContentFilter, recordingOutput: SCRecordingOutput) -> AsyncThrowingStream<Void, Error> {
        AsyncThrowingStream { continuation in
            self.continuation = continuation
            do {
                // 1. Create SCStream with the configuration and filter.
                let stream = SCStream(filter: filter, configuration: configuration, delegate: nil)
                // 2. Add the recordingOutput to the stream.
                try stream.addRecordingOutput(recordingOutput)
                self.stream = stream
                // 3. Start the stream; report failures through the sequence.
                Task {
                    do {
                        try await stream.startCapture()
                    } catch {
                        continuation.finish(throwing: error)
                    }
                }
            } catch {
                continuation.finish(throwing: error)
            }
        }
    }

    /// Applies a new configuration and content filter to the running stream.
    ///
    /// Bug fix: `ScreenRecorder` calls this when the picker selection changes,
    /// but the method was missing from the original class.
    func update(configuration: SCStreamConfiguration, filter: SCContentFilter) async {
        guard let stream else { return }
        do {
            try await stream.updateContentFilter(filter)
            try await stream.updateConfiguration(configuration)
        } catch {
            continuation?.finish(throwing: error)
        }
    }

    /// Stops the stream and finishes the frame sequence.
    func stopCapture() async {
        if let stream {
            // Best-effort: a stream that already failed may throw on stop.
            try? await stream.stopCapture()
        }
        continuation?.finish()
        continuation = nil
        stream = nil
    }

    /// Detaches the recording output from the live stream, finalizing the
    /// movie file. No-op when no stream is active.
    func stopRecordingOutputForStream(_ output: SCRecordingOutput) throws {
        try stream?.removeRecordingOutput(output)
    }
}