// Captura — menu-bar screen recorder.
// SwiftUI entry point plus the AppKit delegate that owns the capture pipeline.
import SwiftUI
import SwiftData
import Cocoa
import Combine
import ReplayKit
// Explicit import for the AVCapture* session types and delegate protocols used
// below; previously they were only reachable through transitive imports.
import AVFoundation

@main
struct CapturaApp: App {
  @NSApplicationDelegateAdaptor(CapturaAppDelegate.self) var appDelegate

  var body: some Scene {
    // The only SwiftUI window is the preferences screen; it is routed via
    // external events so reopening it focuses the existing window.
    WindowGroup {
      PreferencesScreen()
        .handlesExternalEvents(preferring: Set(arrayLiteral: "PreferencesScreen"), allowing: Set(arrayLiteral: "*"))
        .frame(width: 650, height: 450)
    }
    .handlesExternalEvents(matching: Set(arrayLiteral: "PreferencesScreen"))
    .modelContainer(for: Item.self)
  }
}

/// Status-bar controller: drives area selection, screen capture, GIF/MP4
/// output, and the small help popover.
class CapturaAppDelegate: NSObject, NSApplicationDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureFileOutputRecordingDelegate, NSMenuDelegate {

  @Environment(\.openURL) var openURL

  var statusItem: NSStatusItem!
  // Lifecycle phase: idle → selectingArea → recording → uploading → uploaded.
  var captureState: CaptureState = .idle
  var recordingWindow: RecordingWindow? = nil
  var preferencesWindow: PreferencesWindow? = nil
  // Combine subscription to the selection box published by the recording view.
  var boxListener: AnyCancellable? = nil
  var popover: NSPopover? = nil
  var helpShown = false
  // Flipped by the first sample-buffer callback; if it stays false shortly
  // after starting, the app assumes screen-recording permission is missing.
  var receivedFrames = false
  var captureSession: AVCaptureSession? = nil
  // Frames accumulated (on the main queue) for GIF rendering.
  var images: [CGImage] = []
  var outputFile: CapturaFile? = nil
  // Timestamp of the last frame kept for the GIF (throttles capture to `fps`).
  var gifCallbackTimer = ContinuousClock.now
  // Target GIF frame rate; refreshed from preferences when recording starts.
  var fps = UserDefaults.standard.integer(forKey: "frameRate")
  var pixelDensity: CGFloat = 1.0
  // Safety timer that force-stops a runaway recording.
  var stopTimer: DispatchWorkItem?
/// App entry hook: builds the status-bar UI, subscribes to the app's custom
/// notifications, and closes the SwiftUI window that WindowGroup opens at launch.
func applicationDidFinishLaunching(_ notification: Notification) {
  setupMenu()
  // name: nil — a single observer receives everything and the switch in
  // didReceiveNotification(_:) picks out the notifications it cares about.
  NotificationCenter.default.addObserver(
    self,
    selector: #selector(self.didReceiveNotification(_:)),
    name: nil,
    object: nil)
  closeWindow()
}

// MARK: - Setup Functions

/// Creates the status-bar item, its menu (Record / Preferences / Quit) and
/// the transient help popover.
private func setupMenu() {
  statusItem = NSStatusBar.system.statusItem(withLength: NSStatusItem.variableLength)
  if let button = statusItem.button {
    button.image = NSImage(systemSymbolName: "rectangle.dashed.badge.record", accessibilityDescription: "Captura")
  }
  statusItem.isVisible = true
  statusItem.menu = NSMenu()
  statusItem.menu?.delegate = self

  // Create the Popover
  popover = NSPopover()
  popover?.contentViewController = HelpPopoverViewController()
  popover?.behavior = .transient

  // Record (⌘⇧6), separator, Preferences, Quit.
  let recordItem = NSMenuItem(
    title: "Record",
    action: #selector(CapturaAppDelegate.onClickStartRecording),
    keyEquivalent: "6")
  recordItem.keyEquivalentModifierMask = [.command, .shift]
  statusItem.menu?.addItem(recordItem)
  statusItem.menu?.addItem(NSMenuItem.separator())

  let preferencesItem = NSMenuItem(
    title: "Preferences",
    action: #selector(CapturaAppDelegate.onOpenPreferences),
    keyEquivalent: "")
  statusItem.menu?.addItem(preferencesItem)

  let quitItem = NSMenuItem(
    title: "Quit",
    action: #selector(CapturaAppDelegate.onQuit),
    keyEquivalent: "")
  statusItem.menu?.addItem(quitItem)
}

/// Closes the frontmost window (the auto-opened preferences WindowGroup).
private func closeWindow() {
  guard let window = NSApplication.shared.windows.first else { return }
  window.close()
}

// MARK: - UI Event Handlers

/// While selecting or recording, suppress the menu; a click on the status
/// item during recording stops the capture instead of opening the menu.
func menuWillOpen(_ menu: NSMenu) {
  guard captureState != .idle else { return }
  menu.cancelTracking()
  if captureState == .recording {
    NotificationCenter.default.post(name: .stopRecording, object: nil, userInfo: nil)
  }
}

@objc private func onClickStartRecording() {
  NotificationCenter.default.post(name: .startAreaSelection, object: nil, userInfo: nil)
}

@objc private func onOpenPreferences() {
  NSApp.activate(ignoringOtherApps: true)
  if preferencesWindow == nil {
    // NOTE(review): assumes PreferencesWindow orders itself front on init —
    // confirm; a pre-existing window is explicitly brought forward instead.
    preferencesWindow = PreferencesWindow()
  } else {
    preferencesWindow?.makeKeyAndOrderFront(nil)
  }
}

@objc
private func onQuit() {
  NSApplication.shared.terminate(self)
}

// MARK: - App State Event Listeners

/// Central dispatcher for the app's custom notifications (registered with
/// name: nil in applicationDidFinishLaunching, so unrelated names fall
/// through to the default case).
@objc func didReceiveNotification(_ notification: Notification) {
  switch notification.name {
  case .startAreaSelection:
    startAreaSelection()
  case .startRecording:
    startRecording()
  case .stopRecording:
    stopRecording()
  case .finalizeRecording:
    // Finalization updates the status item (AppKit) — hop to the main thread.
    DispatchQueue.main.async {
      self.finalizeRecording()
    }
  case .reset:
    reset()
  default:
    return
  }
}

/// Shows the selection overlay window so the user can drag out a capture area,
/// and swaps the status icon once a box has been drawn.
@objc func startAreaSelection() {
  helpShown = false
  NSApp.activate(ignoringOtherApps: true)
  if captureState != .selectingArea {
    captureState = .selectingArea
    if let button = statusItem.button {
      let rectInWindow = button.convert(button.bounds, to: nil)
      let rectInScreen = button.window?.convertToScreen(rectInWindow)
      recordingWindow = RecordingWindow(rectInScreen)
      if let view = recordingWindow?.contentView as? RecordingContentView {
        // Debounce so the hint appears only once the selection box settles.
        boxListener = view.$box
          .debounce(for: .seconds(0.3), scheduler: RunLoop.main)
          .sink { newValue in
            if newValue != nil {
              button.image = NSImage(systemSymbolName: "circle.rectangle.dashed", accessibilityDescription: "Captura")
              if !self.helpShown {
                self.helpShown = true
                self.showPopoverWithMessage("Click here when you're ready to record.")
              }
            }
          }
      }
    }
  }
}

/// Builds and starts the AVCapture pipeline for the selected screen area.
func startRecording() {
  captureState = .recording
  // Clamp to at least 1 fps: UserDefaults.integer(forKey:) returns 0 when the
  // key is absent, which would later cause a divide-by-zero trap when the
  // sample-buffer callback computes the per-frame interval.
  fps = max(UserDefaults.standard.integer(forKey: "frameRate"), 1)
  outputFile = nil
  images = []
  pixelDensity = recordingWindow?.pixelDensity ?? 1.0
  if let view = recordingWindow?.contentView as? RecordingContentView {
    view.startRecording()
    if let box = view.box {
      if let screen = NSScreen.main {
        let displayId = screen.deviceDescription[NSDeviceDescriptionKey("NSScreenNumber")] as! CGDirectDisplayID
        let screenInput = AVCaptureScreenInput(displayID: displayId)
        // Inset by 1pt so the selection border itself is not captured.
        screenInput?.cropRect = box.insetBy(dx: 1, dy: 1)
        captureSession = AVCaptureSession()
        if let captureSession {
          if captureSession.canAddInput(screenInput!)
{ captureSession.addInput(screenInput!) }
          // Frame tap: feeds captureOutput(_:didOutput:from:) for the GIF path.
          let videoOutput = AVCaptureVideoDataOutput()
          videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sample buffer delegate", attributes: []))
          if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
          }
          // Movie output: records the MP4 alongside the GIF frames.
          let movieFileOutput = AVCaptureMovieFileOutput()
          if captureSession.canAddOutput(movieFileOutput) {
            captureSession.addOutput(movieFileOutput)
          }
          // Safety net: force-stop the recording after 5 minutes.
          let autoStop = DispatchWorkItem { self.stopRecording() }
          stopTimer = autoStop
          DispatchQueue.main.asyncAfter(deadline: .now() + 300, execute: autoStop)
          if let button = statusItem.button {
            button.image = NSImage(systemSymbolName: "checkmark.rectangle", accessibilityDescription: "Captura")
          }
          receivedFrames = false
          captureSession.startRunning()
          outputFile = CapturaFile()
          let outputFormatsSetting = OutputFormatSetting(rawValue: UserDefaults.standard.integer(forKey: "outputFormats")) ?? .all
          if outputFormatsSetting.shouldSaveMp4() {
            movieFileOutput.startRecording(to: outputFile!.mp4URL, recordingDelegate: self)
          }
          // No frames within a second → assume screen-recording permission
          // was denied and prompt the user.
          DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
            if !self.receivedFrames {
              self.requestPermission()
            }
          }
        }
      } else {
        print("Should error")
      }
    }
  }
}

/// Tears down the capture session, closes the selection window, and hands the
/// collected frames to the GIF renderer.
func stopRecording() {
  if let button = statusItem.button {
    button.image = NSImage(systemSymbolName: "dock.arrow.up.rectangle", accessibilityDescription: "Captura")
  }
  stopTimer?.cancel()
  captureState = .uploading
  captureSession?.stopRunning()
  captureSession = nil
  boxListener?.cancel()
  recordingWindow?.close()
  self.recordingWindow = nil
  let outputFormatsSetting = OutputFormatSetting(rawValue: UserDefaults.standard.integer(forKey: "outputFormats")) ??
.all
  // GIF output disabled → nothing to render; finalize immediately.
  if !outputFormatsSetting.shouldSaveGif() {
    NotificationCenter.default.post(name: .finalizeRecording, object: nil, userInfo: nil)
    return
  }
  Task.detached {
    // Render off the main thread. Post .finalizeRecording even when there is
    // no output file — otherwise the app would remain stuck in .uploading.
    if let outputFile = self.outputFile {
      await GifRenderer.render(self.images, at: self.fps, to: outputFile.gifURL)
    }
    NotificationCenter.default.post(name: .finalizeRecording, object: nil, userInfo: nil)
  }
}

/// Marks the capture as uploaded and schedules a return to the idle state.
func finalizeRecording() {
  if let button = statusItem.button {
    button.image = NSImage(systemSymbolName: "checkmark.rectangle.fill", accessibilityDescription: "Captura")
  }
  captureState = .uploaded
  DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) {
    self.reset()
  }
}

/// Returns the app to idle: restores the status icon and tears down any
/// in-flight capture session, selection window, and timers.
func reset() {
  if let button = statusItem.button {
    button.image = NSImage(systemSymbolName: "rectangle.dashed.badge.record", accessibilityDescription: "Captura")
  }
  captureState = .idle
  stopTimer?.cancel()
  captureSession?.stopRunning()
  boxListener?.cancel()
  recordingWindow?.close()
  self.recordingWindow = nil
}

/// Invoked when no frames arrive after starting: assume the screen-recording
/// permission is missing and deep-link the user to the privacy settings pane.
private func requestPermission() {
  reset()
  showPopoverWithMessage("Please grant Captura permission to record")
  if let url = URL(string: "x-apple.systempreferences:com.apple.preference.security?Privacy_ScreenRecording") {
    NSWorkspace.shared.open(url)
  }
}

// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate Implementation

/// Throttles incoming frames to `fps` and appends downscaled CGImages for the
/// GIF. Runs on the dedicated sample-buffer queue; image work hops to main.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
  receivedFrames = true
  let now = ContinuousClock.now
  // max(fps, 1) guards against a zero/negative stored frame rate, which would
  // otherwise trap here (divide-by-zero or negative→UInt64 conversion).
  if now - gifCallbackTimer > .nanoseconds(1_000_000_000 / UInt64(max(fps, 1))) {
    gifCallbackTimer = now
    DispatchQueue.main.async {
      // Get the CVImageBuffer from the sample buffer
      guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        return
      }
      let ciImage = CIImage(cvImageBuffer: imageBuffer)
      let context = CIContext()
      if let cgImage = context.createCGImage(ciImage, from: CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(imageBuffer), height: CVPixelBufferGetHeight(imageBuffer))) {
        // Scale down by the backing-store pixel density so GIF frames are in
        // points rather than physical pixels.
        if let cgImage = cgImage.resize(by: self.pixelDensity) {
          self.images.append(cgImage)
        }
      }
    }
  }
}

private func
showPopoverWithMessage(_ message: String) {
  // Anchor a transient hint to the status-bar button, then auto-dismiss it
  // after two seconds.
  guard let button = statusItem.button else { return }
  let helpController = self.popover?.contentViewController as? HelpPopoverViewController
  helpController?.updateLabel(message)
  self.popover?.show(relativeTo: button.bounds, of: button, preferredEdge: NSRectEdge.minY)
  DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) {
    self.popover?.performClose(nil)
  }
}

// MARK: - AVCaptureFileOutputRecordingDelegate Implementation

/// Required by AVCaptureFileOutputRecordingDelegate; the finished movie file
/// needs no post-processing here, so this is intentionally a no-op.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {}
}