import SwiftUI
import SwiftData
import Cocoa
import Combine
import ReplayKit
// Explicit imports for frameworks whose symbols are used in this file.
// Previously these were only reachable transitively (via Cocoa/ReplayKit),
// which is fragile across SDK versions.
import AVFoundation
import CoreImage
import ImageIO
import UniformTypeIdentifiers

/// Menu-bar screen-recording app. The SwiftUI scene only hosts the
/// preferences UI; all capture logic lives in `CapturaAppDelegate`.
@main
struct CapturaApp: App {
  @NSApplicationDelegateAdaptor(CapturaAppDelegate.self) var appDelegate

  var body: some Scene {
    WindowGroup {
      PreferencesScreen()
        .handlesExternalEvents(preferring: ["PreferencesScreen"], allowing: ["*"])
        .frame(width: 650, height: 450)
    }
    .handlesExternalEvents(matching: ["PreferencesScreen"])
    .modelContainer(for: Item.self)
  }
}

/// App delegate that owns the status-bar item, the area-selection overlay,
/// the AVFoundation capture session, and MP4/GIF export state.
class CapturaAppDelegate: NSObject, NSApplicationDelegate,
    AVCaptureVideoDataOutputSampleBufferDelegate,
    AVCaptureFileOutputRecordingDelegate,
    NSMenuDelegate {

  @Environment(\.openURL) var openURL

  var statusItem: NSStatusItem!
  var captureState: CaptureState = .idle        // current phase of the select/record/upload cycle
  var recordingWindow: RecordingWindow? = nil   // full-screen overlay used to pick the capture area
  var preferencesWindow: PreferencesWindow? = nil
  var boxListener: AnyCancellable? = nil        // Combine subscription watching the selected rectangle
  var popover: NSPopover? = nil                 // transient help bubble anchored to the status item
  var helpShown = false
  var receivedFrames = false                    // set on first delivered frame; used to detect a missing screen-recording permission
  var captureSession: AVCaptureSession? = nil
  var images: [CGImage] = []                    // frames accumulated for GIF assembly
  var outputURL: URL? = nil                     // MP4 destination; the GIF swaps the path extension
  var gifCallbackTimer = ContinuousClock.now    // throttles frame sampling down to `fps`
  var fps = UserDefaults.standard.integer(forKey: "frameRate")  // refreshed on every startRecording()
  var pixelDensity: CGFloat = 1.0               // backing-scale factor of the recorded screen
  var stopTimer: DispatchWorkItem?              // 5-minute auto-stop safety timer
func applicationDidFinishLaunching(_ notification: Notification) { setupMenu() NotificationCenter.default.addObserver( self, selector: #selector(self.didReceiveNotification(_:)), name: nil, object: nil) closeWindow() } // MARK: - Setup Functions private func setupMenu() { statusItem = NSStatusBar.system.statusItem(withLength: NSStatusItem.variableLength) if let button = statusItem.button { button.image = NSImage(systemSymbolName: "rectangle.dashed.badge.record", accessibilityDescription: "Captura") } statusItem.isVisible = true statusItem.menu = NSMenu() statusItem.menu?.delegate = self // Create the Popover popover = NSPopover() popover?.contentViewController = HelpPopoverViewController() popover?.behavior = .transient let recordItem = NSMenuItem(title: "Record", action: #selector(CapturaAppDelegate.onClickStartRecording), keyEquivalent: "6") recordItem.keyEquivalentModifierMask = [.command, .shift] statusItem.menu?.addItem(recordItem) statusItem.menu?.addItem(NSMenuItem.separator()) let preferencesItem = NSMenuItem(title: "Preferences", action: #selector(CapturaAppDelegate.onOpenPreferences), keyEquivalent: "") statusItem.menu?.addItem(preferencesItem) let quitItem = NSMenuItem(title: "Quit", action: #selector(CapturaAppDelegate.onQuit), keyEquivalent: "") statusItem.menu?.addItem(quitItem) } private func closeWindow() { if let window = NSApplication.shared.windows.first { window.close() } } // MARK: - UI Event Handlers func menuWillOpen(_ menu: NSMenu) { if captureState != .idle { menu.cancelTracking() if captureState == .recording { stopRecording() } } } @objc private func onClickStartRecording() { NotificationCenter.default.post(name: .startAreaSelection, object: nil, userInfo: nil) } @objc private func onOpenPreferences() { NSApp.activate(ignoringOtherApps: true) if preferencesWindow == nil { preferencesWindow = PreferencesWindow() } else { preferencesWindow?.makeKeyAndOrderFront(nil) } } @objc private func onQuit() { NSApplication.shared.terminate(self) } 
@objc private func onClickStatusBar(_ sender: NSStatusBarButton) { print("CLICK") if captureState == .recording { stopRecording() } } // MARK: - App State Event Listeners @objc func didReceiveNotification(_ notification: Notification) { switch(notification.name) { case .startAreaSelection: startAreaSelection() case .startRecording: startRecording() case .stopRecording: stopRecording() case .finalizeRecording: finalizeRecording() case .reset: reset() default: return } /* if let data = notification.userInfo?["data"] as? String { print("Data received: \(data)") } */ } @objc func startAreaSelection() { helpShown = false NSApp.activate(ignoringOtherApps: true) if captureState != .selectingArea { captureState = .selectingArea if let button = statusItem.button { let rectInWindow = button.convert(button.bounds, to: nil) let rectInScreen = button.window?.convertToScreen(rectInWindow) recordingWindow = RecordingWindow(rectInScreen) if let view = recordingWindow?.contentView as? RecordingContentView { boxListener = view.$box .debounce(for: .seconds(0.3), scheduler: RunLoop.main) .sink { newValue in if newValue != nil { button.image = NSImage(systemSymbolName: "circle.rectangle.dashed", accessibilityDescription: "Captura") if !self.helpShown { self.helpShown = true self.showPopoverWithMessage("Click here when you're ready to record.") } } } } } } } func startRecording() { captureState = .recording fps = UserDefaults.standard.integer(forKey: "frameRate") outputURL = nil images = []; pixelDensity = recordingWindow?.pixelDensity ?? 1.0 if let view = recordingWindow?.contentView as? RecordingContentView { view.startRecording() if let box = view.box { if let screen = NSScreen.main { let displayId = screen.deviceDescription[NSDeviceDescriptionKey("NSScreenNumber")] as! 
CGDirectDisplayID let screenInput = AVCaptureScreenInput(displayID: displayId) screenInput?.cropRect = box.insetBy(dx: 1, dy: 1) captureSession = AVCaptureSession() if let captureSession { if captureSession.canAddInput(screenInput!) { captureSession.addInput(screenInput!) } let videoOutput = AVCaptureVideoDataOutput() videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sample buffer delegate", attributes: [])) if captureSession.canAddOutput(videoOutput) { captureSession.addOutput(videoOutput) } let movieFileOutput = AVCaptureMovieFileOutput() if captureSession.canAddOutput(movieFileOutput) { captureSession.addOutput(movieFileOutput) } stopTimer = DispatchWorkItem { self.stopRecording() } DispatchQueue.main.asyncAfter(deadline: .now() + 300, execute: stopTimer!) if let button = statusItem.button { button.image = NSImage(systemSymbolName: "stop.circle", accessibilityDescription: "Captura") } receivedFrames = false captureSession.startRunning() guard let picturesDirectoryURL = FileManager.default.urls(for: .picturesDirectory, in: .userDomainMask).first else { fatalError("Unable to access user's Pictures directory") } outputURL = picturesDirectoryURL.appendingPathComponent("captura/\(filename())").appendingPathExtension("mp4") let outputFormatsSetting = OutputFormatSetting(rawValue: UserDefaults.standard.integer(forKey: "outputFormats")) ?? 
.all if outputFormatsSetting.shouldSaveMp4() { movieFileOutput.startRecording(to: outputURL!, recordingDelegate: self) } DispatchQueue.main.asyncAfter(deadline: .now() + 1) { if !self.receivedFrames { self.requestPermission() } } } } else { print("Should error") } } } } func stopRecording() { stopTimer?.cancel() captureState = .uploading captureSession?.stopRunning() captureSession = nil Task.detached { if let outputURL = self.outputURL { await self.createGif(url: outputURL.deletingPathExtension().appendingPathExtension("gif")) } } reset() } func finalizeRecording() { captureState = .uploaded // Stopping the recording } func reset() { if let button = statusItem.button { button.image = NSImage(systemSymbolName: "rectangle.dashed.badge.record", accessibilityDescription: "Captura") } captureState = .idle boxListener?.cancel() recordingWindow?.close() self.recordingWindow = nil } private func requestPermission() { reset() showPopoverWithMessage("Please grant Captura permission to record") if let url = URL(string: "x-apple.systempreferences:com.apple.preference.security?Privacy_ScreenRecording") { NSWorkspace.shared.open(url) } } func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { receivedFrames = true let now = ContinuousClock.now if now - gifCallbackTimer > .nanoseconds(1_000_000_000 / UInt64(fps)) { gifCallbackTimer = now DispatchQueue.main.async { // Get the CVImageBuffer from the sample buffer guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } let ciImage = CIImage(cvImageBuffer: imageBuffer) let context = CIContext() if let cgImage = context.createCGImage(ciImage, from: CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(imageBuffer), height: CVPixelBufferGetHeight(imageBuffer))) { if let cgImage = self.resize(image: cgImage, by: self.pixelDensity) { self.images.append(cgImage) } } } } } func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo 
outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) { if let error = error as? NSError { if error.domain == AVFoundationErrorDomain && error.code == -11806 { Task.detached { await self.createGif(url: outputFileURL.deletingPathExtension().appendingPathExtension("gif")) } } } } private func showPopoverWithMessage(_ message: String) { if let button = statusItem.button { (self.popover?.contentViewController as? HelpPopoverViewController)?.updateLabel(message) self.popover?.show(relativeTo: button.bounds, of: button, preferredEdge: NSRectEdge.minY) DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) { self.popover?.performClose(nil) } } } func filename() -> String { let dateFormatter = DateFormatter() dateFormatter.dateStyle = .medium dateFormatter.timeStyle = .medium dateFormatter.locale = Locale.current let dateString = dateFormatter.string(from: Date()).replacingOccurrences(of: ":", with: ".") return "Captura \(dateString)" } func createGif(url: URL) async { let outputFormatsSetting = OutputFormatSetting(rawValue: UserDefaults.standard.integer(forKey: "outputFormats")) ?? .all if !outputFormatsSetting.shouldSaveGif() { return } let framedelay = String(format: "%.3f", 1.0 / Double(fps)) let fileProperties = [kCGImagePropertyGIFDictionary as String: [kCGImagePropertyGIFLoopCount as String: 0]] let gifProperties = [kCGImagePropertyGIFDictionary as String: [kCGImagePropertyGIFUnclampedDelayTime as String: framedelay]] let cfURL = url as CFURL if let destination = CGImageDestinationCreateWithURL(cfURL, UTType.gif.identifier as CFString, images.count, nil) { CGImageDestinationSetProperties(destination, fileProperties as CFDictionary?) for image in images { CGImageDestinationAddImage(destination, image, gifProperties as CFDictionary?) } CGImageDestinationFinalize(destination) } } private func resize(image: CGImage, by scale: CGFloat) -> CGImage? 
{ let width = Int(CGFloat(image.width) / scale) let height = Int(CGFloat(image.height) / scale) let bitsPerComponent = image.bitsPerComponent let colorSpace = image.colorSpace ?? CGColorSpace(name: CGColorSpace.sRGB)! let bitmapInfo = image.bitmapInfo.rawValue guard let context = CGContext(data: nil, width: width, height: height, bitsPerComponent: bitsPerComponent, bytesPerRow: 0, space: colorSpace, bitmapInfo: bitmapInfo) else { return nil } context.interpolationQuality = .high context.draw(image, in: CGRect(x: 0, y: 0, width: width, height: height)) return context.makeImage() } }