import AppKit
import AVFoundation

/// A capture session that records a cropped region of a single screen.
/// It streams raw frames to observers via `NotificationCenter` and can
/// simultaneously write a movie file to disk.
class CapturaCaptureSession: AVCaptureSession, AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {

  let videoOutput = AVCaptureVideoDataOutput()
  let movieFileOutput = AVCaptureMovieFileOutput()

  /// Set to true by the sample-buffer delegate once the first frame arrives;
  /// used to detect a session that silently fails to start.
  var receivedFrames = false

  init(_ screen: NSScreen, box: NSRect) {
    super.init()

    // Resolve the CGDirectDisplayID backing the given NSScreen.
    let displayId = screen.deviceDescription[NSDeviceDescriptionKey("NSScreenNumber")] as! CGDirectDisplayID

    // Capture only the selected box, inset by 1pt so the selection
    // rectangle's own border is excluded from the recording.
    if let screenInput = AVCaptureScreenInput(displayID: displayId) {
      screenInput.cropRect = box.insetBy(dx: 1, dy: 1)
      if canAddInput(screenInput) {
        addInput(screenInput)
      }
    }

    // Deliver sample buffers on a dedicated serial queue.
    videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sample buffer delegate"))
    if canAddOutput(videoOutput) {
      addOutput(videoOutput)
    }
    if canAddOutput(movieFileOutput) {
      addOutput(movieFileOutput)
    }
  }

  /// Starts the session and posts `.failedToStart` if no frame has been
  /// received within one second.
  func startRecording() {
    receivedFrames = false
    startRunning()
    DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
      if !self.receivedFrames {
        NotificationCenter.default.post(name: .failedToStart, object: nil, userInfo: nil)
      }
    }
  }

  /// Starts the session and additionally writes the recording to `url`.
  func startRecording(to url: URL) {
    startRecording()
    movieFileOutput.startRecording(to: url, recordingDelegate: self)
  }

  // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate Implementation

  func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    receivedFrames = true
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
      return
    }
    // Broadcast the frame's pixel buffer so observers (e.g. a live preview)
    // can render it.
    NotificationCenter.default.post(name: .receivedFrame, object: nil, userInfo: ["frame": imageBuffer])
  }

  // MARK: - AVCaptureFileOutputRecordingDelegate Implementation

  func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    // No-op: the movie file on disk is complete once this callback fires.
  }
}
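
// MARK: - Usage sketch (illustrative, not part of the original file)
//
// The `.failedToStart` and `.receivedFrame` notification names are assumed
// to be declared elsewhere in the project, along these lines:
//
//   extension Notification.Name {
//       static let failedToStart = Notification.Name("failedToStart")
//       static let receivedFrame = Notification.Name("receivedFrame")
//   }
//
// A minimal caller, using a hypothetical selection rectangle and output URL,
// might drive the session like this:
//
//   if let screen = NSScreen.main {
//       let box = NSRect(x: 100, y: 100, width: 640, height: 480)
//       let session = CapturaCaptureSession(screen, box: box)
//       let outputURL = URL(fileURLWithPath: NSTemporaryDirectory())
//           .appendingPathComponent("capture.mov")
//       session.startRecording(to: outputURL)
//   }
//
// Note that the one-second `.failedToStart` check fires on the main queue
// while frames are reported from the sample-buffer queue, so observers
// should be registered before `startRecording()` is called.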