/// An `AVCaptureSession` configured to record a cropped region of a single screen.
///
/// Two outputs are attached: a movie-file output that writes the recording to disk,
/// and a sample-buffer output used both to republish frames via `NotificationCenter`
/// and to detect whether capture actually started (see the watchdog in
/// `startRecording()`).
class CapturaCaptureSession: AVCaptureSession, AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {

    /// Delivers raw sample buffers so frames can be forwarded and stalls detected.
    let videoOutput = AVCaptureVideoDataOutput()
    /// Writes the captured movie to disk (see `startRecording(to:)`).
    let movieFileOutput = AVCaptureMovieFileOutput()
    /// Flipped to `true` by the first delivered sample buffer; read by the
    /// one-second start-up watchdog in `startRecording()`.
    var receivedFrames = false

    /// Creates a session capturing `box` from `screen`.
    ///
    /// - Parameters:
    ///   - screen: The screen to capture from.
    ///   - box: The capture rectangle in global (multi-display) coordinates.
    init(_ screen: NSScreen, box: NSRect) {
        super.init()

        // Every NSScreen exposes its CGDirectDisplayID under "NSScreenNumber";
        // a missing value is a programmer error, so crashing is acceptable —
        // but fail with a message instead of the original bare force-unwraps.
        guard let displayId = screen.deviceDescription[NSDeviceDescriptionKey("NSScreenNumber")] as? CGDirectDisplayID,
              let screenInput = AVCaptureScreenInput(displayID: displayId) else {
            fatalError("Unable to create a screen capture input for the selected display")
        }

        // Translate the global-coordinate box into this screen's local space.
        var croppingBox = NSOffsetRect(box, -screen.frame.origin.x, -screen.frame.origin.y)

        // Video encoders require even pixel dimensions; shave one pixel when odd.
        if croppingBox.width.truncatingRemainder(dividingBy: 2) != 0 {
            croppingBox.size.width -= 1
        }
        // FIX: the original evened out only the width; an odd height trips the
        // same encoder constraint, so apply the identical adjustment to it.
        if croppingBox.height.truncatingRemainder(dividingBy: 2) != 0 {
            croppingBox.size.height -= 1
        }

        // Inset by 1pt per edge — presumably to exclude the selection border
        // from the capture (TODO confirm against the selection UI).
        screenInput.cropRect = croppingBox.insetBy(dx: 1, dy: 1)

        if canAddInput(screenInput) {
            addInput(screenInput)
        }

        // Sample buffers are delivered on a private serial queue.
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sample buffer delegate", attributes: []))

        if canAddOutput(videoOutput) {
            addOutput(videoOutput)
        }
        if canAddOutput(movieFileOutput) {
            addOutput(movieFileOutput)
        }
    }

    /// Starts the session and arms a one-second watchdog that posts
    /// `.failedToStart` if no frames arrived (e.g. screen-recording
    /// permission was denied, so the session runs but delivers nothing).
    func startRecording() {
        receivedFrames = false
        // NOTE(review): reconstructed from a garbled span — the watchdog below
        // is meaningless unless the session is started here; confirm placement.
        startRunning()

        // Deliberate strong capture of self: the session must stay alive at
        // least until the start-up check runs.
        DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
            if !self.receivedFrames {
                NotificationCenter.default.post(name: .failedToStart, object: nil, userInfo: nil)
            }
        }
    }

    /// Begins writing the captured movie to `url` via the file output.
    func startRecording(to url: URL) {
        movieFileOutput.startRecording(to: url, recordingDelegate: self)
    }

    // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate Implementation

    /// Records that frames are flowing and republishes each frame's pixel buffer.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // NOTE(review): reconstructed from a garbled span — this is the only
        // delegate that receives frames, and the flag is never set elsewhere,
        // so the start-up watchdog depends on this line; confirm.
        receivedFrames = true
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        NotificationCenter.default.post(name: .receivedFrame, object: nil, userInfo: ["frame": imageBuffer])
    }

    // MARK: - AVCaptureFileOutputRecordingDelegate Implementation

    /// Required for protocol conformance; completion is handled elsewhere.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {}
}