/// Capture session that records a cropped region of a single screen.
///
/// Live frames are delivered through the sample-buffer delegate and re-posted
/// as `.receivedFrame` notifications; file recording goes through
/// `movieFileOutput`. A one-second watchdog posts `.failedToStart` if no
/// frames arrive after `startRecording()` is called.
class CapturaCaptureSession: AVCaptureSession, AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {

  let videoOutput = AVCaptureVideoDataOutput()
  let movieFileOutput = AVCaptureMovieFileOutput()
  // Set from the sample-buffer callback; the startRecording() watchdog reads
  // it to detect a capture that silently produced no frames.
  var receivedFrames = false

  /// Configures the session to capture `box` from `screen`.
  /// - Parameters:
  ///   - screen: Screen to capture; its `CGDirectDisplayID` is read from
  ///     `deviceDescription` under the "NSScreenNumber" key.
  ///   - box: Region to record, inset by 1pt on each side (excludes the
  ///     selection border from the recording).
  init(_ screen: NSScreen, box: NSRect) {
    super.init()

    // "NSScreenNumber" is present for every NSScreen in practice, but use
    // optional binding instead of `as!`/`!` so an unexpected description or a
    // failed AVCaptureScreenInput init degrades gracefully instead of crashing.
    if let displayId = screen.deviceDescription[NSDeviceDescriptionKey("NSScreenNumber")] as? CGDirectDisplayID,
       let screenInput = AVCaptureScreenInput(displayID: displayId) {
      screenInput.cropRect = box.insetBy(dx: 1, dy: 1)
      if canAddInput(screenInput) {
        addInput(screenInput)
      }
    }

    // Serial queue (the default) keeps sample buffers ordered.
    videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sample buffer delegate"))

    if canAddOutput(videoOutput) {
      addOutput(videoOutput)
    }

    if canAddOutput(movieFileOutput) {
      addOutput(movieFileOutput)
    }
  }

  /// Arms the frame watchdog: if no sample buffer has arrived within one
  /// second, posts `.failedToStart` so the UI can surface the failure.
  func startRecording() {
    receivedFrames = false

    // Weak capture: don't keep a discarded session alive just for the check.
    DispatchQueue.main.asyncAfter(deadline: .now() + 1) { [weak self] in
      guard let self = self, !self.receivedFrames else { return }
      NotificationCenter.default.post(name: .failedToStart, object: nil, userInfo: nil)
    }
  }

  /// Starts writing the captured movie to `url`.
  func startRecording(to url: URL) {
    movieFileOutput.startRecording(to: url, recordingDelegate: self)
  }

  // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate Implementation

  func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    // Feed the watchdog — without this the `.failedToStart` check in
    // startRecording() would always trip even when capture is working.
    receivedFrames = true
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    NotificationCenter.default.post(name: .receivedFrame, object: nil, userInfo: ["frame": imageBuffer])
  }

  // MARK: - AVCaptureFileOutputRecordingDelegate Implementation

  // Required by the protocol; recording completion is handled elsewhere.
  func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {}
}