Format the code
diff --git a/Captura/Domain/CapturaCaptureSession.swift b/Captura/Domain/CapturaCaptureSession.swift
index 89432822ce340547bad0c7774a00f5107502a426..dbc72b9bccef479d46e69dfa2d22218893408ee4 100644
@@ -1,64 +1,75 @@
-import AppKit
 import AVFoundation
+import AppKit
 
-class CapturaCaptureSession: AVCaptureSession, AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {
+class CapturaCaptureSession: AVCaptureSession, AVCaptureFileOutputRecordingDelegate,
+  AVCaptureVideoDataOutputSampleBufferDelegate
+{
 
   let videoOutput = AVCaptureVideoDataOutput()
   let movieFileOutput = AVCaptureMovieFileOutput()
   var receivedFrames = false
-  
+
   init(_ screen: NSScreen, box: NSRect) {
     super.init()
-    
-    let displayId = screen.deviceDescription[NSDeviceDescriptionKey("NSScreenNumber")] as! CGDirectDisplayID
+
+    let displayId =
+      screen.deviceDescription[NSDeviceDescriptionKey("NSScreenNumber")] as! CGDirectDisplayID
     let screenInput = AVCaptureScreenInput(displayID: displayId)
     var croppingBox = NSOffsetRect(box, -screen.frame.origin.x, -screen.frame.origin.y)
     if croppingBox.width.truncatingRemainder(dividingBy: 2) != 0 {
       croppingBox.size.width -= 1
     }
     screenInput?.cropRect = croppingBox.insetBy(dx: 1, dy: 1)
-    
+
     if self.canAddInput(screenInput!) {
       self.addInput(screenInput!)
     }
-    
-    videoOutput.setSampleBufferDelegate(self, queue: Dispatch.DispatchQueue(label: "sample buffer delegate", attributes: []))
-    
+
+    videoOutput.setSampleBufferDelegate(
+      self, queue: Dispatch.DispatchQueue(label: "sample buffer delegate", attributes: []))
+
     if self.canAddOutput(videoOutput) {
       self.addOutput(videoOutput)
     }
-    
+
     if self.canAddOutput(movieFileOutput) {
       self.addOutput(movieFileOutput)
     }
   }
-  
+
   func startRecording() {
     receivedFrames = false
     self.startRunning()
-    
+
     DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
       if !self.receivedFrames {
         NotificationCenter.default.post(name: .failedToStart, object: nil, userInfo: nil)
       }
     }
   }
-  
+
   func startRecording(to url: URL) {
     self.startRecording()
     movieFileOutput.startRecording(to: url, recordingDelegate: self)
   }
-  
+
   // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate Implementation
-  
-  func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
+
+  func captureOutput(
+    _ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer,
+    from connection: AVCaptureConnection
+  ) {
     receivedFrames = true
-    
+
     guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
-    NotificationCenter.default.post(name: .receivedFrame, object: nil, userInfo: ["frame": imageBuffer])
+    NotificationCenter.default.post(
+      name: .receivedFrame, object: nil, userInfo: ["frame": imageBuffer])
   }
-  
+
   // MARK: - AVCaptureFileOutputRecordingDelegate Implementation
-  
-  func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {}
+
+  func fileOutput(
+    _ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL,
+    from connections: [AVCaptureConnection], error: Error?
+  ) {}
 }
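
For context, a minimal call-site sketch (not part of the commit above) showing how this class is driven. The function name, capture box, and output path are illustrative only; the .receivedFrame and .failedToStart Notification.Name values are assumed to be defined elsewhere in the project, since this file only posts them.

import AVFoundation
import AppKit

// Hypothetical call site, not part of this repository. The box, file name, and
// function name are illustrative; .receivedFrame and .failedToStart are assumed
// to be Notification.Name extensions defined elsewhere in the project.
func startCapture() -> (session: CapturaCaptureSession, frameObserver: NSObjectProtocol)? {
  guard let screen = NSScreen.main else { return nil }

  // Region of the screen to capture, in global screen coordinates.
  let box = NSRect(x: 100, y: 100, width: 640, height: 480)
  let session = CapturaCaptureSession(screen, box: box)

  // Frames reach the sample buffer delegate and are rebroadcast as
  // .receivedFrame notifications carrying the CVImageBuffer under "frame".
  let observer = NotificationCenter.default.addObserver(
    forName: .receivedFrame, object: nil, queue: .main
  ) { notification in
    guard let frame = notification.userInfo?["frame"] as? CVImageBuffer else { return }
    _ = frame  // hand the buffer to an encoder or a preview layer here
  }

  // Record straight to a file; the session posts .failedToStart if no frames
  // have arrived one second after startRunning().
  let url = FileManager.default.temporaryDirectory.appendingPathComponent("capture.mov")
  session.startRecording(to: url)

  return (session, observer)
}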