X-Git-Url: https://git.r.bdr.sh/rbdr/captura/blobdiff_plain/c9b9e1d654ea697afad9f6427d94623bfdf55cce..505c1e620497828ffb914e05dd76d9ab124f144a:/Captura/Domain/CapturaCaptureSession.swift

diff --git a/Captura/Domain/CapturaCaptureSession.swift b/Captura/Domain/CapturaCaptureSession.swift
index 80240e6..dbc72b9 100644
--- a/Captura/Domain/CapturaCaptureSession.swift
+++ b/Captura/Domain/CapturaCaptureSession.swift
@@ -1,60 +1,75 @@
-import AppKit
 import AVFoundation
+import AppKit
 
-class CapturaCaptureSession: AVCaptureSession, AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {
+class CapturaCaptureSession: AVCaptureSession, AVCaptureFileOutputRecordingDelegate,
+  AVCaptureVideoDataOutputSampleBufferDelegate
+{
   let videoOutput = AVCaptureVideoDataOutput()
   let movieFileOutput = AVCaptureMovieFileOutput()
   var receivedFrames = false
-  
+
   init(_ screen: NSScreen, box: NSRect) {
     super.init()
-    
-    let displayId = screen.deviceDescription[NSDeviceDescriptionKey("NSScreenNumber")] as! CGDirectDisplayID
+
+    let displayId =
+      screen.deviceDescription[NSDeviceDescriptionKey("NSScreenNumber")] as! CGDirectDisplayID
     let screenInput = AVCaptureScreenInput(displayID: displayId)
-    screenInput?.cropRect = box.insetBy(dx: 1, dy: 1)
-    
+    var croppingBox = NSOffsetRect(box, -screen.frame.origin.x, -screen.frame.origin.y)
+    if croppingBox.width.truncatingRemainder(dividingBy: 2) != 0 {
+      croppingBox.size.width -= 1
+    }
+    screenInput?.cropRect = croppingBox.insetBy(dx: 1, dy: 1)
+
     if self.canAddInput(screenInput!) {
       self.addInput(screenInput!)
     }
-    
-    videoOutput.setSampleBufferDelegate(self, queue: Dispatch.DispatchQueue(label: "sample buffer delegate", attributes: []))
-    
+
+    videoOutput.setSampleBufferDelegate(
+      self, queue: Dispatch.DispatchQueue(label: "sample buffer delegate", attributes: []))
+
     if self.canAddOutput(videoOutput) {
       self.addOutput(videoOutput)
     }
-    
+
     if self.canAddOutput(movieFileOutput) {
       self.addOutput(movieFileOutput)
     }
   }
-  
+
   func startRecording() {
     receivedFrames = false
     self.startRunning()
-    
+
    DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
       if !self.receivedFrames {
         NotificationCenter.default.post(name: .failedToStart, object: nil, userInfo: nil)
       }
     }
   }
-  
+
   func startRecording(to url: URL) {
     self.startRecording()
     movieFileOutput.startRecording(to: url, recordingDelegate: self)
   }
-  
+
   // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate Implementation
-  
-  func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
+
+  func captureOutput(
+    _ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer,
+    from connection: AVCaptureConnection
+  ) {
     receivedFrames = true
-    
+
     guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
-    NotificationCenter.default.post(name: .receivedFrame, object: nil, userInfo: ["frame": imageBuffer])
+    NotificationCenter.default.post(
+      name: .receivedFrame, object: nil, userInfo: ["frame": imageBuffer])
   }
-  
+
   // MARK: - AVCaptureFileOutputRecordingDelegate Implementation
-  
-  func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {}
+
+  func fileOutput(
+    _ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL,
+    from connections: [AVCaptureConnection], error: Error?
+  ) {}
 }
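
Note on the functional change above: besides the swift-format-style reflowing, this diff translates the selection box from global screen coordinates into the target display's local space and trims an odd width down to an even number of points before assigning the crop rect, presumably because video pixel buffers and encoders generally handle even dimensions more reliably. Below is a minimal sketch of that computation in isolation, using the same box and screen values the initializer receives; the helper name cropRect(for:on:) is illustrative only and is not part of the diff.

import AppKit

// Hypothetical helper mirroring the cropping logic introduced in the diff above.
func cropRect(for box: NSRect, on screen: NSScreen) -> NSRect {
  // Shift the global-coordinate selection into the screen's own coordinate space.
  var croppingBox = NSOffsetRect(box, -screen.frame.origin.x, -screen.frame.origin.y)
  // Trim an odd width by one point so the captured region has an even width.
  if croppingBox.width.truncatingRemainder(dividingBy: 2) != 0 {
    croppingBox.size.width -= 1
  }
  // Keep the existing 1-point inset on every edge before handing the rect to AVCaptureScreenInput.
  return croppingBox.insetBy(dx: 1, dy: 1)
}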