import AVFoundation
import AppKit

class CapturaCaptureSession: AVCaptureSession, AVCaptureFileOutputRecordingDelegate,
  AVCaptureVideoDataOutputSampleBufferDelegate
{

  let videoOutput = AVCaptureVideoDataOutput()
  let movieFileOutput = AVCaptureMovieFileOutput()
  var receivedFrames = false

  init(_ screen: NSScreen, box: NSRect) {
    super.init()

    let displayId =
      screen.deviceDescription[NSDeviceDescriptionKey("NSScreenNumber")] as! CGDirectDisplayID
    let screenInput = AVCaptureScreenInput(displayID: displayId)
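    // Translate the box from global screen coordinates into this display's coordinate
    // space, and force an even width, since video encoders generally expect even dimensions.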
    var croppingBox = NSOffsetRect(box, -screen.frame.origin.x, -screen.frame.origin.y)
    if croppingBox.width.truncatingRemainder(dividingBy: 2) != 0 {
      croppingBox.size.width -= 1
    }
    screenInput?.cropRect = croppingBox.insetBy(dx: 1, dy: 1)

    if self.canAddInput(screenInput!) {
      self.addInput(screenInput!)
    }

    videoOutput.setSampleBufferDelegate(
      self, queue: DispatchQueue(label: "sample buffer delegate", attributes: []))

    if self.canAddOutput(videoOutput) {
      self.addOutput(videoOutput)
    }

    if self.canAddOutput(movieFileOutput) {
      self.addOutput(movieFileOutput)
    }
  }

  func startRecording() {
    receivedFrames = false
    self.startRunning()

    DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
      if !self.receivedFrames {
        NotificationCenter.default.post(name: .failedToStart, object: nil, userInfo: nil)
      }
    }
  }

  func startRecording(to url: URL) {
    self.startRecording()
    movieFileOutput.startRecording(to: url, recordingDelegate: self)
  }

  // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate Implementation

  func captureOutput(
    _ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer,
    from connection: AVCaptureConnection
  ) {
    receivedFrames = true

    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    NotificationCenter.default.post(
      name: .receivedFrame, object: nil, userInfo: ["frame": imageBuffer])
  }

  // MARK: - AVCaptureFileOutputRecordingDelegate Implementation

  func fileOutput(
    _ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL,
    from connections: [AVCaptureConnection], error: Error?
  ) {}
}