回答編集履歴
1
サンプルコードを貼りました。
answer
CHANGED
@@ -5,4 +5,154 @@
|
|
5
5
|
|
6
6
|
同様に
|
7
7
|
AVCaptureAudioDataOutput, AVCaptureAudioDataOutputSampleBufferDelegate
|
8
|
-
で音声もキャプチャしてみると良いかと思います。
|
8
|
+
で音声もキャプチャしてみると良いかと思います。
|
9
|
+
|
10
|
+
ソースコード一式を貼ります。
|
11
|
+
|
12
|
+
```swift
|
13
|
+
import UIKit
|
14
|
+
import AVFoundation
|
15
|
+
|
16
|
+
/// Records 1280x720 video + audio from the default capture devices into
/// Documents/test.mov and offers the file via a share sheet when stopped.
final class ViewController: UIViewController {

    // MARK: - private properties

    // True while sample buffers should be forwarded to the asset writer.
    private var isRecording = false
    private var session = AVCaptureSession()
    private let videoOutput = AVCaptureVideoDataOutput()
    private let audioOutput = AVCaptureAudioDataOutput()
    // Destination movie file; overwritten at the start of every recording.
    private let fileUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("test").appendingPathExtension("mov")
    private var assetWriter: AVAssetWriter?
    // Presentation timestamp of the first buffer of the current take;
    // nil means the writer session has not been started yet.
    private var startTime: CMTime?

    // MARK: - lifecycle

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        // FIX: the original discarded both permission results (`_ in`) and
        // configured the session even when access was denied; bail out early.
        AVCaptureDevice.requestAccess(for: .video) { videoGranted in
            guard videoGranted else { return }
            AVCaptureDevice.requestAccess(for: .audio) { audioGranted in
                guard audioGranted else { return }
                self.setupCaptureSession()
            }
        }
    }

    // MARK: - private functions

    /// Wires the default video/audio devices into the session and installs a
    /// full-screen preview, then starts the session.
    private func setupCaptureSession() {
        // FIX: viewDidAppear can fire more than once; don't add inputs twice.
        guard session.inputs.isEmpty else { return }

        session.sessionPreset = .hd1280x720

        guard
            let videoDevice = AVCaptureDevice.default(for: .video),
            let audioDevice = AVCaptureDevice.default(for: .audio),
            let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
            let audioInput = try? AVCaptureDeviceInput(device: audioDevice) else {
            fatalError()
        }

        session.beginConfiguration()
        // FIX: addInput/addOutput raise an Objective-C exception when the
        // session cannot accept them; guard with canAddInput/canAddOutput.
        if session.canAddInput(videoInput) { session.addInput(videoInput) }
        if session.canAddInput(audioInput) { session.addInput(audioInput) }
        if session.canAddOutput(videoOutput) { session.addOutput(videoOutput) }
        if session.canAddOutput(audioOutput) { session.addOutput(audioOutput) }
        session.commitConfiguration()

        // UIKit work must happen on the main thread (the permission
        // callbacks above arrive on an arbitrary queue).
        DispatchQueue.main.async {
            let previewView = PreviewView()
            previewView.videoPreviewLayer.session = self.session
            previewView.frame = self.view.bounds
            previewView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
            self.view.insertSubview(previewView, at: 0)

            self.session.startRunning()
        }
    }

    /// Toggles recording on each button tap.
    @IBAction private func onClickButton(_ sender: Any) {
        if isRecording {
            isRecording = false
            finishRecording()
        } else {
            isRecording = true
            startRecording()
        }
    }

    /// Creates a fresh AVAssetWriter for test.mov and starts delivering
    /// sample buffers to the delegate (which feeds them to the writer).
    private func startRecording() {
        // AVAssetWriter refuses to write over an existing file; remove any
        // previous take first.
        try? FileManager.default.removeItem(at: fileUrl)
        assetWriter = try! AVAssetWriter(outputURL: fileUrl, fileType: .mov)

        // video: H.264 at the same 1280x720 the session preset produces
        let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
            AVVideoCodecKey : AVVideoCodecType.h264,
            AVVideoWidthKey : 1280,
            AVVideoHeightKey : 720
        ])
        videoInput.expectsMediaDataInRealTime = true
        assetWriter?.add(videoInput)

        // audio: nil settings = write samples in their captured format
        let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
        audioInput.expectsMediaDataInRealTime = true
        assetWriter?.add(audioInput)

        assetWriter?.startWriting()

        let queue = DispatchQueue.global()
        videoOutput.setSampleBufferDelegate(self, queue: queue)
        audioOutput.setSampleBufferDelegate(self, queue: queue)
    }

    /// Stops buffer delivery, finalizes the movie file, and presents a
    /// share sheet for it.
    private func finishRecording() {
        videoOutput.setSampleBufferDelegate(nil, queue: nil)
        audioOutput.setSampleBufferDelegate(nil, queue: nil)

        // Reset so the next take starts its own writer session.
        startTime = nil

        assetWriter?.finishWriting {
            guard self.assetWriter!.status == .completed else { fatalError("failed recording") }

            DispatchQueue.main.async {
                let activity = UIActivityViewController(activityItems: [self.fileUrl], applicationActivities: nil)
                self.present(activity, animated: true)
            }
        }
    }
}
|
122
|
+
|
123
|
+
// MARK: - AVCapture(Video|Audio)DataOutputSampleBufferDelegate
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    /// Routes every captured video/audio sample buffer into the asset-writer
    /// input of the matching media type.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

        // FIX: a few in-flight buffers can still be delivered around the
        // stop transition; dropping them is normal — don't fatalError().
        guard isRecording else { return }

        guard CMSampleBufferDataIsReady(sampleBuffer) else {
            print("not ready")
            return
        }

        // The writer session starts at the timestamp of the very first
        // buffer, whichever output it came from.
        if startTime == nil {
            startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            assetWriter?.startSession(atSourceTime: startTime!)
        }

        let mediaType: AVMediaType = output is AVCaptureVideoDataOutput ? .video : .audio
        // FIX: avoid the force-unwrap chain of filter{...}.first!.
        guard let input = assetWriter?.inputs.first(where: { $0.mediaType == mediaType }) else { return }
        if input.isReadyForMoreMediaData {
            // FIX: the interpolation backslash was missing, so the literal
            // text "(mediaType.rawValue)" was printed.
            print("append \(mediaType.rawValue)")
            input.append(sampleBuffer)
        }
    }
}
|
147
|
+
|
148
|
+
/// A UIView whose backing CALayer is an AVCaptureVideoPreviewLayer, so the
/// live capture session can be displayed simply by assigning `session` to
/// `videoPreviewLayer`.
private class PreviewView: UIView {

    // Substitute the preview layer as this view's backing layer class.
    override class var layerClass: AnyClass { AVCaptureVideoPreviewLayer.self }

    // Typed accessor for the backing layer; the cast cannot fail because
    // layerClass above guarantees the concrete type.
    var videoPreviewLayer: AVCaptureVideoPreviewLayer { layer as! AVCaptureVideoPreviewLayer }
}
|
157
|
+
|
158
|
+
```
|