質問編集履歴

4

正しいものに修正

2016/07/06 10:04

投稿

t_suzuki_
t_suzuki_

スコア7

test CHANGED
File without changes
test CHANGED
@@ -22,7 +22,11 @@
22
22
 
23
23
 
24
24
 
25
- 7/5 追記修正
25
+ 7/6 正常に動いたものにコード書き換え
26
+
27
+ encodeは上記ライブラリ使用
28
+
29
+ 16000KHzのためAACEncoderのfreqIdx = 8
26
30
 
27
31
  ```
28
32
 
@@ -34,53 +38,41 @@
34
38
 
35
39
  var sessions = AVCaptureSession()
36
40
 
41
+ var aacEncoder: AACEncoder = AACEncoder()
42
+
37
43
 
38
44
 
39
45
  func setupMicrophone()
40
46
 
41
47
  {
42
48
 
43
- let audio_output = AVCaptureAudioDataOutput()
49
+ let audio_output = AVCaptureAudioDataOutput()
44
50
 
45
51
  let audioSession:AVAudioSession = AVAudioSession.sharedInstance()
46
52
 
47
- let sampleRate:Double = 16_000
53
+ try! audioSession.setCategory(AVAudioSessionCategoryRecord)
48
54
 
49
- do {
55
+ try! audioSession.setPreferredSampleRate(16000.0)
50
56
 
51
- try AVAudioSession.sharedInstance().setPreferredSampleRate(sampleRate)
57
+ try! audioSession.setPreferredIOBufferDuration(1024.0/16000.0)
52
58
 
53
- try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord)
54
-
55
- try AVAudioSession.sharedInstance().setMode(AVAudioSessionModeVideoChat)
56
-
57
- try AVAudioSession.sharedInstance().setActive(true)
59
+ try! audioSession.setActive(true)
58
-
59
- } catch {
60
-
61
- }
62
60
 
63
61
 
64
62
 
65
- try! audioSession.setPreferredIOBufferDuration(1024.0/22000.0)
66
-
67
- try! audioSession.setActive(true)
68
-
69
-
70
-
71
63
  self.sessions.beginConfiguration()
72
64
 
73
- self.sessions = AVCaptureSession.init()
65
+ self.sessions = AVCaptureSession()
74
66
 
75
67
  self.sessions.automaticallyConfiguresApplicationAudioSession = false
76
68
 
77
69
  self.sessions.commitConfiguration()
78
70
 
79
-
71
+
80
72
 
81
73
  sessions.sessionPreset = AVCaptureSessionPresetLow
82
74
 
83
-
75
+
84
76
 
85
77
  let mic = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
86
78
 
@@ -90,7 +82,7 @@
90
82
 
91
83
 
92
84
 
93
- audio_output.setSampleBufferDelegate(aacEncoder, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))
85
+ audio_output.setSampleBufferDelegate(self, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))
94
86
 
95
87
 
96
88
 
@@ -110,8 +102,6 @@
110
102
 
111
103
  }
112
104
 
113
-
114
-
115
105
  sessions.addInput(mic_input)
116
106
 
117
107
  sessions.addOutput(audio_output)
@@ -122,75 +112,29 @@
122
112
 
123
113
 
124
114
 
125
- 【追記部分】
115
+ func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)
126
116
 
127
- func sampleOutput(audio sampleBuffer: CMSampleBuffer) {
117
+ {
128
118
 
129
- if let blockBufferRef = CMSampleBufferGetDataBuffer(sampleBuffer) {
119
+ if connection == audioConnection {
130
120
 
131
- let bufferLength = CMBlockBufferGetDataLength(blockBufferRef)
121
+ aacEncoder.encodeSampleBuffer(sampleBuffer) { (data:NSData!, error:NSError!) in
132
122
 
133
- if bufferLength != 0 {
123
+ if data != nil{
134
124
 
135
- let data = NSMutableData(length: bufferLength)
125
+ 送信処理(data)
136
126
 
137
- CMBlockBufferCopyDataBytes(blockBufferRef, 0, bufferLength, data!.mutableBytes)
127
+ NSLog("complete data.length %d", data.length)
138
128
 
139
- let samples = UnsafeMutablePointer<Int8>(data!.mutableBytes)
140
-
141
- let adtsHeader = adtsDataForPacketLength(bufferLength)
142
-
143
- let fullData = NSMutableData(data: adtsHeader)
144
-
145
- fullData.appendBytes(samples, length: bufferLength)
146
-
147
- CMSampleBufferInvalidate(sampleBuffer)
148
-
149
- 送信処理(fullData)
129
+ }
150
130
 
151
131
  }
152
132
 
153
133
  }
154
134
 
155
- }
156
135
 
157
136
 
158
137
 
159
- func adtsDataForPacketLength(packetLength:Int)->NSData{
160
-
161
-
162
-
163
- let adtsLength = 7
164
-
165
- var packet = [UInt8](count: sizeof(UInt8)*adtsLength, repeatedValue: 0)
166
-
167
- let fullLength:Int = adtsLength + packetLength
168
-
169
-
170
-
171
- let lengthMax2bits:Int = (((fullLength & 0xFFFF) & 0x1800) >> 11)
172
-
173
-
174
-
175
- packet[0] = 0xFF as UInt8
176
-
177
- packet[1] = 0xF1 as UInt8
178
-
179
- packet[2] = 0b01100000 as UInt8
180
-
181
- packet[3] = (0x40 | ((UInt8(lengthMax2bits) & 0xFF) << 6)) as UInt8
182
-
183
- packet[4] = UInt8((fullLength&0x7FF) >> 3)
184
-
185
- packet[5] = UInt8(((fullLength&7)<<5) + 0x1F)
186
-
187
- packet[6] = 0x00 as UInt8
188
-
189
- let data = NSData(bytes: UnsafeMutablePointer<Void>(packet), length: Int(adtsLength))
190
-
191
- return data
192
-
193
- }
194
138
 
195
139
  ```
196
140
 
@@ -212,8 +156,6 @@
212
156
 
213
157
 
214
158
 
215
-
216
-
217
159
  ・音楽ファイル.aacをnsDataとして読み込み、adtsヘッダをつけて送信したところ正しく再生された。
218
160
 
219
161
 

3

setup部分修正

2016/07/06 10:04

投稿

t_suzuki_
t_suzuki_

スコア7

test CHANGED
File without changes
test CHANGED
@@ -40,19 +40,33 @@
40
40
 
41
41
  {
42
42
 
43
- let audio_output = AVCaptureAudioDataOutput()
43
+ let audio_output = AVCaptureAudioDataOutput()
44
44
 
45
45
  let audioSession:AVAudioSession = AVAudioSession.sharedInstance()
46
46
 
47
+ let sampleRate:Double = 16_000
48
+
49
+ do {
50
+
51
+ try AVAudioSession.sharedInstance().setPreferredSampleRate(sampleRate)
52
+
47
- try! audioSession.setCategory(AVAudioSessionCategoryRecord)
53
+ try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord)
54
+
48
-
55
+ try AVAudioSession.sharedInstance().setMode(AVAudioSessionModeVideoChat)
56
+
49
- //try! audioSession.setPreferredSampleRate(16000.0)
57
+ try AVAudioSession.sharedInstance().setActive(true)
58
+
50
-
59
+ } catch {
60
+
61
+ }
62
+
63
+
64
+
51
- try! audioSession.setPreferredIOBufferDuration(1024.0/44100.0)
65
+ try! audioSession.setPreferredIOBufferDuration(1024.0/22000.0)
52
66
 
53
67
  try! audioSession.setActive(true)
54
68
 
55
-
69
+
56
70
 
57
71
  self.sessions.beginConfiguration()
58
72
 
@@ -76,7 +90,7 @@
76
90
 
77
91
 
78
92
 
79
- audio_output.setSampleBufferDelegate(self, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))
93
+ audio_output.setSampleBufferDelegate(aacEncoder, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))
80
94
 
81
95
 
82
96
 
@@ -96,6 +110,8 @@
96
110
 
97
111
  }
98
112
 
113
+
114
+
99
115
  sessions.addInput(mic_input)
100
116
 
101
117
  sessions.addOutput(audio_output)

2

Encode部分を修正、nsDataへの変換を追記

2016/07/05 08:15

投稿

t_suzuki_
t_suzuki_

スコア7

test CHANGED
File without changes
test CHANGED
@@ -18,263 +18,173 @@
18
18
 
19
19
  ###発生している問題・エラーメッセージ・疑問点
20
20
 
21
- 上記ライブラリでエンコードを行うと、エンコード後のデータが削減される?
21
+ AAC decode failed
22
+
23
+
24
+
22
-
25
+ 7/5 追記修正
23
-
24
-
25
-
26
26
 
27
27
  ```
28
28
 
29
- - (void) encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer completionBlock:(void (^)(NSData * encodedData, NSError* error))completionBlock {
30
-
31
- CFRetain(sampleBuffer);
29
+ captureClass
32
-
30
+
31
+
32
+
33
- dispatch_async(_encoderQueue, ^{
33
+ var audioConnection:AVCaptureConnection?
34
+
34
-
35
+ var sessions = AVCaptureSession()
36
+
37
+
38
+
39
+ func setupMicrophone()
40
+
41
+ {
42
+
43
+ let audio_output = AVCaptureAudioDataOutput()
44
+
45
+ let audioSession:AVAudioSession = AVAudioSession.sharedInstance()
46
+
47
+ try! audioSession.setCategory(AVAudioSessionCategoryRecord)
48
+
49
+ //try! audioSession.setPreferredSampleRate(16000.0)
50
+
51
+ try! audioSession.setPreferredIOBufferDuration(1024.0/44100.0)
52
+
35
- if (!_audioConverter) {
53
+ try! audioSession.setActive(true)
54
+
55
+
56
+
36
-
57
+ self.sessions.beginConfiguration()
58
+
37
- [self setupEncoderFromSampleBuffer:sampleBuffer];
59
+ self.sessions = AVCaptureSession.init()
60
+
61
+ self.sessions.automaticallyConfiguresApplicationAudioSession = false
62
+
63
+ self.sessions.commitConfiguration()
64
+
65
+
66
+
67
+ sessions.sessionPreset = AVCaptureSessionPresetLow
68
+
69
+
70
+
71
+ let mic = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
72
+
73
+
74
+
75
+ var mic_input: AVCaptureDeviceInput!
76
+
77
+
78
+
79
+ audio_output.setSampleBufferDelegate(self, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))
80
+
81
+
82
+
83
+ do
84
+
85
+ {
86
+
87
+ mic_input = try AVCaptureDeviceInput(device: mic)
38
88
 
39
89
  }
40
90
 
41
- CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
42
-
43
- CFRetain(blockBuffer);
91
+ catch
44
-
92
+
45
- OSStatus status = CMBlockBufferGetDataPointer(blockBuffer, 0, NULL, &_pcmBufferSize, &_pcmBuffer);
93
+ {
46
-
94
+
47
- NSError *error = nil;
95
+ return
48
-
49
- if (status != kCMBlockBufferNoErr) {
50
-
51
- error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
52
96
 
53
97
  }
54
98
 
99
+ sessions.addInput(mic_input)
100
+
101
+ sessions.addOutput(audio_output)
102
+
55
- NSLog(@"PCM Buffer Size: %zu", _pcmBufferSize);
103
+ audioConnection = audio_output.connectionWithMediaType(AVMediaTypeAudio)
104
+
56
-
105
+ }
106
+
107
+
108
+
57
-
109
+ 【追記部分】
58
-
110
+
59
- memset(_aacBuffer, 0, _aacBufferSize);
111
+ func sampleOutput(audio sampleBuffer: CMSampleBuffer) {
60
-
61
- AudioBufferList outAudioBufferList = {0};
112
+
62
-
63
- outAudioBufferList.mNumberBuffers = 1;
64
-
65
- outAudioBufferList.mBuffers[0].mNumberChannels = 1;
66
-
67
- outAudioBufferList.mBuffers[0].mDataByteSize = _aacBufferSize;
113
+ if let blockBufferRef = CMSampleBufferGetDataBuffer(sampleBuffer) {
68
-
114
+
69
- outAudioBufferList.mBuffers[0].mData = _aacBuffer;
115
+ let bufferLength = CMBlockBufferGetDataLength(blockBufferRef)
70
-
71
- AudioStreamPacketDescription *outPacketDescription = NULL;
116
+
72
-
73
- UInt32 ioOutputDataPacketSize = 1;
74
-
75
- NSLog(@"サイズ0: %u", (unsigned int)outAudioBufferList.mBuffers[0].mDataByteSize);
76
-
77
- status = AudioConverterFillComplexBuffer(_audioConverter, inInputDataProc, (__bridge void *)(self), &ioOutputDataPacketSize, &outAudioBufferList, outPacketDescription);
78
-
79
-
80
-
81
- NSLog(@"サイズ1: %u", (unsigned int)outAudioBufferList.mBuffers[0].mDataByteSize);
82
-
83
- NSData *data = nil;
84
-
85
- if (status == 0) {
117
+ if bufferLength != 0 {
118
+
86
-
119
+ let data = NSMutableData(length: bufferLength)
120
+
87
- NSData *rawAAC = [NSData dataWithBytes:outAudioBufferList.mBuffers[0].mData length:outAudioBufferList.mBuffers[0].mDataByteSize];
121
+ CMBlockBufferCopyDataBytes(blockBufferRef, 0, bufferLength, data!.mutableBytes)
122
+
88
-
123
+ let samples = UnsafeMutablePointer<Int8>(data!.mutableBytes)
124
+
89
- NSData *adtsHeader = [self adtsDataForPacketLength:rawAAC.length];
125
+ let adtsHeader = adtsDataForPacketLength(bufferLength)
90
-
126
+
91
- NSMutableData *fullData = [NSMutableData dataWithData:adtsHeader];
127
+ let fullData = NSMutableData(data: adtsHeader)
92
-
128
+
93
- [fullData appendData:rawAAC];
129
+ fullData.appendBytes(samples, length: bufferLength)
130
+
94
-
131
+ CMSampleBufferInvalidate(sampleBuffer)
132
+
95
- data = fullData;
133
+ 送信処理(fullData)
96
-
134
+
97
- } else {
135
+ }
98
-
99
- error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
100
-
101
- NSLog(@"%@",error);
102
136
 
103
137
  }
104
138
 
105
- if (completionBlock) {
106
-
107
- dispatch_async(_callbackQueue, ^{
108
-
109
- completionBlock(data, error);
110
-
111
- });
112
-
113
- }
114
-
115
- CFRelease(sampleBuffer);
116
-
117
- CFRelease(blockBuffer);
118
-
119
- });
120
-
121
- }
122
-
123
-
124
-
125
- static OSStatus inInputDataProc(AudioConverterRef inAudioConverter, UInt32 *ioNumberDataPackets, AudioBufferList *ioData, AudioStreamPacketDescription **outDataPacketDescription, void *inUserData)
126
-
127
- {
128
-
129
- AACEncoder *encoder = (__bridge AACEncoder *)(inUserData);
130
-
131
- UInt32 requestedPackets = *ioNumberDataPackets;
132
-
133
- //NSLog(@"Number of packets requested: %d", (unsigned int)requestedPackets);
134
-
135
- size_t copiedSamples = [encoder copyPCMSamplesIntoBuffer:ioData];
136
-
137
- if (copiedSamples < requestedPackets) {
138
-
139
- NSLog(@"PCM buffer isn't full enough!");
140
-
141
- *ioNumberDataPackets = 0;
142
-
143
- return -1;
144
-
145
139
  }
146
140
 
147
- *ioNumberDataPackets = 1;
148
-
149
- NSLog(@"Copied %zu samples into ioData", copiedSamples);
150
-
151
- return noErr;
152
-
153
- }
154
-
155
-
156
-
157
-
158
-
159
- 【追記】
160
-
161
- captureClass
162
-
163
-
164
-
165
- var audioConnection:AVCaptureConnection?
166
-
167
- var sessions = AVCaptureSession()
168
-
169
-
170
-
171
- func setupMicrophone()
172
-
173
- {
174
-
175
- let audio_output = AVCaptureAudioDataOutput()
176
-
177
- let audioSession:AVAudioSession = AVAudioSession.sharedInstance()
178
-
179
- try! audioSession.setCategory(AVAudioSessionCategoryRecord)
180
-
181
- //try! audioSession.setPreferredSampleRate(16000.0)
182
-
183
- try! audioSession.setPreferredIOBufferDuration(1024.0/44100.0)
184
-
185
- try! audioSession.setActive(true)
186
-
187
-
188
-
189
- self.sessions.beginConfiguration()
190
-
191
- self.sessions = AVCaptureSession.init()
192
-
193
- self.sessions.automaticallyConfiguresApplicationAudioSession = false
194
-
195
- self.sessions.commitConfiguration()
196
-
197
-
198
-
199
- sessions.sessionPreset = AVCaptureSessionPresetLow
200
-
201
-
202
-
203
- let mic = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
204
-
205
-
206
-
207
- var mic_input: AVCaptureDeviceInput!
208
-
209
-
210
-
211
- audio_output.setSampleBufferDelegate(self, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))
212
-
213
-
214
-
215
- do
216
-
217
- {
218
-
219
- mic_input = try AVCaptureDeviceInput(device: mic)
220
-
221
- }
222
-
223
- catch
224
-
225
- {
226
-
227
- return
228
-
229
- }
230
-
231
- sessions.addInput(mic_input)
232
-
233
- sessions.addOutput(audio_output)
234
-
235
- audioConnection = audio_output.connectionWithMediaType(AVMediaTypeAudio)
141
+
142
+
143
+ func adtsDataForPacketLength(packetLength:Int)->NSData{
144
+
145
+
146
+
147
+ let adtsLength = 7
148
+
149
+ var packet = [UInt8](count: sizeof(UInt8)*adtsLength, repeatedValue: 0)
150
+
151
+ let fullLength:Int = adtsLength + packetLength
152
+
153
+
154
+
155
+ let lengthMax2bits:Int = (((fullLength & 0xFFFF) & 0x1800) >> 11)
156
+
157
+
158
+
159
+ packet[0] = 0xFF as UInt8
160
+
161
+ packet[1] = 0xF1 as UInt8
162
+
163
+ packet[2] = 0b01100000 as UInt8
164
+
165
+ packet[3] = (0x40 | ((UInt8(lengthMax2bits) & 0xFF) << 6)) as UInt8
166
+
167
+ packet[4] = UInt8((fullLength&0x7FF) >> 3)
168
+
169
+ packet[5] = UInt8(((fullLength&7)<<5) + 0x1F)
170
+
171
+ packet[6] = 0x00 as UInt8
172
+
173
+ let data = NSData(bytes: UnsafeMutablePointer<Void>(packet), length: Int(adtsLength))
174
+
175
+ return data
236
176
 
237
177
  }
238
178
 
239
-
240
-
241
- func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef!, fromConnection connection: AVCaptureConnection!){
242
-
243
- if connection == audioConnection {
244
-
245
- let aacEncoder = AACEncoder()
246
-
247
- aacEncoder().encodeSampleBuffer(sampleBuffer) { (aacData:NSData!, err:NSError!) in
248
-
249
- if aacData != nil{
250
-
251
-
252
-
253
- }
254
-
255
- }
256
-
257
- }
258
-
259
179
  ```
260
180
 
261
- 上記コードのNSLog(サイズ0)ではmDataByteSize:4096だが
181
+
262
-
263
- (サイズ1)ではmDataByteSize:4になる。
264
-
265
-
266
-
267
- またaudioSessionのSampleRateを16000にすると、inInputDataProcにおいてPCM buffer isn't full enough!と表示されエラーになる。
268
182
 
269
183
 
270
184
 
271
185
  ###考えられる原因
272
186
 
273
- ・録音方法、samplebufferの使い方に問題がある?
274
-
275
- ライブラリ自体に問題がある?
187
+ nsDataへの変換に問題がある?
276
-
277
- ・ライブラリの設定に問題がある?(init,setup)
278
188
 
279
189
 
280
190
 
@@ -284,7 +194,11 @@
284
194
 
285
195
  ・ACCaptureのsampleRate,IOBufferDurationを変更。
286
196
 
197
+
198
+
199
+
200
+
287
- ・captureOutputにおいてsamplebufferをAVAssetWriterを用いてmp4として書き出すと正しく録音できていた。
201
+ ・音楽ファイル.aacをnsDataとして読み込み、adtsヘッダをつけて送信したところ正しく再生された。
288
202
 
289
203
 
290
204
 

1

コードを追記

2016/07/05 08:13

投稿

t_suzuki_
t_suzuki_

スコア7

test CHANGED
File without changes
test CHANGED
@@ -4,9 +4,7 @@
4
4
 
5
5
  xcode7.3にて、iphoneから録音した音声をリアルタイムに送信したい。
6
6
 
7
- 録音はAVCaptureSessionで行い、captureOutputにて得られるsampleBuffer(lpcm)をAACデータに変換
7
+ 録音はAVCaptureSessionで行い、captureOutputにて得られるsampleBuffer(lpcm)をformat:AAC,16000Khzにエンコード
8
-
9
- その際16000Khzにエンコード。
10
8
 
11
9
 
12
10
 
@@ -154,6 +152,110 @@
154
152
 
155
153
  }
156
154
 
155
+
156
+
157
+
158
+
159
+ 【追記】
160
+
161
+ captureClass
162
+
163
+
164
+
165
+ var audioConnection:AVCaptureConnection?
166
+
167
+ var sessions = AVCaptureSession()
168
+
169
+
170
+
171
+ func setupMicrophone()
172
+
173
+ {
174
+
175
+ let audio_output = AVCaptureAudioDataOutput()
176
+
177
+ let audioSession:AVAudioSession = AVAudioSession.sharedInstance()
178
+
179
+ try! audioSession.setCategory(AVAudioSessionCategoryRecord)
180
+
181
+ //try! audioSession.setPreferredSampleRate(16000.0)
182
+
183
+ try! audioSession.setPreferredIOBufferDuration(1024.0/44100.0)
184
+
185
+ try! audioSession.setActive(true)
186
+
187
+
188
+
189
+ self.sessions.beginConfiguration()
190
+
191
+ self.sessions = AVCaptureSession.init()
192
+
193
+ self.sessions.automaticallyConfiguresApplicationAudioSession = false
194
+
195
+ self.sessions.commitConfiguration()
196
+
197
+
198
+
199
+ sessions.sessionPreset = AVCaptureSessionPresetLow
200
+
201
+
202
+
203
+ let mic = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
204
+
205
+
206
+
207
+ var mic_input: AVCaptureDeviceInput!
208
+
209
+
210
+
211
+ audio_output.setSampleBufferDelegate(self, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))
212
+
213
+
214
+
215
+ do
216
+
217
+ {
218
+
219
+ mic_input = try AVCaptureDeviceInput(device: mic)
220
+
221
+ }
222
+
223
+ catch
224
+
225
+ {
226
+
227
+ return
228
+
229
+ }
230
+
231
+ sessions.addInput(mic_input)
232
+
233
+ sessions.addOutput(audio_output)
234
+
235
+ audioConnection = audio_output.connectionWithMediaType(AVMediaTypeAudio)
236
+
237
+ }
238
+
239
+
240
+
241
+ func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef!, fromConnection connection: AVCaptureConnection!){
242
+
243
+ if connection == audioConnection {
244
+
245
+ let aacEncoder = AACEncoder()
246
+
247
+ aacEncoder().encodeSampleBuffer(sampleBuffer) { (aacData:NSData!, err:NSError!) in
248
+
249
+ if aacData != nil{
250
+
251
+
252
+
253
+ }
254
+
255
+ }
256
+
257
+ }
258
+
157
259
  ```
158
260
 
159
261
  上記コードのNSLog(サイズ0)ではmDataByteSize:4096だが
@@ -182,7 +284,7 @@
182
284
 
183
285
  ・ACCaptureのsampleRate,IOBufferDurationを変更。
184
286
 
185
-
287
+ ・captureOutputにおいてsamplebufferをAVAssetWriterを用いてmp4として書き出すと正しく録音できていた。
186
288
 
187
289
 
188
290