質問編集履歴
4
正しいものに修正
title
CHANGED
File without changes
|
body
CHANGED
@@ -10,41 +10,37 @@
|
|
10
10
|
###発生している問題・エラーメッセージ・疑問点
|
11
11
|
AAC decode failed
|
12
12
|
|
13
|
-
7/
|
13
|
+
7/6 正常に動いたものにコード書き換え
|
14
|
+
encodeは上記ライブラリ使用
|
15
|
+
16000HzのためAACEncoderのfreqIdx = 8
|
14
16
|
```
|
15
17
|
captureClass
|
16
18
|
|
17
19
|
var audioConnection:AVCaptureConnection?
|
18
20
|
var sessions = AVCaptureSession()
|
21
|
+
var aacEncoder: AACEncoder = AACEncoder()
|
19
22
|
|
20
23
|
func setupMicrophone()
|
21
24
|
{
|
22
|
-
|
25
|
+
let audio_output = AVCaptureAudioDataOutput()
|
23
26
|
let audioSession:AVAudioSession = AVAudioSession.sharedInstance()
|
24
|
-
let sampleRate:Double = 16_000
|
25
|
-
do {
|
26
|
-
try AVAudioSession.sharedInstance().setPreferredSampleRate(sampleRate)
|
27
|
-
|
27
|
+
try! audioSession.setCategory(AVAudioSessionCategoryRecord)
|
28
|
+
try! audioSession.setPreferredSampleRate(16000.0)
|
28
|
-
|
29
|
+
try! audioSession.setPreferredIOBufferDuration(1024.0/16000.0)
|
29
|
-
|
30
|
+
try! audioSession.setActive(true)
|
30
|
-
} catch {
|
31
|
-
}
|
32
31
|
|
33
|
-
try! audioSession.setPreferredIOBufferDuration(1024.0/22000.0)
|
34
|
-
try! audioSession.setActive(true)
|
35
|
-
|
36
32
|
self.sessions.beginConfiguration()
|
37
|
-
self.sessions = AVCaptureSession
|
33
|
+
self.sessions = AVCaptureSession()
|
38
34
|
self.sessions.automaticallyConfiguresApplicationAudioSession = false
|
39
35
|
self.sessions.commitConfiguration()
|
40
|
-
|
36
|
+
|
41
37
|
sessions.sessionPreset = AVCaptureSessionPresetLow
|
42
|
-
|
38
|
+
|
43
39
|
let mic = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
|
44
40
|
|
45
41
|
var mic_input: AVCaptureDeviceInput!
|
46
42
|
|
47
|
-
audio_output.setSampleBufferDelegate(
|
43
|
+
audio_output.setSampleBufferDelegate(self, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))
|
48
44
|
|
49
45
|
do
|
50
46
|
{
|
@@ -54,47 +50,23 @@
|
|
54
50
|
{
|
55
51
|
return
|
56
52
|
}
|
57
|
-
|
58
53
|
sessions.addInput(mic_input)
|
59
54
|
sessions.addOutput(audio_output)
|
60
55
|
audioConnection = audio_output.connectionWithMediaType(AVMediaTypeAudio)
|
61
56
|
}
|
62
57
|
|
63
|
-
【追記部分】
|
64
|
-
|
58
|
+
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)
|
59
|
+
{
|
60
|
+
if connection == audioConnection {
|
65
|
-
|
61
|
+
aacEncoder.encodeSampleBuffer(sampleBuffer) { (data:NSData!, error:NSError!) in
|
66
|
-
let bufferLength = CMBlockBufferGetDataLength(blockBufferRef)
|
67
|
-
|
62
|
+
if data != nil{
|
68
|
-
let data = NSMutableData(length: bufferLength)
|
69
|
-
CMBlockBufferCopyDataBytes(blockBufferRef, 0, bufferLength, data!.mutableBytes)
|
70
|
-
let samples = UnsafeMutablePointer<Int8>(data!.mutableBytes)
|
71
|
-
let adtsHeader = adtsDataForPacketLength(bufferLength)
|
72
|
-
let fullData = NSMutableData(data: adtsHeader)
|
73
|
-
fullData.appendBytes(samples, length: bufferLength)
|
74
|
-
CMSampleBufferInvalidate(sampleBuffer)
|
75
|
-
|
63
|
+
送信処理(data)
|
64
|
+
NSLog("complete data.length %d", data.length)
|
65
|
+
}
|
76
66
|
}
|
77
67
|
}
|
78
|
-
}
|
79
68
|
|
80
|
-
|
69
|
+
|
81
|
-
|
82
|
-
let adtsLength = 7
|
83
|
-
var packet = [UInt8](count: sizeof(UInt8)*adtsLength, repeatedValue: 0)
|
84
|
-
let fullLength:Int = adtsLength + packetLength
|
85
|
-
|
86
|
-
let lengthMax2bits:Int = (((fullLength & 0xFFFF) & 0x1800) >> 11)
|
87
|
-
|
88
|
-
packet[0] = 0xFF as UInt8
|
89
|
-
packet[1] = 0xF1 as UInt8
|
90
|
-
packet[2] = 0b01100000 as UInt8
|
91
|
-
packet[3] = (0x40 | ((UInt8(lengthMax2bits) & 0xFF) << 6)) as UInt8
|
92
|
-
packet[4] = UInt8((fullLength&0x7FF) >> 3)
|
93
|
-
packet[5] = UInt8(((fullLength&7)<<5) + 0x1F)
|
94
|
-
packet[6] = 0x00 as UInt8
|
95
|
-
let data = NSData(bytes: UnsafeMutablePointer<Void>(packet), length: Int(adtsLength))
|
96
|
-
return data
|
97
|
-
}
|
98
70
|
```
|
99
71
|
|
100
72
|
|
@@ -105,7 +77,6 @@
|
|
105
77
|
・adtsヘッダのfreqIdxを変更。
|
106
78
|
・AVCaptureのsampleRate,IOBufferDurationを変更。
|
107
79
|
|
108
|
-
|
109
80
|
・音楽ファイル.aacをnsDataとして読み込み、adtsヘッダをつけて送信したところ正しく再生された。
|
110
81
|
|
111
82
|
|
3
setup部分修正
title
CHANGED
File without changes
|
body
CHANGED
@@ -19,13 +19,20 @@
|
|
19
19
|
|
20
20
|
func setupMicrophone()
|
21
21
|
{
|
22
|
-
|
22
|
+
let audio_output = AVCaptureAudioDataOutput()
|
23
23
|
let audioSession:AVAudioSession = AVAudioSession.sharedInstance()
|
24
|
+
let sampleRate:Double = 16_000
|
25
|
+
do {
|
26
|
+
try AVAudioSession.sharedInstance().setPreferredSampleRate(sampleRate)
|
24
|
-
|
27
|
+
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord)
|
28
|
+
try AVAudioSession.sharedInstance().setMode(AVAudioSessionModeVideoChat)
|
25
|
-
|
29
|
+
try AVAudioSession.sharedInstance().setActive(true)
|
30
|
+
} catch {
|
31
|
+
}
|
32
|
+
|
26
|
-
try! audioSession.setPreferredIOBufferDuration(1024.0/
|
33
|
+
try! audioSession.setPreferredIOBufferDuration(1024.0/22000.0)
|
27
34
|
try! audioSession.setActive(true)
|
28
|
-
|
35
|
+
|
29
36
|
self.sessions.beginConfiguration()
|
30
37
|
self.sessions = AVCaptureSession.init()
|
31
38
|
self.sessions.automaticallyConfiguresApplicationAudioSession = false
|
@@ -37,7 +44,7 @@
|
|
37
44
|
|
38
45
|
var mic_input: AVCaptureDeviceInput!
|
39
46
|
|
40
|
-
audio_output.setSampleBufferDelegate(
|
47
|
+
audio_output.setSampleBufferDelegate(aacEncoder, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))
|
41
48
|
|
42
49
|
do
|
43
50
|
{
|
@@ -47,6 +54,7 @@
|
|
47
54
|
{
|
48
55
|
return
|
49
56
|
}
|
57
|
+
|
50
58
|
sessions.addInput(mic_input)
|
51
59
|
sessions.addOutput(audio_output)
|
52
60
|
audioConnection = audio_output.connectionWithMediaType(AVMediaTypeAudio)
|
2
Encode部分を修正、nsDataへの変換を追記
title
CHANGED
File without changes
|
body
CHANGED
@@ -8,76 +8,10 @@
|
|
8
8
|
|
9
9
|
|
10
10
|
###発生している問題・エラーメッセージ・疑問点
|
11
|
-
|
11
|
+
AAC decode failed
|
12
12
|
|
13
|
-
|
13
|
+
7/5 追記修正
|
14
14
|
```
|
15
|
-
- (void) encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer completionBlock:(void (^)(NSData * encodedData, NSError* error))completionBlock {
|
16
|
-
CFRetain(sampleBuffer);
|
17
|
-
dispatch_async(_encoderQueue, ^{
|
18
|
-
if (!_audioConverter) {
|
19
|
-
[self setupEncoderFromSampleBuffer:sampleBuffer];
|
20
|
-
}
|
21
|
-
CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
|
22
|
-
CFRetain(blockBuffer);
|
23
|
-
OSStatus status = CMBlockBufferGetDataPointer(blockBuffer, 0, NULL, &_pcmBufferSize, &_pcmBuffer);
|
24
|
-
NSError *error = nil;
|
25
|
-
if (status != kCMBlockBufferNoErr) {
|
26
|
-
error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
|
27
|
-
}
|
28
|
-
NSLog(@"PCM Buffer Size: %zu", _pcmBufferSize);
|
29
|
-
|
30
|
-
memset(_aacBuffer, 0, _aacBufferSize);
|
31
|
-
AudioBufferList outAudioBufferList = {0};
|
32
|
-
outAudioBufferList.mNumberBuffers = 1;
|
33
|
-
outAudioBufferList.mBuffers[0].mNumberChannels = 1;
|
34
|
-
outAudioBufferList.mBuffers[0].mDataByteSize = _aacBufferSize;
|
35
|
-
outAudioBufferList.mBuffers[0].mData = _aacBuffer;
|
36
|
-
AudioStreamPacketDescription *outPacketDescription = NULL;
|
37
|
-
UInt32 ioOutputDataPacketSize = 1;
|
38
|
-
NSLog(@"サイズ0: %u", (unsigned int)outAudioBufferList.mBuffers[0].mDataByteSize);
|
39
|
-
status = AudioConverterFillComplexBuffer(_audioConverter, inInputDataProc, (__bridge void *)(self), &ioOutputDataPacketSize, &outAudioBufferList, outPacketDescription);
|
40
|
-
|
41
|
-
NSLog(@"サイズ1: %u", (unsigned int)outAudioBufferList.mBuffers[0].mDataByteSize);
|
42
|
-
NSData *data = nil;
|
43
|
-
if (status == 0) {
|
44
|
-
NSData *rawAAC = [NSData dataWithBytes:outAudioBufferList.mBuffers[0].mData length:outAudioBufferList.mBuffers[0].mDataByteSize];
|
45
|
-
NSData *adtsHeader = [self adtsDataForPacketLength:rawAAC.length];
|
46
|
-
NSMutableData *fullData = [NSMutableData dataWithData:adtsHeader];
|
47
|
-
[fullData appendData:rawAAC];
|
48
|
-
data = fullData;
|
49
|
-
} else {
|
50
|
-
error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
|
51
|
-
NSLog(@"%@",error);
|
52
|
-
}
|
53
|
-
if (completionBlock) {
|
54
|
-
dispatch_async(_callbackQueue, ^{
|
55
|
-
completionBlock(data, error);
|
56
|
-
});
|
57
|
-
}
|
58
|
-
CFRelease(sampleBuffer);
|
59
|
-
CFRelease(blockBuffer);
|
60
|
-
});
|
61
|
-
}
|
62
|
-
|
63
|
-
static OSStatus inInputDataProc(AudioConverterRef inAudioConverter, UInt32 *ioNumberDataPackets, AudioBufferList *ioData, AudioStreamPacketDescription **outDataPacketDescription, void *inUserData)
|
64
|
-
{
|
65
|
-
AACEncoder *encoder = (__bridge AACEncoder *)(inUserData);
|
66
|
-
UInt32 requestedPackets = *ioNumberDataPackets;
|
67
|
-
//NSLog(@"Number of packets requested: %d", (unsigned int)requestedPackets);
|
68
|
-
size_t copiedSamples = [encoder copyPCMSamplesIntoBuffer:ioData];
|
69
|
-
if (copiedSamples < requestedPackets) {
|
70
|
-
NSLog(@"PCM buffer isn't full enough!");
|
71
|
-
*ioNumberDataPackets = 0;
|
72
|
-
return -1;
|
73
|
-
}
|
74
|
-
*ioNumberDataPackets = 1;
|
75
|
-
NSLog(@"Copied %zu samples into ioData", copiedSamples);
|
76
|
-
return noErr;
|
77
|
-
}
|
78
|
-
|
79
|
-
|
80
|
-
【追記】
|
81
15
|
captureClass
|
82
16
|
|
83
17
|
var audioConnection:AVCaptureConnection?
|
@@ -118,30 +52,53 @@
|
|
118
52
|
audioConnection = audio_output.connectionWithMediaType(AVMediaTypeAudio)
|
119
53
|
}
|
120
54
|
|
121
|
-
|
55
|
+
【追記部分】
|
122
|
-
|
56
|
+
func sampleOutput(audio sampleBuffer: CMSampleBuffer) {
|
123
|
-
let aacEncoder = AACEncoder()
|
124
|
-
|
57
|
+
if let blockBufferRef = CMSampleBufferGetDataBuffer(sampleBuffer) {
|
58
|
+
let bufferLength = CMBlockBufferGetDataLength(blockBufferRef)
|
125
|
-
if
|
59
|
+
if bufferLength != 0 {
|
126
|
-
|
60
|
+
let data = NSMutableData(length: bufferLength)
|
61
|
+
CMBlockBufferCopyDataBytes(blockBufferRef, 0, bufferLength, data!.mutableBytes)
|
62
|
+
let samples = UnsafeMutablePointer<Int8>(data!.mutableBytes)
|
63
|
+
let adtsHeader = adtsDataForPacketLength(bufferLength)
|
64
|
+
let fullData = NSMutableData(data: adtsHeader)
|
65
|
+
fullData.appendBytes(samples, length: bufferLength)
|
66
|
+
CMSampleBufferInvalidate(sampleBuffer)
|
67
|
+
送信処理(fullData)
|
127
68
|
}
|
128
69
|
}
|
129
|
-
}
|
70
|
+
}
|
71
|
+
|
72
|
+
func adtsDataForPacketLength(packetLength:Int)->NSData{
|
73
|
+
|
74
|
+
let adtsLength = 7
|
75
|
+
var packet = [UInt8](count: sizeof(UInt8)*adtsLength, repeatedValue: 0)
|
76
|
+
let fullLength:Int = adtsLength + packetLength
|
77
|
+
|
78
|
+
let lengthMax2bits:Int = (((fullLength & 0xFFFF) & 0x1800) >> 11)
|
79
|
+
|
80
|
+
packet[0] = 0xFF as UInt8
|
81
|
+
packet[1] = 0xF1 as UInt8
|
82
|
+
packet[2] = 0b01100000 as UInt8
|
83
|
+
packet[3] = (0x40 | ((UInt8(lengthMax2bits) & 0xFF) << 6)) as UInt8
|
84
|
+
packet[4] = UInt8((fullLength&0x7FF) >> 3)
|
85
|
+
packet[5] = UInt8(((fullLength&7)<<5) + 0x1F)
|
86
|
+
packet[6] = 0x00 as UInt8
|
87
|
+
let data = NSData(bytes: UnsafeMutablePointer<Void>(packet), length: Int(adtsLength))
|
88
|
+
return data
|
89
|
+
}
|
130
90
|
```
|
131
|
-
上記コードのNSLog(サイズ0)ではmDataByteSize:4096だが
|
132
|
-
(サイズ1)ではmDataByteSize:4になる。
|
133
91
|
|
134
|
-
またaudioSessionのSampleRateを16000にすると、inInputDataProcにおいてPCM buffer isn't full enough!と表示されエラーになる。
|
135
92
|
|
136
93
|
###考えられる原因
|
137
|
-
・録音方法、samplebufferの使い方に問題がある?
|
138
|
-
・
|
94
|
+
・nsDataへの変換に問題がある?
|
139
|
-
・ライブラリの設定に問題がある?(init,setup)
|
140
95
|
|
141
96
|
###試したこと
|
142
97
|
・adtsヘッダのfreqIdxを変更。
|
143
98
|
・AVCaptureのsampleRate,IOBufferDurationを変更。
|
144
|
-
・captureOutputにおいてsamplebufferをAVAssetWriterを用いてmp4として書き出すと正しく録音できていた。
|
145
99
|
|
146
100
|
|
101
|
+
・音楽ファイル.aacをnsDataとして読み込み、adtsヘッダをつけて送信したところ正しく再生された。
|
102
|
+
|
103
|
+
|
147
104
|
当方、xcodeを触り始めて間もない初心者ですが、どなたかご教授願いします。
|
1
コードを追記
title
CHANGED
File without changes
|
body
CHANGED
@@ -1,8 +1,7 @@
|
|
1
1
|
###前提・実現したいこと
|
2
2
|
|
3
3
|
xcode7.3にて、iphoneから録音した音声をリアルタイムに送信したい。
|
4
|
-
録音はAVCaptureSessionで行い、captureOutputにて得られるsampleBuffer(
|
4
|
+
録音はAVCaptureSessionで行い、captureOutputにて得られるsampleBuffer(lpcm)をformat:AAC,16000Hzにエンコード。
|
5
|
-
その際16000Khzにエンコード。
|
6
5
|
|
7
6
|
エンコードは以下のライブラリを使用。
|
8
7
|
[エンコードライブラリ](https://github.com/chrisballinger/FFmpeg-iOS-Encoder/blob/master/FFmpegEncoder/AACEncoder.m)
|
@@ -76,6 +75,58 @@
|
|
76
75
|
NSLog(@"Copied %zu samples into ioData", copiedSamples);
|
77
76
|
return noErr;
|
78
77
|
}
|
78
|
+
|
79
|
+
|
80
|
+
【追記】
|
81
|
+
captureClass
|
82
|
+
|
83
|
+
var audioConnection:AVCaptureConnection?
|
84
|
+
var sessions = AVCaptureSession()
|
85
|
+
|
86
|
+
func setupMicrophone()
|
87
|
+
{
|
88
|
+
let audio_output = AVCaptureAudioDataOutput()
|
89
|
+
let audioSession:AVAudioSession = AVAudioSession.sharedInstance()
|
90
|
+
try! audioSession.setCategory(AVAudioSessionCategoryRecord)
|
91
|
+
//try! audioSession.setPreferredSampleRate(16000.0)
|
92
|
+
try! audioSession.setPreferredIOBufferDuration(1024.0/44100.0)
|
93
|
+
try! audioSession.setActive(true)
|
94
|
+
|
95
|
+
self.sessions.beginConfiguration()
|
96
|
+
self.sessions = AVCaptureSession.init()
|
97
|
+
self.sessions.automaticallyConfiguresApplicationAudioSession = false
|
98
|
+
self.sessions.commitConfiguration()
|
99
|
+
|
100
|
+
sessions.sessionPreset = AVCaptureSessionPresetLow
|
101
|
+
|
102
|
+
let mic = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
|
103
|
+
|
104
|
+
var mic_input: AVCaptureDeviceInput!
|
105
|
+
|
106
|
+
audio_output.setSampleBufferDelegate(self, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))
|
107
|
+
|
108
|
+
do
|
109
|
+
{
|
110
|
+
mic_input = try AVCaptureDeviceInput(device: mic)
|
111
|
+
}
|
112
|
+
catch
|
113
|
+
{
|
114
|
+
return
|
115
|
+
}
|
116
|
+
sessions.addInput(mic_input)
|
117
|
+
sessions.addOutput(audio_output)
|
118
|
+
audioConnection = audio_output.connectionWithMediaType(AVMediaTypeAudio)
|
119
|
+
}
|
120
|
+
|
121
|
+
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef!, fromConnection connection: AVCaptureConnection!){
|
122
|
+
if connection == audioConnection {
|
123
|
+
let aacEncoder = AACEncoder()
|
124
|
+
aacEncoder().encodeSampleBuffer(sampleBuffer) { (aacData:NSData!, err:NSError!) in
|
125
|
+
if aacData != nil{
|
126
|
+
|
127
|
+
}
|
128
|
+
}
|
129
|
+
}
|
79
130
|
```
|
80
131
|
上記コードのNSLog(サイズ0)ではmDataByteSize:4096だが
|
81
132
|
(サイズ1)ではmDataByteSize:4になる。
|
@@ -90,7 +141,7 @@
|
|
90
141
|
###試したこと
|
91
142
|
・adtsヘッダのfreqIdxを変更。
|
92
143
|
・AVCaptureのsampleRate,IOBufferDurationを変更。
|
144
|
+
・captureOutputにおいてsamplebufferをAVAssetWriterを用いてmp4として書き出すと正しく録音できていた。
|
93
145
|
|
94
146
|
|
95
|
-
|
96
147
|
当方、xcodeを触り始めて間もない初心者ですが、どなたかご教授願いします。
|