ForgetSou | Blog

❤ 武统台湾 刻不容缓 ❤

0%

iOS开发-AudioUnit实时录音(OC)

1.单声道录音

//  FSUnitRecorder.h

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>

NS_ASSUME_NONNULL_BEGIN

/// Invoked from the audio I/O thread with each freshly captured PCM buffer
/// list. ("Onput" is the original author's spelling — kept for compatibility.)
typedef void (^kAudioUnitRecorderOnputBlock)(AudioBufferList *bufferList);

/// Mono AudioUnit (RemoteIO) recorder that delivers raw PCM via a block.
@interface FSUnitRecorder : NSObject

// Sample rate actually negotiated with the hardware (16000 preferred).
@property (assign, nonatomic) double sampleRate;
// YES between -start and -stop.
@property (assign, nonatomic, readonly) BOOL isRecording;
// Called on the audio I/O thread — keep the work inside it minimal.
@property (copy, nonatomic) kAudioUnitRecorderOnputBlock bufferListBlock;

/// Begins capturing (removes any previous recording file first).
- (void)start;
/// Stops capturing.
- (void)stop;

@end

NS_ASSUME_NONNULL_END
//  FSUnitRecorder.m
#import "FSUnitRecorder.h"

// Private state for FSUnitRecorder.
@interface FSUnitRecorder ()
{
AudioUnit audioUnit; // the RemoteIO unit doing the capture
BOOL audioComponentInitialized; // guards one-time creation of the unit
}

// NOTE(review): declared but never read/written in this file — confirm it is
// still needed before relying on it.
@property (nonatomic,assign) AudioStreamBasicDescription inputStreamDesc;

@end

@implementation FSUnitRecorder

/// Initializes the recorder and configures the audio unit with the first
/// sample rate the device accepts.
- (instancetype)init {
    self = [super init];
    if (self) {
        // FIX: the original called [super init] a second time inside this
        // branch; a single designated-initializer chain is correct.
        [self defaultSetting];
    }
    return self;
}

/// Tries the preferred sample rates in order (16 kHz first) and keeps the
/// first one the device accepts.
- (void)defaultSetting {
    static const double kPreferredRates[] = {16000.0, 11025.0, 22050.0, 44100.0};
    const size_t rateCount = sizeof(kPreferredRates) / sizeof(kPreferredRates[0]);
    for (size_t idx = 0; idx < rateCount; idx++) {
        if ([self prepareRecord:kPreferredRates[idx]] == noErr) {
            self.sampleRate = kPreferredRates[idx];
            break;
        }
    }
}

/// Configures the shared audio session and the RemoteIO unit for mono,
/// packed, signed 16-bit PCM capture at `sampleRate`.
/// @param sampleRate Desired hardware sample rate in Hz.
/// @return noErr on success; otherwise the first failing Core Audio status.
- (OSStatus)prepareRecord:(double)sampleRate {
    OSStatus status = noErr;

    // Record + playback category, mixing with other audio, Bluetooth allowed.
    NSError *error;
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord
                                     withOptions:AVAudioSessionCategoryOptionMixWithOthers | AVAudioSessionCategoryOptionAllowBluetooth
                                           error:&error];
    [[AVAudioSession sharedInstance] setActive:YES error:&error];

    if (!audioComponentInitialized) {
        audioComponentInitialized = YES;

        // Describe and instantiate the RemoteIO unit (done only once).
        AudioComponentDescription audioComponentDescription;
        audioComponentDescription.componentType = kAudioUnitType_Output;
        audioComponentDescription.componentSubType = kAudioUnitSubType_RemoteIO;
        audioComponentDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
        audioComponentDescription.componentFlags = 0;
        audioComponentDescription.componentFlagsMask = 0;

        AudioComponent remoteIOComponent = AudioComponentFindNext(NULL, &audioComponentDescription);
        status = AudioComponentInstanceNew(remoteIOComponent, &(self->audioUnit));
        if (CheckError(status, "Couldn't get RemoteIO unit instance")) {
            return status;
        }
    }

    UInt32 oneFlag = 1;
    AudioUnitElement bus0 = 0; // output element (speaker side)
    AudioUnitElement bus1 = 1; // input element (microphone side)

    // Enable recording on the input element. (The original carried a disabled
    // `if ((NO))` branch that enabled playback output; that unreachable code
    // has been removed.)
    status = AudioUnitSetProperty(self->audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  bus1,
                                  &oneFlag,
                                  sizeof(oneFlag));
    if (CheckError(status, "Couldn't enable RemoteIO input")) {
        return status;
    }

    // Mono, packed, signed 16-bit PCM.
    // FIX: the original declared mChannelsPerFrame = 2 while keeping 2 bytes
    // per frame/packet; for packed 16-bit samples those byte counts describe
    // exactly ONE channel, and this class is the mono recorder, so declare a
    // single channel consistently.
    AudioStreamBasicDescription asbd;
    memset(&asbd, 0, sizeof(asbd));
    asbd.mSampleRate = sampleRate;
    asbd.mFormatID = kAudioFormatLinearPCM;
    asbd.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    asbd.mChannelsPerFrame = 1;
    asbd.mBitsPerChannel = 16;
    asbd.mFramesPerPacket = 1;
    asbd.mBytesPerFrame = 2;  // 1 channel * 16 bits / 8
    asbd.mBytesPerPacket = 2; // 1 frame per packet

    // Set format for output (bus 0) on the RemoteIO's input scope.
    status = AudioUnitSetProperty(self->audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  bus0,
                                  &asbd,
                                  sizeof(asbd));
    if (CheckError(status, "Couldn't set the ASBD for RemoteIO on input scope/bus 0")) {
        return status;
    }

    // Set format for mic input (bus 1) on the RemoteIO's output scope.
    status = AudioUnitSetProperty(self->audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  bus1,
                                  &asbd,
                                  sizeof(asbd));
    if (CheckError(status, "Couldn't set the ASBD for RemoteIO on output scope/bus 1")) {
        return status;
    }

    // Install the recording callback on the input element.
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = inputCallBackFun;
    callbackStruct.inputProcRefCon = (__bridge void *)self;
    status = AudioUnitSetProperty(self->audioUnit,
                                  kAudioOutputUnitProperty_SetInputCallback,
                                  kAudioUnitScope_Global,
                                  bus1,
                                  &callbackStruct,
                                  sizeof(callbackStruct));
    // FIX: the message said "bus 0" but the callback is installed on bus 1.
    if (CheckError(status, "Couldn't set RemoteIO's input callback on bus 1")) {
        return status;
    }

    // Ready to start.
    status = AudioUnitInitialize(self->audioUnit);
    if (CheckError(status, "Couldn't initialize the RemoteIO unit")) {
        return status;
    }

    return status;
}

/// Starts capturing; removes any previous recording file first.
- (void)start {
    [self deleteAudioFile];
    // FIX: the failure message said "Stop" for a Start call.
    CheckError(AudioOutputUnitStart(audioUnit), "AudioOutputUnitStart failed");
    _isRecording = YES;
}

/// Stops capturing and clears the recording flag.
- (void)stop {
    OSStatus stopStatus = AudioOutputUnitStop(audioUnit);
    CheckError(stopStatus, "AudioOutputUnitStop failed");
    _isRecording = NO;
}

/// Deletes the previous recording file ("record.mp3" in Documents), if any.
- (void)deleteAudioFile {
    NSString *documentsDir = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
    NSString *recordingPath = [documentsDir stringByAppendingPathComponent:@"record.mp3"];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:recordingPath]) {
        [fileManager removeItemAtPath:recordingPath error:nil];
    }
}

// Dispose of the RemoteIO unit when the recorder is deallocated.
// NOTE(review): disposes without an explicit AudioOutputUnitStop /
// AudioUnitUninitialize — assumes the caller stopped recording first; confirm.
- (void)dealloc {
CheckError(AudioComponentInstanceDispose(audioUnit),
"AudioComponentInstanceDispose failed");
NSLog(@"UnitRecorder销毁");
}

/// Logs `operation` with a readable rendering of `error` and returns `error`
/// unchanged, so calls can be written as: if (CheckError(status, "...")) return status;
/// Core Audio statuses are often four printable ASCII chars packed big-endian
/// into an OSStatus (e.g. 'fmt?'); otherwise the raw integer is printed.
static OSStatus CheckError(OSStatus error, const char *operation) {
if (error == noErr) {
return error;
}
char errorString[20] = "";
// See if it appears to be a 4-char-code: place the big-endian bytes at
// indices 1..4, leaving room for surrounding quotes.
*(UInt32 *)(errorString + 1) = CFSwapInt32HostToBig(error);
if (isprint(errorString[1]) && isprint(errorString[2]) &&
isprint(errorString[3]) && isprint(errorString[4])) {
// Wrap the printable code in single quotes, e.g. 'fmt?'.
errorString[0] = errorString[5] = '\'';
errorString[6] = '\0';
} else {
// No, format it as an integer
sprintf(errorString, "%d", (int)error);
}
fprintf(stderr, "Error: %s (%s)\n", operation, errorString);
return error;
}

/// Render callback for the playback path. In this file it is only referenced
/// from a disabled `if ((NO))` branch in -prepareRecord:, so it never runs
/// unless that branch is re-enabled.
static OSStatus playbackCallback(void *inRefCon,
AudioUnitRenderActionFlags *ioActionFlags,
const AudioTimeStamp *inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberFrames,
AudioBufferList *ioData) {
OSStatus status = noErr;

// Notes: ioData contains buffers (may be more than one!)
// Fill them up as much as you can. Remember to set the size value in each buffer to match how
// much data is in the buffer.
FSUnitRecorder *recorder = (__bridge FSUnitRecorder *) inRefCon;

// Pull captured audio from the input element (bus 1) straight into ioData.
UInt32 bus1 = 1;
status = AudioUnitRender(recorder->audioUnit,
ioActionFlags,
inTimeStamp,
bus1,
inNumberFrames,
ioData);
CheckError(status, "Couldn't render from RemoteIO unit");
return status;
}

/// Input (microphone) render callback: pulls this cycle's captured PCM and
/// forwards it to the owner's bufferListBlock. Runs on the audio I/O thread.
static OSStatus inputCallBackFun(void *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp *inTimeStamp,
                                 UInt32 inBusNumber,
                                 UInt32 inNumberFrames,
                                 AudioBufferList * __nullable ioData)
{
    FSUnitRecorder *recorder = (__bridge FSUnitRecorder *)(inRefCon);

    // mData == NULL asks AudioUnitRender to supply its own buffer memory for
    // this render cycle.
    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0].mNumberChannels = 1; // FIX: was left uninitialized
    bufferList.mBuffers[0].mData = NULL;
    bufferList.mBuffers[0].mDataByteSize = 0;

    OSStatus status = AudioUnitRender(recorder->audioUnit,
                                      ioActionFlags,
                                      inTimeStamp,
                                      1, // input element
                                      inNumberFrames,
                                      &bufferList);
    // FIX: the render status was ignored; don't hand an unfilled buffer to
    // the client on failure.
    if (CheckError(status, "Couldn't render from RemoteIO unit")) {
        return status;
    }
    if (recorder.bufferListBlock) {
        recorder.bufferListBlock(&bufferList);
    }

    return noErr;
}

@end

使用

// Lazily creates the mono recorder.
- (FSUnitRecorder *)recorder {
if (!_recorder) {
_recorder = [[FSUnitRecorder alloc] init];
}
return _recorder;
}

// Install the capture callback. @weakify/@strongify break the retain cycle
// (self -> recorder -> block -> self).
@weakify(self);
self.recorder.bufferListBlock = ^(AudioBufferList * _Nonnull bufferList) {
@strongify(self);
AudioBuffer buffer = bufferList->mBuffers[0];
NSData *data = [NSData dataWithBytes:buffer.mData length:buffer.mDataByteSize];
// Process the captured PCM chunk.
[self processSampleData:data];
};

2.双声道录音

说明:
公司新业务要接入蓝牙耳机,支持蓝牙耳机,左右耳机分别进行语音识别等功能。该业务牵扯到实时双通道录音,分别提取左右buffer,类似的业务需求市场上也是有的,比如AirPods,百度的一款蓝牙耳机(小度APP“流浪地球模式”,具体可以买一个个试用下)。
废话不多说了,直接看代码就行。

ZDUnitRecorder.h

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>

NS_ASSUME_NONNULL_BEGIN

/// Invoked from the audio I/O thread with each freshly captured PCM buffer
/// list (one buffer per channel for this stereo recorder).
typedef void (^kAudioUnitRecorderOnputBlock)(AudioBufferList *bufferList);

/// Stereo (non-interleaved) AudioUnit recorder delivering left/right PCM
/// buffers via a block — used for dual-earbud voice capture.
@interface ZDUnitRecorder : NSObject

// Sample rate actually negotiated with the hardware (16000 preferred).
@property (assign, nonatomic) double sampleRate;
// YES between -start and -stop.
@property (assign, nonatomic, readonly) BOOL isRecording;
// Called on the audio I/O thread — keep the work inside it minimal.
@property (copy, nonatomic) kAudioUnitRecorderOnputBlock bufferListBlock;

/// Begins capturing (removes any previous recording file first).
- (void)start;
/// Stops capturing.
- (void)stop;

@end

NS_ASSUME_NONNULL_END

ZDUnitRecorder.m

#import "ZDUnitRecorder.h"

// Private state for ZDUnitRecorder.
@interface ZDUnitRecorder ()
{
AudioUnit audioUnit; // the RemoteIO unit doing the capture
BOOL audioComponentInitialized; // guards one-time creation of the unit
}

// NOTE(review): declared but never read/written in this file — confirm it is
// still needed before relying on it.
@property (nonatomic,assign) AudioStreamBasicDescription inputStreamDesc;

@end

@implementation ZDUnitRecorder

/// Initializes the recorder and configures the audio unit with the first
/// sample rate the device accepts.
- (instancetype)init {
    self = [super init];
    if (self) {
        // FIX: the original called [super init] a second time inside this
        // branch; a single designated-initializer chain is correct.
        [self defaultSetting];
    }
    return self;
}

/// Tries the preferred sample rates in order (16 kHz first) and keeps the
/// first one the device accepts.
- (void)defaultSetting {
    static const double kPreferredRates[] = {16000.0, 11025.0, 22050.0, 44100.0};
    const size_t rateCount = sizeof(kPreferredRates) / sizeof(kPreferredRates[0]);
    for (size_t idx = 0; idx < rateCount; idx++) {
        if ([self prepareRecord:kPreferredRates[idx]] == noErr) {
            self.sampleRate = kPreferredRates[idx];
            break;
        }
    }
}

/// Configures the shared audio session and the RemoteIO unit for stereo,
/// NON-interleaved, signed 16-bit PCM capture at `sampleRate` (one
/// AudioBuffer per channel — left and right earbuds).
/// @param sampleRate Desired hardware sample rate in Hz.
/// @return noErr on success; otherwise the first failing Core Audio status.
- (OSStatus)prepareRecord:(double)sampleRate {
    OSStatus status = noErr;

    // Record + playback, mixing, Bluetooth HFP/A2DP, AirPlay, speaker default.
    NSError *error;
    AVAudioSessionCategoryOptions options = AVAudioSessionCategoryOptionMixWithOthers
        | AVAudioSessionCategoryOptionAllowBluetooth
        | AVAudioSessionCategoryOptionDefaultToSpeaker
        | AVAudioSessionCategoryOptionAllowAirPlay
        | AVAudioSessionCategoryOptionAllowBluetoothA2DP;

    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord
                                     withOptions:options
                                           error:&error];
    [[AVAudioSession sharedInstance] setActive:YES error:&error];

    if (!audioComponentInitialized) {
        audioComponentInitialized = YES;

        // Describe and instantiate the RemoteIO unit (done only once).
        AudioComponentDescription audioComponentDescription;
        audioComponentDescription.componentType = kAudioUnitType_Output;
        audioComponentDescription.componentSubType = kAudioUnitSubType_RemoteIO;
        audioComponentDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
        audioComponentDescription.componentFlags = 0;
        audioComponentDescription.componentFlagsMask = 0;

        AudioComponent remoteIOComponent = AudioComponentFindNext(NULL, &audioComponentDescription);
        status = AudioComponentInstanceNew(remoteIOComponent, &(self->audioUnit));
        if (CheckError(status, "Couldn't get RemoteIO unit instance")) {
            return status;
        }
    }

    UInt32 oneFlag = 1;
    AudioUnitElement bus0 = 0; // output element (speaker side)
    AudioUnitElement bus1 = 1; // input element (microphone side)

    // Enable recording on the input element. (The original carried a disabled
    // `if ((NO))` branch that enabled playback output; that unreachable code
    // has been removed.)
    status = AudioUnitSetProperty(self->audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  bus1,
                                  &oneFlag,
                                  sizeof(oneFlag));
    if (CheckError(status, "Couldn't enable RemoteIO input")) {
        return status;
    }

    // Stereo, non-interleaved, signed 16-bit PCM: each channel arrives in its
    // own AudioBuffer.
    // FIX: for a non-interleaved ASBD, mBytesPerFrame/mBytesPerPacket describe
    // ONE channel's data, not all channels. The original multiplied by the
    // channel count (4 bytes/frame), mis-describing every buffer.
    AudioStreamBasicDescription asbd = {0};
    asbd.mSampleRate = sampleRate;
    asbd.mFormatID = kAudioFormatLinearPCM;
    asbd.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved;
    asbd.mChannelsPerFrame = 2;                 // left + right
    asbd.mBitsPerChannel = 16;
    asbd.mFramesPerPacket = 1;
    asbd.mBytesPerFrame = asbd.mBitsPerChannel / 8;                  // 2 bytes per channel
    asbd.mBytesPerPacket = asbd.mFramesPerPacket * asbd.mBytesPerFrame;

    // Set format for output (bus 0) on the RemoteIO's input scope.
    status = AudioUnitSetProperty(self->audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  bus0,
                                  &asbd,
                                  sizeof(asbd));
    if (CheckError(status, "Couldn't set the ASBD for RemoteIO on input scope/bus 0")) {
        return status;
    }

    // Set format for mic input (bus 1) on the RemoteIO's output scope.
    status = AudioUnitSetProperty(self->audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  bus1,
                                  &asbd,
                                  sizeof(asbd));
    if (CheckError(status, "Couldn't set the ASBD for RemoteIO on output scope/bus 1")) {
        return status;
    }

    // Install the recording callback on the input element.
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = inputCallBackFun;
    callbackStruct.inputProcRefCon = (__bridge void *)self;
    status = AudioUnitSetProperty(self->audioUnit,
                                  kAudioOutputUnitProperty_SetInputCallback,
                                  kAudioUnitScope_Global,
                                  bus1,
                                  &callbackStruct,
                                  sizeof(callbackStruct));
    // FIX: the message said "bus 0" but the callback is installed on bus 1.
    if (CheckError(status, "Couldn't set RemoteIO's input callback on bus 1")) {
        return status;
    }

    // Ready to start.
    status = AudioUnitInitialize(self->audioUnit);
    if (CheckError(status, "Couldn't initialize the RemoteIO unit")) {
        return status;
    }

    return status;
}

/// Starts capturing; removes any previous recording file first.
- (void)start {
    [self deleteAudioFile];
    // FIX: the failure message said "Stop" for a Start call.
    CheckError(AudioOutputUnitStart(audioUnit), "AudioOutputUnitStart failed");
    _isRecording = YES;
}

/// Stops capturing and clears the recording flag.
- (void)stop {
    OSStatus stopStatus = AudioOutputUnitStop(audioUnit);
    CheckError(stopStatus, "AudioOutputUnitStop failed");
    _isRecording = NO;
}

/// Deletes the previous recording file ("record.mp3" in Documents), if any.
- (void)deleteAudioFile {
    NSString *documentsDir = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
    NSString *recordingPath = [documentsDir stringByAppendingPathComponent:@"record.mp3"];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:recordingPath]) {
        [fileManager removeItemAtPath:recordingPath error:nil];
    }
}

// Dispose of the RemoteIO unit when the recorder is deallocated.
// NOTE(review): disposes without an explicit AudioOutputUnitStop /
// AudioUnitUninitialize — assumes the caller stopped recording first; confirm.
- (void)dealloc {
CheckError(AudioComponentInstanceDispose(audioUnit),
"AudioComponentInstanceDispose failed");
NSLog(@"UnitRecorder销毁");
}

/// Logs `operation` with a readable rendering of `error` and returns `error`
/// unchanged, so calls can be written as: if (CheckError(status, "...")) return status;
/// Core Audio statuses are often four printable ASCII chars packed big-endian
/// into an OSStatus (e.g. 'fmt?'); otherwise the raw integer is printed.
static OSStatus CheckError(OSStatus error, const char *operation) {
if (error == noErr) {
return error;
}
char errorString[20] = "";
// See if it appears to be a 4-char-code: place the big-endian bytes at
// indices 1..4, leaving room for surrounding quotes.
*(UInt32 *)(errorString + 1) = CFSwapInt32HostToBig(error);
if (isprint(errorString[1]) && isprint(errorString[2]) &&
isprint(errorString[3]) && isprint(errorString[4])) {
// Wrap the printable code in single quotes, e.g. 'fmt?'.
errorString[0] = errorString[5] = '\'';
errorString[6] = '\0';
} else {
// No, format it as an integer
sprintf(errorString, "%d", (int)error);
}
fprintf(stderr, "Error: %s (%s)\n", operation, errorString);
return error;
}

/// Render callback for the playback path. In this file it is only referenced
/// from a disabled `if ((NO))` branch in -prepareRecord:, so it never runs
/// unless that branch is re-enabled.
static OSStatus playbackCallback(void *inRefCon,
AudioUnitRenderActionFlags *ioActionFlags,
const AudioTimeStamp *inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberFrames,
AudioBufferList *ioData) {
OSStatus status = noErr;

// Notes: ioData contains buffers (may be more than one!)
// Fill them up as much as you can. Remember to set the size value in each buffer to match how
// much data is in the buffer.
ZDUnitRecorder *recorder = (__bridge ZDUnitRecorder *) inRefCon;

// Pull captured audio from the input element (bus 1) straight into ioData.
UInt32 bus1 = 1;
status = AudioUnitRender(recorder->audioUnit,
ioActionFlags,
inTimeStamp,
bus1,
inNumberFrames,
ioData);
CheckError(status, "Couldn't render from RemoteIO unit");
return status;
}

/// Input (microphone) render callback for the stereo non-interleaved
/// recorder: renders both channel buffers and forwards them to
/// bufferListBlock. Runs on the audio I/O thread.
static OSStatus inputCallBackFun(void *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp *inTimeStamp,
                                 UInt32 inBusNumber,
                                 UInt32 inNumberFrames,
                                 AudioBufferList * __nullable ioData)
{
    ZDUnitRecorder *recorder = (__bridge ZDUnitRecorder *)(inRefCon);

    // FIX: AudioBufferList's declared mBuffers array has room for ONE
    // AudioBuffer; the original wrote bufferList.mBuffers[1], overflowing the
    // stack struct. Reserve storage for two buffers explicitly (no heap
    // allocation — this runs on the real-time audio thread).
    char listStorage[offsetof(AudioBufferList, mBuffers) + 2 * sizeof(AudioBuffer)];
    AudioBufferList *bufferList = (AudioBufferList *)listStorage;
    bufferList->mNumberBuffers = 2;
    for (UInt32 i = 0; i < 2; i++) {
        // FIX: each non-interleaved buffer carries ONE channel, so
        // mNumberChannels is 1 (the original said 2). mData == NULL asks
        // AudioUnitRender to supply its own memory for this cycle.
        bufferList->mBuffers[i].mNumberChannels = 1;
        bufferList->mBuffers[i].mData = NULL;
        bufferList->mBuffers[i].mDataByteSize = 0;
    }

    OSStatus status = AudioUnitRender(recorder->audioUnit,
                                      ioActionFlags,
                                      inTimeStamp,
                                      inBusNumber,
                                      inNumberFrames,
                                      bufferList);
    // FIX: the render status was ignored; don't hand unfilled buffers to the
    // client on failure.
    if (CheckError(status, "Couldn't render from RemoteIO unit")) {
        return status;
    }
    if (recorder.bufferListBlock) {
        recorder.bufferListBlock(bufferList);
    }

    return noErr;
}

@end

如何使用?

#import "ViewController.h"

// Private: the stereo recorder instance (created lazily by the getter).
@interface ViewController ()

@property (strong, nonatomic) ZDUnitRecorder *recorder;

@end

/// Toggles recording on any touch.
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    // FIX: forward to super — lifecycle/responder overrides should call super.
    [super touchesBegan:touches withEvent:event];
    if (self.recorder.isRecording) {
        [self.recorder stop];
    } else {
        [self.recorder start];
    }
}

#pragma mark - Lazy Load
/// Lazily creates the stereo recorder and installs the capture callback.
- (ZDUnitRecorder *)recorder {
    if (!_recorder) {
        _recorder = [[ZDUnitRecorder alloc] init];
        // FIX: the block is copied by _recorder, which self retains, so a
        // strong capture of self here is a retain cycle
        // (self -> _recorder -> block -> self). Use the weak/strong dance.
        __weak typeof(self) weakSelf = self;
        _recorder.bufferListBlock = ^(AudioBufferList * _Nonnull bufferList) {
            __strong typeof(weakSelf) strongSelf = weakSelf;
            if (!strongSelf) return;
            AudioBuffer left = bufferList->mBuffers[0];  // left-earbud buffer
            AudioBuffer right = bufferList->mBuffers[1]; // right-earbud buffer
            NSData *leftData = [NSData dataWithBytes:left.mData length:left.mDataByteSize];
            NSData *rightData = [NSData dataWithBytes:right.mData length:right.mDataByteSize];
            // Persist to sandbox or play back — whatever the business needs.
            // (MP3/AAC conversion is out of scope here.)
            [strongSelf handleAudioData:leftData rightData:rightData];
        };
    }
    return _recorder;
}

//****//
Best Regard!
生命不止,奋斗不息

//****//

-------------本文结束感谢您的阅读-------------
坚持原创技术分享,您的支持将鼓励我继续创作!

欢迎关注我的其它发布渠道