
iOS: Sending Voice over GCDAsyncSocket, and Transcoding WAV Recordings to AMR

2016-06-16 17:06
1. Create a new project and add a Podfile with the CocoaAsyncSocket dependency

target 'MyVedio' do
pod 'CocoaAsyncSocket'
end


2. Add the VoiceConvert audio transcoding files to the project (download). The upload of these files didn't go through here, unfortunately, so you will have to download them yourself.

3. Import the required headers

#import "MyVedioController.h"
#import <GCDAsyncSocket.h>
#import <ifaddrs.h>
#import <arpa/inet.h>
#import "VoiceConvert/VoiceConverter.h"
@interface MyVedioController ()<GCDAsyncSocketDelegate>

@property (nonatomic,strong) AVAudioRecorder *recoder;
@property (nonatomic,strong) NSString *filePath;//wav录音路径
@property (nonatomic,strong) NSString *fileSendPath;//发送录音路径
@property (nonatomic,strong) AVAudioPlayer *audioPlayer;
@property (nonatomic,strong) GCDAsyncSocket *listenSocket;//主机
@property (nonatomic,strong) GCDAsyncSocket *clientSocket;//客户机
@property (nonnull,strong) NSMutableData *amrData;//录音总数据
@end
NSMutableArray *array;//录音列表
NSInteger countLenth=0;//记录socket传过来的录音总大小
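
The post never shows where these globals are set up or where the listening socket is started; a plausible viewDidLoad for the receiving side (an assumption, not from the original) might look like this:

- (void)viewDidLoad
{
    [super viewDidLoad];
    array = [NSMutableArray array];//otherwise addObject: in the stop method below silently does nothing
    countLenth = 0;
    [self socketServer];//start listening for incoming voice data (step 5)
}
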
4. Start recording

- (void)BeginAudio
{
    [self mySetting];
    NSString *name = @"vedio.wav";
    NSString *file = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject] stringByAppendingPathComponent:name];
    NSURL *url = [NSURL fileURLWithPath:file];
    self.filePath = file;
    NSLog(@"=====%@", file);
//    // Recording settings required for the m4a format
//    NSMutableDictionary *settings = [NSMutableDictionary dictionary];
//    [settings setValue:[NSNumber numberWithInteger:kAudioFormatAppleLossless] forKey:AVFormatIDKey];
//    [settings setValue:[NSNumber numberWithFloat:44100.0f] forKey:AVSampleRateKey];
//    [settings setValue:[NSNumber numberWithInteger:1] forKey:AVNumberOfChannelsKey];
//    [settings setValue:[NSNumber numberWithInteger:AVAudioQualityLow] forKey:AVEncoderAudioQualityKey];
//    // Recording settings dictionary for the WAV format
//    NSMutableDictionary *dic = [NSMutableDictionary dictionary];
//    dic[AVFormatIDKey] = @(kAudioFormatLinearPCM);
//    dic[AVSampleRateKey] = @(8000.0);
//    dic[AVNumberOfChannelsKey] = @(1);
//    dic[AVLinearPCMBitDepthKey] = @(16);
    self.recoder = [[AVAudioRecorder alloc] initWithURL:url settings:[VoiceConverter GetAudioRecorderSettingDict] error:nil];
    self.recoder.meteringEnabled = YES;
    [self.recoder prepareToRecord];//prepare to record
    [self.recoder record];//start recording
}
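
A sketch of how BeginAudio and the stop method below might be driven from a push-to-talk style button; the recordButton outlet is an assumption, not part of the original code:

//hold the button to record, release it to stop and convert
[self.recordButton addTarget:self action:@selector(BeginAudio) forControlEvents:UIControlEventTouchDown];
[self.recordButton addTarget:self action:@selector(stop) forControlEvents:UIControlEventTouchUpInside | UIControlEventTouchUpOutside];
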
- (void)stop
{
    [array addObject:self.filePath];
    if (self.recoder != nil)
    {
        [self.recoder stop];
        self.fileSendPath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject] stringByAppendingPathComponent:@"vedio.amr"];
        //after stopping, convert the WAV recording to AMR
        int res = [VoiceConverter ConvertWavToAmr:self.filePath amrSavePath:self.fileSendPath];
        if (res == 1)
        {
            NSLog(@"to amr success");
        }
        else
        {
            NSLog(@"to amr fail");
        }
    }
}
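
The original post only shows the receiving side. A rough sketch of what the sending side might look like, matching the protocol the server in step 5 expects (first a UTF-8 string with the total byte count, then the raw AMR bytes). The sendSocket property, the hard-coded address 192.168.1.100, and the method name sendRecording are assumptions, not part of the original code:

- (void)sendRecording
{
    //sendSocket is assumed to be a retained GCDAsyncSocket property on this controller
    self.sendSocket = [[GCDAsyncSocket alloc] initWithDelegate:self delegateQueue:dispatch_get_main_queue()];
    NSError *err = nil;
    //connect to the listening device (use the address printed by getIPAddress on that device)
    if (![self.sendSocket connectToHost:@"192.168.1.100" onPort:5000 error:&err])
    {
        NSLog(@"connect error: %@", err);
        return;
    }
    NSData *amr = [NSData dataWithContentsOfFile:self.fileSendPath];
    //first write the total size as a UTF-8 string, then the AMR data itself
    NSString *lengthString = [NSString stringWithFormat:@"%lu", (unsigned long)amr.length];
    [self.sendSocket writeData:[lengthString dataUsingEncoding:NSUTF8StringEncoding] withTimeout:-1 tag:0];
    [self.sendSocket writeData:amr withTimeout:-1 tag:1];
}

Note that TCP gives no guarantee that these two writes arrive as two separate reads; the receiving code in step 5 happens to rely on that, so for anything beyond a demo a fixed-length header or delimiter-based framing would be safer.
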
<pre name="code" class="objc">- (void) mySetting
{
if ([[[UIDevice currentDevice] systemVersion] compare:@"7.0"] != NSOrderedAscending)
{
//7.0第一次运行会提示,是否允许使用麦克风
AVAudioSession *session = [AVAudioSession sharedInstance];
NSError *sessionError;
//AVAudioSessionCategoryPlayAndRecord用于录音和播放
[session setCategory:AVAudioSessionCategoryPlayAndRecord error:&sessionError];
if(session == nil)
NSLog(@"Error creating session: %@", [sessionError description]);
else
[session setActive:YES error:nil];
}
}
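
On iOS 7 and later the microphone permission can also be requested up front instead of waiting for the first recording attempt; a minimal sketch (this call is not in the original post):

[[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
    if (!granted)
    {
        NSLog(@"microphone permission denied");
    }
}];
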



5. Sending and receiving the recording over the socket

- (void)socketServer
{
    self.listenSocket = [[GCDAsyncSocket alloc] initWithDelegate:self delegateQueue:dispatch_get_main_queue()];
    NSError *err;
    if (![self.listenSocket acceptOnPort:5000 error:&err])
    {
        NSLog(@"error:%@", err);
        return;
    }
    NSLog(@"socket: now listening, ip:%@", [self getIPAddress]);
}
#pragma mark - GCDAsyncSocketDelegate
- (void)socket:(GCDAsyncSocket *)sock didAcceptNewSocket:(GCDAsyncSocket *)newSocket
{
    self.clientSocket = newSocket;
    NSLog(@"connection accepted");
    NSLog(@"connected client: %@ port: %d", self.clientSocket.connectedHost, self.clientSocket.connectedPort);
    [self.clientSocket readDataWithTimeout:-1 tag:0];
    self.amrData = [NSMutableData data];
}
- (void)socket:(GCDAsyncSocket *)sock didReadData:(NSData *)data withTag:(long)tag
{
    NSLog(@"received %lu bytes", (unsigned long)[data length]);//the first read delivers the total size of the recording file
    if (countLenth == 0)
    {
        countLenth = [[[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding] integerValue];
        [self.clientSocket readDataWithTimeout:-1 tag:0];
    }
    else
    {
        [self.amrData appendData:data];
        if (self.amrData.length < countLenth)//less than the announced total has arrived, so keep reading
        {
            [self.clientSocket readDataWithTimeout:-1 tag:0];//read the next chunk of the recording
        }
        NSLog(@"data=%lu, length=%ld", (unsigned long)self.amrData.length, (long)countLenth);
    }
    if (countLenth == self.amrData.length)//the recording is complete: write the AMR file to disk, then convert it to WAV and play it
    {
        NSString *saveAMRPath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject] stringByAppendingPathComponent:@"recieve.amr"];
        NSString *saveWAVPath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject] stringByAppendingPathComponent:@"recieve.wav"];
        [self.amrData writeToURL:[NSURL fileURLWithPath:saveAMRPath] atomically:YES];//write the accumulated data, not just the last chunk
        countLenth = 0;//reset state so another recording can be received
        self.amrData = [NSMutableData data];
        int res = [VoiceConverter ConvertAmrToWav:saveAMRPath wavSavePath:saveWAVPath];
        if (res == 1)
        {
            NSLog(@"%@", saveWAVPath);
            [self MyBoFang:saveWAVPath];
        }
        else
        {
            NSLog(@"AMR-to-WAV conversion failed");
        }
    }
}
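
The playback method MyBoFang: is called above but never shown in the post; a minimal sketch using the audioPlayer property declared earlier, assuming plain AVAudioPlayer playback of the converted WAV file:

- (void)MyBoFang:(NSString *)wavPath
{
    NSError *err = nil;
    self.audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:wavPath] error:&err];
    if (err != nil)
    {
        NSLog(@"player error: %@", err);
        return;
    }
    [self.audioPlayer prepareToPlay];
    [self.audioPlayer play];
}
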
- (void)socket:(GCDAsyncSocket *)sock didWriteDataWithTag:(long)tag
{
    NSLog(@"send finished");
}
- (NSString *)getIPAddress//get the listening device's IP address
{
    NSString *address = @"error";
    struct ifaddrs *interfaces = NULL;
    struct ifaddrs *temp_addr = NULL;
    int success = 0;

    // retrieve the current interfaces - returns 0 on success
    success = getifaddrs(&interfaces);
    if (success == 0) {
        // Loop through linked list of interfaces
        temp_addr = interfaces;
        while (temp_addr != NULL) {
            if (temp_addr->ifa_addr->sa_family == AF_INET) {
                // Check if interface is en0 which is the wifi connection on the iPhone
                if ([[NSString stringWithUTF8String:temp_addr->ifa_name] isEqualToString:@"en0"]) {
                    // Get NSString from C String
                    address = [NSString stringWithUTF8String:inet_ntoa(((struct sockaddr_in *)temp_addr->ifa_addr)->sin_addr)];
                }
            }
            temp_addr = temp_addr->ifa_next;
        }
    }

    // Free memory
    freeifaddrs(interfaces);

    return address;
}