之前做这块内容时,查阅了很多博客,实现过程中踩过很多坑,希望记录下来帮助更多的开发者避免踩坑、白白浪费大好时光。
需求
1. 收到推送消息播放音频,支持合成;
2. 离线:用户未打开APP或者杀死程序,可接收推送消息;
3. 息屏:用户启动APP在锁屏状态下,可接收推送消息;
4. 是语音播报消息时,不展示推送弹窗;
前期准备
1.配置证书
创建项目的推送证书、APP Groups(语音文件数据共享);
2.友盟集成
集成友盟推送(或者极光推送)本文使用的是友盟推送;
3.项目中配置
a.主程序配置如图
b.扩展服务配置如图
4.重点配置友盟推送消息
a.payload 消息JSON结构
{
"aps" : {
"alert" : {
"title" : "Game Request", // notification title
"body" : "Bob wants to play poker", // notification body
},
"badge" : 5, // app icon badge count
"sound" : "binggo.caf", // notification sound
"content-available" : 1,
"mutable-content" : 1, // must be 1, otherwise the service extension never receives the push
},
}
b. 测试友盟推送时一定要带配置"mutable-content" : 1 ,并且位置一定要写对;
友盟语音播报推送消息一定要配置在内容链接那个位置,否则UNNotificationServiceExtension收不到推送消息;
语音播报功能实现
1. 注册通知及获取设备号
AppDelegate.m 文件
///设备号获取
/// Called when APNs registration succeeds.
/// Forwards the raw token to the UMeng SDK, converts it to a lowercase hex
/// string, and caches the 64-character token for later server registration.
-(void)application:(UIApplication *)application didRegisterForRemoteNotificationsWithDeviceToken:(NSData *)deviceToken
{
    // Register the token with UMeng push; keep the current badge untouched.
    [UMessage registerDeviceToken:deviceToken];
    [UMessage setBadgeClear:NO];
    if (![deviceToken isKindOfClass:[NSData class]]) {
        // Defensive: bail out if the system hands us something unexpected.
        return;
    }
    // Convert the token byte-by-byte. This works on every iOS version,
    // unlike `[NSString stringWithFormat:@"%@", deviceToken]`, whose output
    // format changed with the Xcode 11 / iOS 13 SDK and no longer yields a
    // clean hex dump. It also avoids assuming the token is exactly 32 bytes
    // the way the old %08x-times-8 formatting did.
    const unsigned char *tokenBytes = (const unsigned char *)deviceToken.bytes;
    NSMutableString *hexToken = [NSMutableString stringWithCapacity:deviceToken.length * 2];
    for (NSUInteger i = 0; i < deviceToken.length; i++) {
        [hexToken appendFormat:@"%02x", tokenBytes[i]];
    }
    NSString *strToken = [hexToken copy];
    ZFLog(@"deviceToken= %@", strToken);
    [ZFDevice Instance].device = strToken;
    // A standard APNs token is 32 bytes -> 64 hex characters; only persist
    // tokens that look well-formed.
    if (strToken.length == 64) {
        [ZFUserDefaults setObject:strToken forKey:kPHONE_DEVICE_TOKEN];
        [ZFUserDefaults synchronize];
    }
}
#pragma mark - UNUserNotificationCenterDelegate
/// iOS 10+: called when a notification arrives while the app is in the
/// foreground. Ordinary pushes (isVoice == "0") are shown normally;
/// voice-broadcast pushes are fully suppressed so no banner/sound appears.
- (void)userNotificationCenter:(UNUserNotificationCenter *)center willPresentNotification:(UNNotification *)notification withCompletionHandler:(void (^)(UNNotificationPresentationOptions))completionHandler API_AVAILABLE(ios(10.0)){
    NSDictionary *userInfo = notification.request.content.userInfo;
    if ([notification.request.trigger isKindOfClass:[UNPushNotificationTrigger class]]) {
        // Remote push received in the foreground: suppress UMeng's built-in
        // alert, keep the badge, and let the UMeng SDK record the message
        // (required for its delivery/click statistics).
        [UMessage setAutoAlert:NO];
        [UMessage setBadgeClear:NO];
        [UMessage didReceiveRemoteNotification:userInfo];
    }
    // The server may send `isVoice` as a string OR a number; normalize it to
    // a string so `isEqualToString:` is never sent to an NSNumber (crash).
    id rawIsVoice = userInfo[@"isVoice"];
    NSString *isVoice = rawIsVoice ? [NSString stringWithFormat:@"%@", rawIsVoice] : nil;
    if ([isVoice isEqualToString:@"0"]) {
        // Ordinary push while in the foreground: show alert, sound and badge.
        completionHandler(UNNotificationPresentationOptionSound |
                          UNNotificationPresentationOptionBadge |
                          UNNotificationPresentationOptionAlert);
    } else {
        // Voice broadcast: present nothing at all.
        completionHandler(UNNotificationPresentationOptionNone);
    }
}
2. 新建Notification Service Extension 通知服务扩展,处理语音播报
a.分发扩展消息
/// Service-extension entry point. Decides whether this push is an ordinary
/// notification (delivered unchanged immediately) or a voice-broadcast push
/// (the mp3 clips are looked up and handed to the version-specific player).
- (void)didReceiveNotificationRequest:(UNNotificationRequest *)request withContentHandler:(void (^)(UNNotificationContent * _Nonnull))contentHandler {
self.contentHandler = contentHandler;
self.bestAttemptContent = [request.content mutableCopy];
//Step1: parse the push payload; "isVoice" == "0" marks a non-voice push
NSString *isVoice = self.bestAttemptContent.userInfo[@"isVoice"];
NSLog(@"收到语音播报消息:isVoice=%@", isVoice);
if ([isVoice isEqualToString:@"0"]) {
// Not a voice push (also taken when isVoice is absent/non-string would be
// safer to check — NOTE(review): a nil isVoice falls into the voice branch).
self.contentHandler(self.bestAttemptContent);
} else {
// Map the broadcast text in userInfo to the list of bundled mp3 files
// that will be concatenated/played.
NSArray *mp3Arr = [self getMp3Files:self.bestAttemptContent.userInfo];
if (@available(iOS 15, *)) {
// iOS 15+ path: synthesize one m4a and attach it as a custom sound
// (see v15_pushLocalNotificationToApp:...). NOTE(review): the elided
// code must still call self.contentHandler before the extension's
// execution time expires — confirm in the full implementation.
} else {
// iOS 12.0–15.0 path: chain local notifications, one per clip
// (see pushLocalNotificationToApp:...).
}
}
}
b. ios15.0合成音频语音播报的代码
#define kFileManager [NSFileManager defaultManager]
#define kAppGroupID @"group.com.xxx.app"
/**
* iOS15.0 以后版本 针对不播报语音问题新增
* 音频合并
* @param index 从第几个资源开始合成,默认0
* @param tmparray 资源文件数组
* @params completionBlock 执行完成的回调
*/
/**
 * iOS 15.0+ voice broadcast. Concatenates the given bundled mp3 clips into a
 * single m4a file, copies it into the App Group's Library/Sounds directory
 * (the only location both app and extension can read, and where
 * UNNotificationSound looks up named sounds), then attaches it to
 * `bestAttemptContent` as the notification's custom sound.
 *
 * @param index              Start index (kept for signature compatibility with
 *                           the pre-iOS-15 path; the whole array is merged).
 * @param tmparray           Bundled mp3 resource names, without extension.
 * @param bestAttemptContent The mutable content the extension will deliver.
 * @param completionBlock    Called with (success, total audio seconds).
 */
-(void)v15_pushLocalNotificationToApp:(NSInteger)index withArray:(NSArray *)tmparray bestAttemptContent:(nonnull UNMutableNotificationContent *)bestAttemptContent completed:(nonnull GHNotificationPushCompletionBlock)completionBlock
{
    /************************ merge audio and play *****************************/
    AVMutableComposition *audioCompostion = [AVMutableComposition composition];
    CMTime insertAt = kCMTimeZero;       // running insertion point (sum of prior durations)
    float audioDuration = 0.0f;          // total length in seconds, reported to the caller
    for (NSString *soundStr in tmparray)
    {
        NSString *audioPath = [[NSBundle mainBundle] pathForResource:soundStr ofType:@"mp3"];
        if (audioPath == nil) {
            // Missing bundle resource: skip it instead of crashing on
            // -fileURLWithPath:nil (the original passed the path unchecked).
            NSLog(@"voice clip missing from bundle: %@", soundStr);
            continue;
        }
        AVURLAsset *audioAsset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:audioPath]];
        AVAssetTrack *sourceTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        if (sourceTrack == nil) {
            continue; // file exists but carries no audio track
        }
        AVMutableCompositionTrack *destTrack = [audioCompostion addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        NSError *insertError = nil;
        // Append this clip immediately after the previous one.
        if (![destTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration) ofTrack:sourceTrack atTime:insertAt error:&insertError]) {
            NSLog(@"insertTimeRange failed for %@: %@", soundStr, insertError);
            continue;
        }
        insertAt = CMTimeAdd(insertAt, audioAsset.duration);
        audioDuration += CMTimeGetSeconds(audioAsset.duration);
    }
    NSFileManager *fileManager = kFileManager;
    fileManager.delegate = self;
    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:audioCompostion presetName:AVAssetExportPresetAppleM4A];
    // Export first into this process's caches directory, then copy into the group.
    NSString *outPutFilePath = [[self.v15_filePath stringByDeletingLastPathComponent] stringByAppendingPathComponent:@"paySound.m4a"];
    if ([fileManager fileExistsAtPath:outPutFilePath])
    {
        [fileManager removeItemAtPath:outPutFilePath error:nil];
    }
    session.outputURL = [NSURL fileURLWithPath:outPutFilePath];
    session.outputFileType = AVFileTypeAppleM4A; // same UTI as @"com.apple.m4a-audio"
    session.shouldOptimizeForNetworkUse = YES;
    [session exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            if (session.status != AVAssetExportSessionStatusCompleted) {
                if (session.status == AVAssetExportSessionStatusFailed) {
                    NSLog(@"合成失败:%@", [[session error] description]);
                }
                completionBlock(NO, audioDuration);
                return;
            }
            // App Group shared container; the identifier must match the one
            // created in the developer portal.
            NSURL *groupURL = [fileManager containerURLForSecurityApplicationGroupIdentifier:kAppGroupID];
            // Library/Sounds must exist before the copy, otherwise the copy
            // fails complaining the destination is not a directory.
            NSURL *soundsDirURL = [groupURL URLByAppendingPathComponent:@"Library/Sounds"];
            NSError *dirError = nil;
            if (![fileManager createDirectoryAtURL:soundsDirURL withIntermediateDirectories:YES attributes:nil error:&dirError]) {
                NSLog(@"create Library/Sounds failed: %@", dirError);
            }
            NSURL *fileURL = [soundsDirURL URLByAppendingPathComponent:@"sound.m4a"];
            // Remove the previous broadcast file so the copy cannot fail on an
            // existing destination.
            if ([fileManager fileExistsAtPath:fileURL.path]) {
                [fileManager removeItemAtURL:fileURL error:nil];
            }
            NSError *copyError = nil;
            BOOL copyRes = [fileManager copyItemAtPath:outPutFilePath toPath:fileURL.path error:&copyError];
            if (copyRes) {
                // iOS 15 only plays a custom sound when the delivered content
                // has a title or body. (The original set the body on a local
                // UNMutableNotificationContent that was never delivered.)
                if (bestAttemptContent.body.length == 0) {
                    bestAttemptContent.body = [NSString localizedUserNotificationStringForKey:@"语音播报提醒" arguments:nil];
                }
#if __IPHONE_OS_VERSION_MAX_ALLOWED >= 150000
                if (@available(iOS 15.0, *)) {
                    bestAttemptContent.interruptionLevel = UNNotificationInterruptionLevelTimeSensitive;
                }
#endif
                bestAttemptContent.sound = [UNNotificationSound soundNamed:@"sound.m4a"];
            } else {
                NSLog(@"copy to app group failed: %@", copyError);
            }
            completionBlock(copyRes, audioDuration);
        });
    }];
    /************************ merge audio and play *****************************/
}
/// Lazily builds .../Caches/user/testAudio.aac. The directory part is what the
/// export step actually uses (via -stringByDeletingLastPathComponent); the
/// filename is only a placeholder.
- (NSString *)v15_filePath
{
    if (!_v15_filePath)
    {
        NSString *cachesDir = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES).firstObject;
        NSString *userDir = [cachesDir stringByAppendingPathComponent:@"user"];
        NSError *error = nil;
        // withIntermediateDirectories:YES also returns YES when the directory
        // already exists, so a failure here is genuinely exceptional.
        if (![kFileManager createDirectoryAtPath:userDir withIntermediateDirectories:YES attributes:nil error:&error])
        {
            NSLog(@"create cache dir failed: %@", error);
        }
        // Always append the filename. The original only appended it on
        // success, leaving the ivar pointing at the bare directory on failure,
        // which made callers' -stringByDeletingLastPathComponent strip the
        // wrong path component.
        _v15_filePath = [userDir stringByAppendingPathComponent:@"testAudio.aac"];
    }
    return _v15_filePath;
}
c. ios12.1~15.0之前合成音频语音播报的代码
//循环调用本地通知,播放音频文件
/**
* iOS12.0~15.0 播报语音
* 音频合并
* @param index 从第几个资源开始合并,默认0
* @param tmparray 资源文件数组
* @params completionBlock 执行完成的回调
*/
/**
 * iOS 12.0–15.0 voice broadcast. Schedules one local notification per mp3
 * clip and recurses after each clip's duration so the sounds play in order.
 *
 * @param index     Resource index to start from (pass 0 for the whole list).
 * @param tmparray  Bundled mp3 resource names, without extension.
 * @param completed Called once every clip has been scheduled.
 */
-(void)pushLocalNotificationToApp:(NSInteger)index withArray:(NSArray *)tmparray completed:(GHNotificationPushCompleted)completed{
    if (index >= [tmparray count]) {
        completed();
        return;
    }
    NSString *mp3Name = [NSString stringWithFormat:@"%@", tmparray[index]];
    NSString *audioFileURL = [[NSBundle mainBundle] pathForResource:mp3Name ofType:@"mp3"];
    if (audioFileURL == nil) {
        // The resource is missing from the bundle: fall back to the default
        // clip instead of crashing on -fileURLWithPath:nil. (The original's
        // `if (!mp3Name)` fallback was dead code — stringWithFormat: never
        // returns nil — while this real failure mode was unguarded.)
        mp3Name = @"money";
        audioFileURL = [[NSBundle mainBundle] pathForResource:mp3Name ofType:@"mp3"];
        if (audioFileURL == nil) {
            // Even the fallback clip is missing: skip straight to the next one.
            [self pushLocalNotificationToApp:index + 1 withArray:tmparray completed:completed];
            return;
        }
    }
    // The clip's duration determines how long to wait before the next clip.
    AVURLAsset *audioAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:audioFileURL] options:nil];
    float audioDurationSeconds = CMTimeGetSeconds(audioAsset.duration);
    UNMutableNotificationContent *content = [[UNMutableNotificationContent alloc] init];
    content.sound = [UNNotificationSound soundNamed:[NSString stringWithFormat:@"%@.mp3", mp3Name]];
    // repeats:YES would require an interval >= 60s; a short one-shot trigger
    // fires the notification (and its sound) almost immediately.
    UNTimeIntervalNotificationTrigger *trigger = [UNTimeIntervalNotificationTrigger triggerWithTimeInterval:0.1 repeats:NO];
    // The identifier can later be used to remove or update the request.
    NSString *identifier = [NSString stringWithFormat:@"%@%f", @"noticeId", audioDurationSeconds];
    UNNotificationRequest *request = [UNNotificationRequest requestWithIdentifier:identifier content:content trigger:trigger];
    UNUserNotificationCenter *center = [UNUserNotificationCenter currentNotificationCenter];
    [center addNotificationRequest:request withCompletionHandler:^(NSError *_Nullable error) {
        if (error) {
            NSLog(@"addNotificationRequest failed: %@", error);
        }
        // Recurse once this clip has had time to finish (+0.15s gap).
        float delay = audioDurationSeconds + 0.15f;
        dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delay * NSEC_PER_SEC));
        dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
            [self pushLocalNotificationToApp:index + 1 withArray:tmparray completed:completed];
        });
    }];
}
真机测试,开启推送通知,关闭静音模式!!!
至此,开发完成!
总结
1. iOS12.1之前扩展服务Notification Service Extension中使用AVAudioPlayer失效;
2. 项目target-Capabilities-Background Modes中要记得勾选Background fetch
和Remote notifications
;
3. 推送消息内一定要包含"mutable-content"=1;
4. 测试推送消息,记得打开通知、关闭静音模式、加大音量;
5. 关于服务端配置production_mode:当设置为false时,表示应用程序处于非生产模式下。可以理解为:真机调试时production_mode=false,打包提测或者发布production_mode=true;