// ---- Tail of the file-based initializer (the method opens above this chunk). ----
// get the data format of the file
size = sizeof(dataFormat);
AudioFileGetProperty(audioFile, kAudioFilePropertyDataFormat, &size, &dataFormat);
// create a new playback queue using the specified data format and buffer callback
// (nil run loop / nil mode => the queue invokes BufferCallback on its own internal thread)
AudioQueueNewOutput(&dataFormat, BufferCallback, self, nil, nil, 0, &queue);
// calculate number of packets to read and allocate space for packet descriptions if needed
// mBytesPerPacket or mFramesPerPacket of 0 means variable bitrate (VBR) data
if (dataFormat.mBytesPerPacket == 0 || dataFormat.mFramesPerPacket == 0)
{
// Ask Core Audio to give us a conservative estimate of the largest packet
size = sizeof(maxPacketSize);
AudioFileGetProperty(audioFile, kAudioFilePropertyPacketSizeUpperBound, &size, &maxPacketSize);
// clamp so that at least one packet always fits in a buffer
if (maxPacketSize > kxxxBufferSizeBytes)
{
maxPacketSize = kxxxBufferSizeBytes;
NSLog(@"Size out of bounds!");
}
// calculate how many packs to read
numPacketsToRead = kxxxBufferSizeBytes / maxPacketSize;
// will need a packet description for each packet to allocate space accordingly
packetDescs = malloc(sizeof(AudioStreamPacketDescription) * numPacketsToRead);
}
else
{
// constant bitrate
numPacketsToRead = kxxxBufferSizeBytes / dataFormat.mBytesPerPacket;
// don't need packet descriptions for CBR data
packetDescs = nil;
}
// see if file uses a magic cookie (a magic cookie is meta data which some formats use)
// GetPropertyInfo leaves `size` at 0 when the property is absent
AudioFileGetPropertyInfo(audioFile, kAudioFilePropertyMagicCookieData, &size, nil);
if (size > 0)
{
// copy the cookie data from the file into the audio queue
cookie = malloc(sizeof(char) * size);
AudioFileGetProperty(audioFile, kAudioFilePropertyMagicCookieData, &size, cookie);
AudioQueueSetProperty(queue, kAudioQueueProperty_MagicCookie, cookie, size);
free(cookie);
}
// we want to know when the playing state changes so we can properly dispose of the audio queue when it's done
AudioQueueAddPropertyListener(queue, kAudioQueueProperty_IsRunning, propertyListenerCallback, self);
// allocate and prime buffers with some data
packetIndex = 0;
for (i = 0; i < NUM_QUEUE_BUFFERS; i++)
{
// NOTE(review): every iteration allocates into the same `buffers` lvalue —
// if `buffers` is declared as an array of NUM_QUEUE_BUFFERS entries this
// probably should be &buffers[i] (and buffers[i] below); confirm against
// the ivar declaration, which is not visible in this chunk.
AudioQueueAllocateBuffer(queue, kxxxBufferSizeBytes, &buffers);
if ([self readPacketsIntoBuffer:buffers] == 0)
{
// this might happen if the file was so short that it needed less buffers than we planned on using
break;
}
}
// initial playback state flags
repeat = NO;
trackClosed = NO;
trackEnded = NO;
kxxxTrackActive = YES;
return self;
}
// Adjust the playback volume on the underlying audio queue.
// Silently ignored once the track has been closed (the queue is disposed).
- (void) setGain:(Float32) gain
{
    if (!trackClosed)
    {
        AudioQueueSetParameter(queue, kAudioQueueParam_Volume, gain);
    }
}
// Enable or disable looping: when set, reaching end-of-file rewinds and
// refills buffers from the start instead of letting the queue drain.
- (void) setRepeat:(BOOL) shouldRepeat
{
    repeat = shouldRepeat;
}
// Prime the queue's buffers and start playback.
// No-op once the track has been closed (the queue has been disposed).
- (void) play
{
    if (trackClosed)
        return;
    // Decode some frames ahead of time so playback can start without a glitch.
    OSStatus result = AudioQueuePrime(queue, 1, nil);
    if (result)
    {
        // Fix: include the OSStatus code — the old message discarded it,
        // making prime failures impossible to diagnose from the log.
        NSLog(@"play: error priming AudioQueue: %d", (int)result);
        return;
    }
    // nil start time => begin playing as soon as possible.
    AudioQueueStart(queue, nil);
}
// Begin streaming playback of the current URL on a dedicated background
// thread, leaving the calling (UI) thread free while -startPlay spins its
// run loop until the track finishes or fails.
- (void) playURL
{
    [NSThread detachNewThreadSelector:@selector(startPlay)
                             toTarget:self
                           withObject:nil];
}
// Stop streaming playback: close and release the HTTP read stream, then shut
// down the audio queue. Thread-sensitive — the exact statement order below is
// load-bearing (see the inline note), so do not reorder.
- (void) stopURL
{
if (stream)
{
// Close the network stream first so no more data arrives.
CFReadStreamClose(stream);
CFRelease(stream);
stream = nil;
// If the track already ended naturally, the queue teardown has been
// (or will be) handled by the property-listener path — nothing to do.
if (trackEnded)
{
return;
}
if (started)
{
//
// Set finished to true *before* we call stop. This is to handle our
// third thread...
// - This method is called from main (UI) thread
// - The AudioQueue thread (which owns the AudioQueue buffers and
// will delete them as soon as we call AudioQueueStop)
// - URL connection thread is copying data from AudioStream to
// AudioQueue buffer
// We set this flag to tell the URL connection thread to stop
// copying.
//
pthread_mutex_lock(&mutex2);
trackEnded = true;
// Synchronous stop (second arg true): returns once the queue is halted.
OSStatus err = AudioQueueStop(queue, true);
if (err) { NSLog(@"AudioQueueStop"); }
pthread_mutex_unlock(&mutex2);
// Wake any thread blocked waiting for a free buffer so it can observe
// trackEnded and exit.
pthread_mutex_lock(&mutex);
pthread_cond_signal(&cond);
pthread_mutex_unlock(&mutex);
}
else
{
// Queue never started: just mark the track ended and toggle isPlaying
// so KVO/property observers still see a play->stop transition.
trackEnded = true;
self.isPlaying = YES;
self.isPlaying = NO;
}
}
}
// Configure the audio session so playback continues when the device auto-locks.
// Fix: TARGET_OS_IPHONE is *always* defined by TargetConditionals.h (as 0 or 1
// depending on the platform), so it must be tested with #if — the original
// #ifdef evaluated true on every Apple platform, including the Mac.
- (void) setPlayingWhenAutoLock {
#if TARGET_OS_IPHONE
    // Set the audio session category so that we continue to play if the iPhone/iPod auto-locks.
    AudioSessionInitialize (NULL, // 'NULL' to use the default (main) run loop
                            NULL, // 'NULL' to use the default run loop mode
                            MyAudioSessionInterruptionListener, // a reference to your interruption callback
                            self // data to pass to your interruption listener callback
                            );
    // MediaPlayback keeps audio running under the screen lock / silent switch.
    UInt32 sessionCategory = kAudioSessionCategory_MediaPlayback;
    AudioSessionSetProperty (kAudioSessionProperty_AudioCategory, sizeof (sessionCategory), &sessionCategory);
    AudioSessionSetActive(true);
#endif
}
//
// Streams the track at `url` over HTTP and plays it through an AudioQueue.
// Runs on its own background thread (detached by -playURL): spins the run
// loop until playback finishes or fails, then tears everything down.
//
// Fixes vs. the original paste:
//  - Four method definitions were interleaved *inside* the do/while loop
//    (which cannot compile); they are restored as separate definitions below.
//  - TARGET_OS_IPHONE is tested with #if (it is always defined, as 0 or 1).
//  - The `goto cleanup` inside the cleanup section itself could loop forever
//    on a persistent error; replaced with log-and-continue.
//  - The CFReadStreamOpen failure path released `stream` without nil-ing it,
//    leaving a dangling pointer for the cleanup check.
//
- (void) startPlay
{
    // Balance the release at the bottom — keep ourselves alive for the
    // lifetime of this background thread.
    [self retain];
    //NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    //
    // Attempt to guess the file type from the URL. Reading the MIME type
    // from the CFReadStream would be a better approach since lots of
    // URL's don't have the right extension.
    //
    // If you have a fixed file-type, you may want to hardcode this.
    //
    AudioFileTypeID fileTypeHint = kAudioFileMP3Type;
    NSString *fileExtension = [[url path] pathExtension];
    if ([fileExtension isEqual:@"mp3"])
    {
        fileTypeHint = kAudioFileMP3Type;
    }
    else if ([fileExtension isEqual:@"wav"])
    {
        fileTypeHint = kAudioFileWAVEType;
    }
    else if ([fileExtension isEqual:@"aifc"])
    {
        fileTypeHint = kAudioFileAIFCType;
    }
    else if ([fileExtension isEqual:@"aiff"])
    {
        fileTypeHint = kAudioFileAIFFType;
    }
    else if ([fileExtension isEqual:@"m4a"])
    {
        fileTypeHint = kAudioFileM4AType;
    }
    else if ([fileExtension isEqual:@"mp4"])
    {
        fileTypeHint = kAudioFileMPEG4Type;
    }
    else if ([fileExtension isEqual:@"caf"])
    {
        fileTypeHint = kAudioFileCAFType;
    }
    else if ([fileExtension isEqual:@"aac"])
    {
        fileTypeHint = kAudioFileAAC_ADTSType;
    }
    // initialize a mutex and condition so that we can block on buffers in use.
    pthread_mutex_init(&mutex, NULL);
    pthread_cond_init(&cond, NULL);
    pthread_mutex_init(&mutex2, NULL);
    // create an audio file stream parser
    OSStatus err = AudioFileStreamOpen(self, MyPropertyListenerProc, MyPacketsProc, fileTypeHint, &audioFileStream);
    if (err) { NSLog(@"AudioFileStreamOpen"); goto cleanup; }
    //
    // Create the GET request
    //
    CFHTTPMessageRef message = CFHTTPMessageCreateRequest(NULL, (CFStringRef)@"GET", (CFURLRef)url, kCFHTTPVersion1_1);
    stream = CFReadStreamCreateForHTTPRequest(NULL, message);
    CFRelease(message);
    if (!CFReadStreamOpen(stream))
    {
        // nil out after releasing so the cleanup block doesn't double-release
        // a dangling pointer.
        CFRelease(stream);
        stream = nil;
        goto cleanup;
    }
    //
    // Set our callback function to receive the data
    //
    CFStreamClientContext context = {0, self, NULL, NULL, NULL};
    CFReadStreamSetClient(stream, kCFStreamEventHasBytesAvailable | kCFStreamEventErrorOccurred | kCFStreamEventEndEncountered, ReadStreamCallBack, &context);
    CFReadStreamScheduleWithRunLoop(stream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes);
    //
    // Process the run loop until playback is finished or failed.
    //
    do
    {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.25, false);
        if (failed)
        {
            [self stopURL];
#if TARGET_OS_IPHONE
            UIAlertView *alert = [[UIAlertView alloc] initWithTitle:NSLocalizedStringFromTable(@"Audio Error", @"Errors", nil) message:NSLocalizedStringFromTable(@"Attempt to play streaming audio failed.", @"Errors", nil) delegate:self cancelButtonTitle:@"OK" otherButtonTitles: nil];
            [alert performSelector:@selector(show) onThread:[NSThread mainThread] withObject:nil waitUntilDone:YES];
            [alert release];
#else
            NSAlert *alert = [NSAlert alertWithMessageText:NSLocalizedString(@"Audio Error", @"") defaultButton:NSLocalizedString(@"OK", @"") alternateButton:nil otherButton:nil informativeTextWithFormat:@"Attempt to play streaming audio failed."];
            [alert performSelector:@selector(runModal) onThread:[NSThread mainThread] withObject:nil waitUntilDone:NO];
#endif
            break;
        }
    } while (isPlaying || !trackEnded);
cleanup:
    //
    // Cleanup the read stream if it is still open
    //
    if (stream)
    {
        CFReadStreamClose(stream);
        CFRelease(stream);
        stream = nil;
    }
    //
    // Close the audio file stream
    //
    err = AudioFileStreamClose(audioFileStream);
    // Log only — jumping back to cleanup: here would loop forever on a
    // persistent error.
    if (err) { NSLog(@"AudioFileStreamClose"); }
    //
    // Dispose of the Audio Queue
    //
    if (started)
    {
        err = AudioQueueDispose(queue, true);
        if (err) { NSLog(@"AudioQueueDispose"); }
    }
    //[pool release];
    [self release];
}
#pragma mark -
#pragma mark Playback state
// Invoked (off the main thread, via propertyListenerCallback) whenever the
// queue's kAudioQueueProperty_IsRunning value changes. Once the track has
// ended we can finally dispose of the queue and close the file.
- (void)playBackIsRunningStateChanged
{
    if (trackEnded)
    {
        // go ahead and close the track now
        trackClosed = YES;
        AudioQueueDispose(queue, YES);
        AudioFileClose(audioFile);
        kxxxTrackActive = NO;
        // we're not in the main thread during this callback, so enqueue a message on the main thread to post notification
        // that we're done, or else the notification will have to be handled in this thread, making things more difficult
        [self performSelectorOnMainThread:@selector(postTrackFinishedPlayingNotification:) withObject:nil waitUntilDone:NO];
    }
}
// C trampoline for the audio queue's buffer-completed callback: redirect back
// to the class to handle it there instead, so we have direct access to the
// instance variables.
static void BufferCallback(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef buffer)
{
    [(xxxxx*)inUserData callbackForBuffer:buffer];
}
// A queue buffer finished playing: refill and re-enqueue it; on EOF either
// rewind (repeat mode) or let the queue drain and mark the track ended.
- (void) callbackForBuffer:(AudioQueueBufferRef) buffer
{
    // I guess it's possible for the callback to continue to be called since this is in another thread, so to be safe,
    // don't do anything else if the track is closed, and also don't bother reading anymore packets if the track ended
    if (trackClosed || trackEnded)
        return;
    if ([self readPacketsIntoBuffer:buffer] == 0)
    {
        if (repeat)
        {
            // End Of File reached, so rewind and refill the buffer using the beginning of the file instead
            packetIndex = 0;
            [self readPacketsIntoBuffer:buffer];
        }
        else
        {
            // set it to stop, but let it play to the end, where the property listener will pick up that it actually finished
            AudioQueueStop(queue, NO);
            trackEnded = YES;
        }
    }
}
// Posts the finished-playing notification.
- (void) postTrackFinishedPlayingNotification:(id) object
{
    // if we're here then we're in the main thread as specified by the callback, so now we can post notification that
    // the track is done without the notification observer(s) having to worry about thread safety and autorelease pools
    [[NSNotificationCenter defaultCenter] postNotificationName:xxxTrackFinishedPlayingNotification object:self];
}
// Suspend playback; a subsequent AudioQueueStart resumes from the same spot.
// Does nothing once the track has been closed (the queue is disposed).
- (void)pause
{
    if (!trackClosed)
    {
        AudioQueuePause(queue);
    }
}
#pragma mark -
#pragma mark Callback
// C trampoline for audio queue property changes: forward IsRunning
// transitions back into the instance, where the ivars are accessible.
static void propertyListenerCallback(void *inUserData, AudioQueueRef queueObject, AudioQueuePropertyID propertyID)
{
    if (propertyID != kAudioQueueProperty_IsRunning)
        return;
    [(xxxxx*)inUserData playBackIsRunningStateChanged];
}
// Fill `buffer` with the next run of packets from the audio file and enqueue
// it on the playback queue. Returns the number of packets actually read;
// zero means end-of-file was reached and nothing was enqueued.
- (UInt32)readPacketsIntoBuffer:(AudioQueueBufferRef)buffer
{
    UInt32 bytesRead = 0;
    UInt32 packetsRead = numPacketsToRead;
    // Read starting at the current cursor (packetIndex); for VBR data
    // packetDescs receives one description per packet, for CBR it is nil.
    AudioFileReadPackets(audioFile, NO, &bytesRead, packetDescs, packetIndex, &packetsRead, buffer->mAudioData);
    if (packetsRead > 0)
    {
        // Not at EOF yet — hand the freshly filled buffer back to the queue
        // to be played next. VBR needs one description per packet; CBR
        // passes zero descriptions.
        buffer->mAudioDataByteSize = bytesRead;
        AudioQueueEnqueueBuffer(queue, buffer, (packetDescs ? packetsRead : 0), packetDescs);
        // Advance the read cursor for the next refill.
        packetIndex += packetsRead;
    }
    return packetsRead;
}
@end
代码中对指针形式和数组形式两种写法分别给出了不同的实现,大家可以根据个人喜好选用;此外还把 URL 流式播放和本地文件 path 播放两种方式写在了同一个类里。这个类对应的头文件是 xxxxx.h。