2016-03-08 92 views
0

最近,我的项目希望通过AAC实现音频通信,所以我使用AudioQueue,但存在一个问题,即我的播放器的回调函数不起作用,在我的项目中,回调函数已经工作了3次。即for函数工作3次。并且当活动AudioQueueStart(mQueue, NULL)时,回调函数永远不会被调用。使用AudioQueue播放AAC,但回调函数不起作用

我使用两个iPhone来运行。他们通过udp socket连接,并且我确信这部分是好的。我可以得到正确的音频数据。

我修改了一个演示文件数据而不是内存播放。所以我不知道这是一个问题吗?

这里是我的代码:AQPlayer.h

#include <AudioToolbox/AudioToolbox.h> 

#include "CAStreamBasicDescription.h" 
#include "CAXException.h" 

#define kNumberBuffers 3 
#define kBufferDurationSeconds 0.5 

class AQPlayer 
{ 
public: 
    AQPlayer(); 
    ~AQPlayer(); 

    OSStatus      StartQueue(BOOL inResume); 
    OSStatus      StopQueue(); 
    OSStatus      PauseQueue(); 

    AudioQueueRef     Queue()     { return mQueue; } 
    CAStreamBasicDescription  DataFormat() const  { return mDataFormat; } 
    Boolean       IsRunning() const  { return (mIsRunning) ? true : false; } 
    Boolean       IsInitialized() const { return mIsInitialized; } 
    CFStringRef      GetFilePath() const  { return (mFilePath) ? mFilePath : CFSTR(""); } 
    Boolean       IsLooping() const  { return mIsLooping; } 

    void SetLooping(Boolean inIsLooping) { mIsLooping = inIsLooping; } 
    void CreateQueueForFile(CFStringRef inFilePath); 
    void DisposeQueue(Boolean inDisposeFile); 
    void prepareAudioQueue(); 
    void start(); 
    void stop(); 

private: 
    UInt32       GetNumPacketsToRead()    { return mNumPacketsToRead; } 
    SInt64       GetCurrentPacket()     { return mCurrentPacket; } 
    AudioFileID      GetAudioFileID()     { return mAudioFile; } 
    void       SetCurrentPacket(SInt64 inPacket) { mCurrentPacket = inPacket; } 

    void       SetupNewQueue(); 

    AudioQueueRef     mQueue; 
    AudioQueueBufferRef    mBuffers[kNumberBuffers]; 
    AudioFileID      mAudioFile; 
    CFStringRef      mFilePath; 
    CAStreamBasicDescription  mDataFormat; 

    Boolean       mIsInitialized; 
    UInt32       mNumPacketsToRead; 
    SInt64       mCurrentPacket; 
    UInt32       mIsRunning; 
    Boolean       mIsDone; 
    Boolean       mIsLooping; 

    static void isRunningProc(  void *    inUserData, 
           AudioQueueRef   inAQ, 
           AudioQueuePropertyID inID); 

    static void AQBufferCallback( void *     inUserData, 
           AudioQueueRef   inAQ, 
           AudioQueueBufferRef  inCompleteAQBuffer); 

    void CalculateBytesForTime( CAStreamBasicDescription & inDesc, 
           UInt32 inMaxPacketSize, 
           Float64 inSeconds, 
           UInt32 *outBufferSize, 
           UInt32 *outNumPackets); 

}; 

,这里是AQPlayer.mm

#include "package.h" 
#include "udpsocket.h" 
#define MAXPACKETSIZE 1000 
#define BUFFER_SIZE 4000 

#include "AQPlayer.h" 


extern udpsocket *udp; 

void AQPlayer::AQBufferCallback(void *     inUserData, 
           AudioQueueRef   inAQ, 
           AudioQueueBufferRef  inCompleteAQBuffer) 
{ 

    AQPlayer *THIS = (AQPlayer *)inUserData; 

    dispatch_semaphore_wait(udp->sempahore,DISPATCH_TIME_FOREVER); 
    NSLog(@"read begin"); 

    while ([udp->AudioQueue count]>0 && 
     ![[udp->AudioQueue objectAtIndex:0] isKindOfClass:[AUDIO_CACHE_OBJECT class]]) 
    { 
     [udp->AudioQueue removeObjectAtIndex:0]; 
    } 
    if([udp->AudioQueue count]<1){ 
     [NSThread sleepForTimeInterval:0.05]; 
     AQBufferCallback(inUserData, inAQ, inCompleteAQBuffer); 
     return; 
    } 
    int packets = 0; 
    int dataLen = 0; 
    AUDIO_CACHE_OBJECT *pack; 

    char *data = (char*)malloc(sizeof(char)*BUFFER_SIZE);; 
    memset(data, 0, BUFFER_SIZE); 
    pack = [udp->AudioQueue firstObject]; 

    while (dataLen+pack.datalen<BUFFER_SIZE && [udp->AudioQueue count]>0 /*&& packets<21*/) { 
     memcpy(data+dataLen, [pack GetData], [pack datalen]); 
     dataLen+=[pack datalen]; 
     [udp->AudioQueue removeObjectAtIndex:0]; 
     //   [pack memset]; 
     packets ++; 

     while ([udp->AudioQueue count]>1 && 
       ![[udp->AudioQueue objectAtIndex:0] isKindOfClass:[AUDIO_CACHE_OBJECT class]]) 
     { 
      [udp->AudioQueue removeObjectAtIndex:0]; 
     } 
     if([udp->AudioQueue count]<1){ 
      break; 
     } 
     pack = [udp->AudioQueue firstObject]; 

    } 

    memcpy(inCompleteAQBuffer->mAudioData, data, dataLen); 

    inCompleteAQBuffer->mAudioDataByteSize = dataLen; 
    inCompleteAQBuffer->mPacketDescriptionCount = packets; 
    AudioQueueEnqueueBuffer(inAQ, inCompleteAQBuffer, 0,NULL); 
    THIS->mCurrentPacket += packets; 
    free(data); 
    NSLog(@"read end --- %lld",THIS->mCurrentPacket); 

} 

void AQPlayer::isRunningProc ( void *    inUserData, 
           AudioQueueRef   inAQ, 
           AudioQueuePropertyID inID) 
{ 
    AQPlayer *THIS = (AQPlayer *)inUserData; 
    UInt32 size = sizeof(THIS->mIsRunning); 
    OSStatus result = AudioQueueGetProperty (inAQ, kAudioQueueProperty_IsRunning, &THIS->mIsRunning, &size); 

    if ((result == noErr) && (!THIS->mIsRunning)) 
     [[NSNotificationCenter defaultCenter] postNotificationName: @"playbackQueueStopped" object: nil]; 
} 

void AQPlayer::CalculateBytesForTime (CAStreamBasicDescription & inDesc, UInt32 inMaxPacketSize, Float64 inSeconds, UInt32 *outBufferSize, UInt32 *outNumPackets) 
{ 
    // we only use time here as a guideline 
    // we're really trying to get somewhere between 16K and 64K buffers, but not allocate too much if we don't need it 
    static const int maxBufferSize = 0x10000; // limit size to 64K 
    static const int minBufferSize = 0x4000; // limit size to 16K 

    if (inDesc.mFramesPerPacket) { 
     Float64 numPacketsForTime = inDesc.mSampleRate/inDesc.mFramesPerPacket * inSeconds; 
     *outBufferSize = numPacketsForTime * inMaxPacketSize; 
    } else { 
     // if frames per packet is zero, then the codec has no predictable packet == time 
     // so we can't tailor this (we don't know how many Packets represent a time period 
     // we'll just return a default buffer size 
     *outBufferSize = maxBufferSize > inMaxPacketSize ? maxBufferSize : inMaxPacketSize; 
    } 

    // we're going to limit our size to our default 
    if (*outBufferSize > maxBufferSize && *outBufferSize > inMaxPacketSize) 
     *outBufferSize = maxBufferSize; 
    else { 
     // also make sure we're not too small - we don't want to go the disk for too small chunks 
     if (*outBufferSize < minBufferSize) 
      *outBufferSize = minBufferSize; 
    } 
    *outNumPackets = *outBufferSize/inMaxPacketSize; 
} 

AQPlayer::AQPlayer() : 
mQueue(0), 
mAudioFile(0), 
mFilePath(NULL), 
mIsRunning(false), 
mIsInitialized(false), 
mNumPacketsToRead(0), 
mCurrentPacket(0), 
mIsDone(false), 
mIsLooping(false) { } 

AQPlayer::~AQPlayer() 
{ 
    DisposeQueue(true); 
} 





OSStatus AQPlayer::StartQueue(BOOL inResume) 
{ 
    if (mQueue == NULL) 
     CreateQueueForFile(mFilePath); 

    mIsDone = false; 
    if (!inResume) 
     mCurrentPacket = 0; 

    for (int i = 0; i < kNumberBuffers; ++i) { 
     AQBufferCallback (this, mQueue, mBuffers[i]); 
    } 
    NSLog(@"audioqueuestart"); 
    UInt32 i =0; 
    AudioQueuePrime(mQueue, 0, &i); 
    NSLog(@"%d",(unsigned int)i); 
    return AudioQueueStart(mQueue, NULL); 
} 

OSStatus AQPlayer::StopQueue() 
{ 
    OSStatus result = AudioQueueStop(mQueue, true); 
    if (result) printf("ERROR STOPPING QUEUE!\n"); 

    return result; 
} 

OSStatus AQPlayer::PauseQueue() 
{ 
    OSStatus result = AudioQueuePause(mQueue); 

    return result; 
} 

void AQPlayer::CreateQueueForFile(CFStringRef inFilePath) 
{ 

    try { 

      UInt32 size = sizeof(mDataFormat); 

      mDataFormat.mSampleRate   = 44100; 
      mDataFormat.mFormatID   = kAudioFormatMPEG4AAC; 
      mDataFormat.mFormatFlags  = 0; 
      mDataFormat.mFramesPerPacket = 1024; 
      mDataFormat.mChannelsPerFrame = 2; 
      mDataFormat.mBitsPerChannel  = 0;//表示这是一个压缩格式 
      mDataFormat.mBytesPerPacket  = 0;//表示这是一个变比特率压缩 
      mDataFormat.mBytesPerFrame  = 0; 
      mDataFormat.mReserved   = 0; 
      //aqc.bufferByteSize     = 2000; 

     SetupNewQueue(); 
    } 
    catch (NSException *e) { 

     fprintf(stderr, "Error: %@ (%@)\n", [e debugDescription], [e description]); 
    } 
} 

void AQPlayer::SetupNewQueue() 
{ 
    AudioQueueNewOutput(&mDataFormat, AQPlayer::AQBufferCallback, this,//NULL,NULL,0,&mQueue); 
             CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &mQueue); 
    UInt32 bufferByteSize; 
    UInt32 maxPacketSize = MAXPACKETSIZE; 
    UInt32 size = sizeof(maxPacketSize); 
    CalculateBytesForTime (mDataFormat, maxPacketSize, kBufferDurationSeconds, &bufferByteSize, &mNumPacketsToRead); 

    size = sizeof(UInt32); 


    AudioQueueAddPropertyListener(mQueue, kAudioQueueProperty_IsRunning, isRunningProc, this); 

    bool isFormatVBR = (mDataFormat.mBytesPerPacket == 0 || mDataFormat.mFramesPerPacket == 0); 
    AudioQueueSetParameter(mQueue, kAudioQueueParam_Volume, 1.0); 


    for (int i = 0; i < kNumberBuffers; ++i) { 
     AudioQueueAllocateBuffer(mQueue, bufferByteSize, &mBuffers[i]); 
    } 

    // set the volume of the queue 
    mIsInitialized = true; 
} 
void AQPlayer::DisposeQueue(Boolean inDisposeFile) 
{ 
    if (mQueue) 
    { 
     AudioQueueDispose(mQueue, true); 
     mQueue = NULL; 
    } 
    if (inDisposeFile) 
    { 
     if (mAudioFile) 
     { 
      AudioFileClose(mAudioFile); 
      mAudioFile = 0; 
     } 
     if (mFilePath) 
     { 
      CFRelease(mFilePath); 
      mFilePath = NULL; 
     } 
    } 
    mIsInitialized = false; 
} 

感谢您的时间。

回答

0

您的输入是VBR,因此您的缓冲区需要随附的数据包描述数组。当您最初创建缓冲区时,请使用AudioQueueAllocateBufferWithPacketDescriptions:而不是AudioQueueAllocateBuffer:。然后在您的回调过程中为while循环中的每个数据包设置mStartOffset,mDataByteSizemVariableFramesInPacket(始终为零)。

+0

非常感谢。这非常有帮助。 – Jun