2014-09-25 72 views
3

iOS:直接播放来自UDP流(NSData)的原始音频 —— 我在服务器上录制音频数据并立即将其发送到客户端。客户端像这样接收UDP数据包:

/// GCDAsyncUdpSocket delegate callback, invoked once per received UDP datagram.
/// @param sock The socket that received the datagram.
/// @param data The raw datagram payload (here: raw audio bytes from the server).
/// @param address The sender's address, wrapped in an NSData.
/// @param filterContext Opaque context from an optional receive filter.
- (void)udpSocket:(GCDAsyncUdpSocket *)sock didReceiveData:(NSData *)data fromAddress:(NSData *)address withFilterContext:(id)filterContext
{
    // Drop datagrams that arrive after the stream has been stopped.
    if (!isRunning) return;

    if (data)
    {
        // Raw audio payload received — hand it to the playback pipeline here.
    }
    else
    {
        // Empty datagram — nothing to play.
    }
}

现在原始数据就在 data 变量里,我想立即播放它。我已经在这个问题上卡了大概两天了……我只想要像 Java 中 AudioTrack 那样简单的东西。我读了很多关于 Audio Queue 等的资料,但仍然不理解。你能给我一个提示吗?最好以代码的形式。我感觉我已经查遍了每个网站、看过每个例子,但还是看不懂它们。在很多例子中,回调函数在某些缓冲区被填充后才开始执行,但我不明白该如何用 NSData 去填充这些缓冲区。

回答

0

我有兴趣听到这个问题的答案。我的解决方案是在iOS中使用OpenAL制作我自己的音频服务器,OpenAL开箱即可呈现音频缓冲区 - 基本上只有一个线程处理消耗从服务器发送的音频流 - 另一个线程用于运行您自己的OpenAL服务器我在这里概述:

#import <OpenAL/al.h> 
#import <OpenAL/alc.h> 
#import <AudioToolbox/ExtendedAudioFile.h> 


// Sets up the OpenAL device, context, buffer pool, and streaming source,
// then primes the pipeline by fetching the first audio buffer.
-(void) init_openal {

    openal_device = alcOpenDevice(NULL);
    if (openal_device == NULL) {
        NSLog(@"STR_OPENAL ERROR - failed to get audio device");
        return;
    }

    // Create a context on the device; 0 means no attribute list.
    openal_context = alcCreateContext(openal_device, 0);
    if (openal_context == NULL) {
        NSLog(@"STR_OPENAL ERROR - failed to create context");
        return;
    }

    // Activate the freshly created context.
    alcMakeContextCurrent(openal_context);

    // Pre-allocate the fixed pool of reusable AL buffers.
    alGenBuffers(MAX_OPENAL_QUEUE_BUFFERS, available_AL_buffer_array);

    // One source consumes the queued buffers in FIFO order.
    alGenSources(1, & streaming_source);

    printf("STR_OPENAL streaming_source starts with %u\n", streaming_source);

    printf("STR_OPENAL initialization of available_AL_buffer_array_curr_index to 0\n");

    // No buffers are queued yet, and no PCM chunk has been rendered yet.
    available_AL_buffer_array_curr_index = 0;

    self.previous_local_lpcm_buffer = 0;

    // Prime the pipeline with the first chunk of audio.
    [self get_next_buffer];

} // init_openal



// calling init and retrieving buffers logic left out goes here 

// One iteration of the audio-render loop. Three phases:
//   1. Reclaim buffers the source has finished playing (unqueue them back
//      into available_AL_buffer_array).
//   2. If a free buffer exists and fresh PCM data has arrived, load it with
//      alBufferData and queue it on the source; restart playback if the
//      source stalled (buffer underrun).
//   3. When the last chunk has been fully rendered, notify the parent and
//      stop the loop.
// NOTE(review): available_AL_buffer_array_curr_index acts as a stack pointer
// into the free-buffer pool: indices below it are free, at/above are queued.
// self.local_lpcm_buffer / local_aac_index are presumably filled by
// get_next_buffer on another thread — confirm against the producer side.
-(void) inner_run { 

ALenum al_error; 

// Phase 1: UN-queue used (already played) buffers. 

ALint buffers_processed = 0; 

alGetSourcei(streaming_source, AL_BUFFERS_PROCESSED, & buffers_processed); // get source parameter num used buffs 

while (buffers_processed > 0) {  // we have a consumed buffer so we need to replenish 

    NSLog(@"STR_OPENAL inner_run seeing consumed buffer"); 

    ALuint unqueued_buffer; 

    alSourceUnqueueBuffers(streaming_source, 1, & unqueued_buffer); 

    // about to decrement available_AL_buffer_array_curr_index 

    available_AL_buffer_array_curr_index--; 

    // NOTE(review): %d with an ALuint/unsigned value — harmless in practice
    // but %u would be the matching format specifier.
    printf("STR_OPENAL to NEW %d with unqueued_buffer %d\n", 
      available_AL_buffer_array_curr_index, 
      unqueued_buffer); 

    // Push the reclaimed buffer back onto the free-pool "stack".
    available_AL_buffer_array[available_AL_buffer_array_curr_index] = unqueued_buffer; 

    buffers_processed--; 
} 

// Phase 2: queue UP fresh buffers. 

if (available_AL_buffer_array_curr_index >= MAX_OPENAL_QUEUE_BUFFERS) { 

    // Every buffer is already queued on the source — back off briefly
    // instead of spinning.
    printf("STR_OPENAL about to sleep since internal OpenAL queue is full\n"); 

    [NSThread sleepUntilDate:[NSDate dateWithTimeIntervalSinceNow: SLEEP_ON_OPENAL_QUEUE_FULL]]; 

} else { 

    NSLog(@"STR_OPENAL YYYYYYYYY available_AL_buffer_array_curr_index %d MAX_OPENAL_QUEUE_BUFFERS %d", 
      available_AL_buffer_array_curr_index, 
      MAX_OPENAL_QUEUE_BUFFERS 
     ); 

    // Take the next free buffer from the pool (without popping yet — the
    // index is only advanced after a successful queue below).
    ALuint curr_audio_buffer = available_AL_buffer_array[available_AL_buffer_array_curr_index]; 

    ALsizei size_buff; 
    ALenum data_format; 
    ALsizei sample_rate; 

    size_buff = MAX_SIZE_CIRCULAR_BUFFER; // works nicely with 1016064 

    sample_rate = lpcm_output_sampling_frequency; 
    data_format = AL_FORMAT_STEREO16;  // AL_FORMAT_STEREO16 == 4355 (0x1103) --- AL_FORMAT_MONO16 

    printf("STR_OPENAL curr_audio_buffer is %u data_format %u size_buff %u\n", 
      curr_audio_buffer, 
      data_format, 
      size_buff 
      ); 


    // write_output_file([TS_ONLY_delete_this_var_temp_aif_fullpath 
    // cStringUsingEncoding:NSUTF8StringEncoding], curr_lpcm_buffer, 
    // curr_lpcm_buffer_sizeof); 


    // Skip re-loading if the producer has not delivered a new PCM chunk
    // since the last iteration (same pointer as before).
    if (self.local_lpcm_buffer == self.previous_local_lpcm_buffer) { 

     printf("STR_OPENAL NOTICE - need to throttle up openal sleep duration seeing same value for local_lpcm_buffer %d - so will skip loading into alBufferData\n", 
       (int) self.local_lpcm_buffer); 

    } else { 


     NSLog(@"STR_OPENAL about to call alBufferData curr_audio_buffer %d local_lpcm_buffer address %d local_aac_index %d", 
       curr_audio_buffer, 
       (int) self.local_lpcm_buffer, 
       self.local_aac_index); 

     // Copy the fresh PCM audio data into curr_audio_buffer.

     alBufferData(curr_audio_buffer, data_format, self.local_lpcm_buffer, size_buff, sample_rate); // curr_audio_buffer is an INT index determining which buffer to use 

     self.previous_local_lpcm_buffer = self.local_lpcm_buffer; 

     // Hand the filled buffer to the source's playback queue.
     alSourceQueueBuffers(streaming_source, 1, & curr_audio_buffer); 

     printf("STR_OPENAL about to increment available_AL_buffer_array_curr_index from OLD %d", 
      available_AL_buffer_array_curr_index); 

     // Pop the buffer from the free pool now that it is queued.
     available_AL_buffer_array_curr_index++; 

     printf("STR_OPENAL available_AL_buffer_array_curr_index to NEW %d\n", available_AL_buffer_array_curr_index); 
    } 

    al_error = alGetError(); 
    if(AL_NO_ERROR != al_error) 
    { 
     NSLog(@"STR_OPENAL ERROR - alSourceQueueBuffers error: %s", alGetString(al_error)); 
     return; 
    } 

    // Phase 2b: restart the source if it stalled (buffer underrun leaves
    // the source in AL_STOPPED even though buffers are queued).
    ALenum current_playing_state; 

    alGetSourcei(streaming_source, AL_SOURCE_STATE, & current_playing_state); // get source parameter STATE 

    al_error = alGetError(); 
    if(AL_NO_ERROR != al_error) 
    { 
     NSLog(@"STR_OPENAL ERROR - alGetSourcei error: %s", alGetString(al_error)); 
     return; 
    } 

    if (AL_PLAYING != current_playing_state) { 

     ALint buffers_queued = 0; 

     alGetSourcei(streaming_source, AL_BUFFERS_QUEUED, & buffers_queued); // get source parameter num queued buffs 

     NSLog(@"STR_OPENAL NOTICE - play is NOT AL_PLAYING: %x, buffers_queued: %d", current_playing_state, buffers_queued); 

     // Only (re)start playback when there is something queued and the
     // stream is not intentionally paused.
     if (buffers_queued > 0 && NO == self.streaming_paused) { 

      // restart play 

      NSLog(@"STR_OPENAL about to restart play"); 

      alSourcePlay(streaming_source); 

      al_error = alGetError(); 
      if (AL_NO_ERROR != al_error) { 

       NSLog(@"STR_OPENAL ERROR - alSourcePlay error: %s", alGetString(al_error)); 
      } 
     } 
    } 


    // Phase 3: end-of-stream — all input consumed AND every buffer played
    // (free-pool index back to 0 means nothing is left queued... NOTE(review):
    // index 0 here actually means all buffers are free, i.e. fully drained —
    // confirm the producer sets last_aac_index at the true end of input.
    if (self.last_aac_index == self.local_aac_index && available_AL_buffer_array_curr_index == 0) { 

     NSLog(@"STR_OPENAL reached end of event tell parent"); 

     [self send_running_condition_message_to_parent: rendered_last_buffer]; 

     flag_continue_running = false; // terminate since all rendering work is done 

    } else { 

     // More audio expected — ask the producer for the next chunk.
     [self get_next_buffer]; 
    } 
} 
}  //  inner_run 
+0

谢谢你的代码 :D 不过 OpenAL 的 API 更难理解。所以你有一个 streaming_source,然后把它传给 unqueued_buffer?我尝试理解这些代码,但我对 OpenAL 的经验很少(几乎为零)。有没有"更容易"的方案建议? – Tony 2014-09-25 17:15:29

0

我认为这里没有现成的 iOS 解决方案。可以尝试深入研究 CoreAudio 框架,或者寻找一些现成的库,例如 StreamingKit Library。