Hank,
        I have included the key methods that decode and render the audio data.

I use the following structures to hold the data:
typedef struct
{
    UInt32 mDataByteSize;
    UInt32 mNumOfPackets;
    UInt32 mPacketOffset;
    UInt32 mNumOfChannels;
    UInt32 mBytesPerPacket;
    char *mData;
    AudioStreamPacketDescription *mPacketDesc;
} AudioConverterBuffer;

typedef struct
{
    UInt32 mDataByteSize;
    UInt32 mNumOfPackets;
    UInt32 mBufferIndex;
    UInt32 mNumOfChannels;
    UInt32 mPacketOffset;
    UInt32 mBytesPerPacket;
    char *mData;
    AudioStreamPacketDescription *mPacketDesc;
} AudioPlayBuffer;
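
For context, the play buffers and the synchronization primitives used below are set up roughly like this (only a sketch; the method name is a placeholder and the details are approximate, but the constant and ivar names match the code below):

// Sketch of the setup assumed by the methods below: kMaxPlayBuffer play
// buffers of kDefaultBufSize bytes each, a used/free flag per buffer, and a
// mutex/condition pair shared by the decode thread and the render
// notification callback.
- (void)setupPlayBuffers
{
    pthread_mutex_init(&_queueBufferMutex, NULL);
    pthread_cond_init(&_queueBufferReadyCond, NULL);

    for (int i = 0; i < kMaxPlayBuffer; i++) {
        _playBuffer[i].mData = (char *)malloc(kDefaultBufSize);
        _playBuffer[i].mDataByteSize = 0;
        _playBuffer[i].mPacketOffset = 0;
        _playBuffer[i].mBytesPerPacket = mSourceFormat.mBytesPerFrame; // bytes per output PCM frame (assumed)
        _bufferUsed[i] = NO;
    }
    _fillBufferIndex = 0;
    _playBufferIndex = 0;
    _bytesFilled = 0;
    _packetsFilled = 0;
}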

-----------------------

- (void)handleAudioPackets:(const void *)inInputData
             numberOfBytes:(UInt32)inNumberBytes
           numberOfPackets:(UInt32)inNumberPackets
  streamPacketDescriptions:(AudioStreamPacketDescription *)inPacketDescription
{
    OSStatus error = noErr;
    AudioBufferList fillBufferList;
    AudioStreamPacketDescription *outputPacketDesc = NULL;
    UInt32 inOutDataPackets;
    UInt32 numOutputPackets;
    UInt64 dataSize;
    size_t bufSpaceRemaining;

    if (_converter == NULL) {
        error = AudioConverterNew(&mRawFormat, &mSourceFormat, &_converter);
        if (error) {
            [self printErrorMessage:@"AudioConverterNew failure" withStatus:error];
            return;
        }
        [self getAndSetMagicCookie];
        _converterOutputBuffer = (char *)malloc(sizeof(char) * kDefaultBufSize);
    }

    numOutputPackets = kDefaultBufSize / mSourceFormat.mBytesPerPacket;

    _converterBuffer.mDataByteSize = inNumberBytes;
    _converterBuffer.mNumOfPackets = inNumberPackets;
    _converterBuffer.mNumOfChannels = mSourceFormat.mChannelsPerFrame;
    _converterBuffer.mPacketOffset = 0;
    _converterBuffer.mBytesPerPacket = inNumberBytes / inNumberPackets;
    _converterBuffer.mData = (char *)inInputData;
    _converterBuffer.mPacketDesc = inPacketDescription;

    while (_converterBuffer.mPacketOffset < inNumberPackets) {
        fillBufferList.mNumberBuffers = 1;
        fillBufferList.mBuffers[0].mNumberChannels = mSourceFormat.mChannelsPerFrame;
        fillBufferList.mBuffers[0].mDataByteSize = kDefaultBufSize;
        fillBufferList.mBuffers[0].mData = _converterOutputBuffer;
        inOutDataPackets = numOutputPackets;
        error = AudioConverterFillComplexBuffer(_converter,
                                                encoderDataProc, 
                                                self, 
                                                &inOutDataPackets,
                                                &fillBufferList,
                                                outputPacketDesc);
        dataSize = fillBufferList.mBuffers[0].mDataByteSize;
        // The input proc returns a non-zero status once the current chunk of
        // packets is used up; any PCM already produced is still in
        // fillBufferList, so only bail out if nothing came back at all.
        if (error != noErr && dataSize == 0) {
            break;
        }
        bufSpaceRemaining = kDefaultBufSize - _bytesFilled;
        if (bufSpaceRemaining < dataSize) {
            [self enqueueAudioPlayBuffer];
        }
        // Append the converted PCM at the current fill offset of the play
        // buffer and keep its byte size in sync with what has been filled.
        memcpy(_playBuffer[_fillBufferIndex].mData + _bytesFilled,
               fillBufferList.mBuffers[0].mData, dataSize);
        _playBuffer[_fillBufferIndex].mPacketDesc = NULL;
        _bytesFilled += dataSize;
        _packetsFilled += inOutDataPackets;
        _playBuffer[_fillBufferIndex].mDataByteSize = _bytesFilled;
    }
    
}
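
enqueueAudioPlayBuffer is not included above; roughly, it hands the filled buffer over to the render side and blocks until the next buffer is free again (a sketch, the details may differ slightly):

// Sketch of enqueueAudioPlayBuffer: mark the current fill buffer as in use,
// advance to the next one, wait until the render notification has released
// it, and reset the fill counters.
- (void)enqueueAudioPlayBuffer
{
    pthread_mutex_lock(&_queueBufferMutex);
    _bufferUsed[_fillBufferIndex] = YES;
    if (++_fillBufferIndex >= kMaxPlayBuffer) {
        _fillBufferIndex = 0;
    }
    while (_bufferUsed[_fillBufferIndex]) {
        // Released by handleRenderNotification once the buffer has played out.
        pthread_cond_wait(&_queueBufferReadyCond, &_queueBufferMutex);
    }
    pthread_mutex_unlock(&_queueBufferMutex);

    _bytesFilled = 0;
    _packetsFilled = 0;
    _playBuffer[_fillBufferIndex].mPacketOffset = 0;
    _playBuffer[_fillBufferIndex].mDataByteSize = 0;
}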

//Audio Unit Render Callback method
- (OSStatus)handleRenderInputWithFlags:(AudioUnitRenderActionFlags *)ioActionFlags
                             timeStamp:(const AudioTimeStamp *)inTimeStamp
                             busNumber:(UInt32)inBusNumber
                        numberOfFrames:(UInt32)inNumberFrames
                            bufferList:(AudioBufferList *)ioData
{
    // Feed the audio unit from the current play buffer, starting at the
    // frame offset that previous render calls have already consumed.
    char *sourceBuffer = _playBuffer[_playBufferIndex].mData;
    UInt32 byteOffset = _playBuffer[_playBufferIndex].mPacketOffset * _playBuffer[_playBufferIndex].mBytesPerPacket;
    memcpy(ioData->mBuffers[0].mData, &sourceBuffer[byteOffset], ioData->mBuffers[0].mDataByteSize);
    _playBuffer[_playBufferIndex].mPacketOffset += inNumberFrames;
    
    return noErr;
}

//AudioUnit Render Notification Method
- (OSStatus)handleRenderNotification:(AudioUnitRenderActionFlags *)ioActionFlags
                           timeStamp:(const AudioTimeStamp *)inTimeStamp
                           busNumber:(UInt32)inBusNumber
                      numberOfFrames:(UInt32)inNumberFrames
                          bufferList:(AudioBufferList *)ioData
{
    if (*ioActionFlags & kAudioUnitRenderAction_PostRender) {
        // Frames available in the current play buffer, assuming 4 bytes per
        // frame (16-bit stereo PCM output).
        UInt32 maxPackets = _playBuffer[_playBufferIndex].mDataByteSize / 4;
        if (_playBuffer[_playBufferIndex].mPacketOffset >= maxPackets) {
            // This buffer has been fully rendered: mark it free, wake the
            // decode thread blocked in enqueueAudioPlayBuffer, and advance
            // to the next play buffer.
            pthread_mutex_lock(&_queueBufferMutex);
            _bufferUsed[_playBufferIndex] = NO;
            pthread_cond_signal(&_queueBufferReadyCond);
            pthread_mutex_unlock(&_queueBufferMutex);
            if (++_playBufferIndex >= kMaxPlayBuffer) {
                _playBufferIndex = 0;
            }
        }
    }
    return noErr;
}
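
For reference, the two callbacks above are installed on the output audio unit along these lines (a sketch; the C function names, the AudioPlayer class name and the _ioUnit ivar are placeholders):

// Sketch: C trampolines that forward the render callbacks to the
// Objective-C methods above, plus the calls that install them on the
// audio unit the app renders into.
static OSStatus renderInputProc(void *inRefCon,
                                AudioUnitRenderActionFlags *ioActionFlags,
                                const AudioTimeStamp *inTimeStamp,
                                UInt32 inBusNumber,
                                UInt32 inNumberFrames,
                                AudioBufferList *ioData)
{
    return [(AudioPlayer *)inRefCon handleRenderInputWithFlags:ioActionFlags
                                                     timeStamp:inTimeStamp
                                                     busNumber:inBusNumber
                                                numberOfFrames:inNumberFrames
                                                    bufferList:ioData];
}

static OSStatus renderNotifyProc(void *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp *inTimeStamp,
                                 UInt32 inBusNumber,
                                 UInt32 inNumberFrames,
                                 AudioBufferList *ioData)
{
    return [(AudioPlayer *)inRefCon handleRenderNotification:ioActionFlags
                                                   timeStamp:inTimeStamp
                                                   busNumber:inBusNumber
                                              numberOfFrames:inNumberFrames
                                                  bufferList:ioData];
}

- (void)installRenderCallbacks
{
    // Render callback on input bus 0 of the output unit, plus a render
    // notification so we know when a slice has actually been played.
    AURenderCallbackStruct callback;
    callback.inputProc = renderInputProc;
    callback.inputProcRefCon = self;
    AudioUnitSetProperty(_ioUnit,
                         kAudioUnitProperty_SetRenderCallback,
                         kAudioUnitScope_Input,
                         0,
                         &callback,
                         sizeof(callback));
    AudioUnitAddRenderNotify(_ioUnit, renderNotifyProc, self);
}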

- (OSStatus)handleEncoderDataProc:(AudioConverterRef)inAudioConverter
                  numberOfPackets:(UInt32 *)ioNumberDataPackets
                       bufferList:(AudioBufferList *)ioData
                packetDescription:(AudioStreamPacketDescription **)outputPacketDescription
{
    // Supply the converter with one compressed packet per callback from the
    // chunk stored in _converterBuffer. When the chunk is exhausted, return
    // zero packets with a non-zero status so AudioConverterFillComplexBuffer
    // stops without treating it as end of stream.
    if (_converterBuffer.mPacketOffset >= _converterBuffer.mNumOfPackets) {
        *ioNumberDataPackets = 0;
        return 1;   // arbitrary non-zero status: "no more input for now"
    }
    *ioNumberDataPackets = 1;
    UInt32 offset = _converterBuffer.mPacketOffset;
    AudioStreamPacketDescription *pDesc = _converterBuffer.mPacketDesc;
    ioData->mNumberBuffers = 1;
    ioData->mBuffers[0].mDataByteSize = pDesc[offset].mDataByteSize;
    ioData->mBuffers[0].mNumberChannels = _converterBuffer.mNumOfChannels;
    ioData->mBuffers[0].mData = &_converterBuffer.mData[pDesc[offset].mStartOffset];
    if (outputPacketDescription) {
        if (_converterBuffer.mPacketDesc) {
            // The data pointer above already includes mStartOffset, so the
            // description handed back describes the packet at offset zero.
            *outputPacketDescription = &pDesc[offset];
            (*outputPacketDescription)->mStartOffset = 0;
        } else {
            *outputPacketDescription = NULL;
        }
    }
    _converterBuffer.mPacketOffset += *ioNumberDataPackets;
    return noErr;
}
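
encoderDataProc, the C callback passed to AudioConverterFillComplexBuffer, simply forwards to the method above (again a sketch; the AudioPlayer class name is a placeholder):

// Sketch: input callback for AudioConverterFillComplexBuffer; it forwards
// to handleEncoderDataProc: using the self pointer passed as inUserData.
static OSStatus encoderDataProc(AudioConverterRef inAudioConverter,
                                UInt32 *ioNumberDataPackets,
                                AudioBufferList *ioData,
                                AudioStreamPacketDescription **outDataPacketDescription,
                                void *inUserData)
{
    return [(AudioPlayer *)inUserData handleEncoderDataProc:inAudioConverter
                                            numberOfPackets:ioNumberDataPackets
                                                 bufferList:ioData
                                          packetDescription:outDataPacketDescription];
}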



Thanks
Sasikumar


On 27-Apr-2011, at 7:08 AM, Hank Heijink (Mailinglists) wrote:

> I believe Audio units need 44100 Hz, but I'm not sure about that. Regardless, 
> decoding mp3 with an AudioConverter should not introduce any glitches at that 
> rate - any iOS device should be able to handle that without issues.
> 
> The 4096 is the right value for any iOS device. Without seeing your code, I 
> don't know where your glitches are coming from. Are you by any chance reading 
> from disk on the same thread that you're playing audio on, or doing anything 
> else that could cause that thread to block?
> 
> Hank
> 
> On Apr 26, 2011, at 9:19 PM, Sasikumar JP wrote:
> 
>> Hank,
>>      I have tried setting the "kAudioUnitProperty_MaximumFramesPerSlice" 
>> property to 4096 for the Mixer and EQ Audio Units. Now audio playback does 
>> not stop when the device goes to sleep mode.
>> 
>> But I hear glitches in the audio.
>> 
>> I have looked at the Apple sample code MixerHost. It plays nicely even when 
>> the device is in sleep mode, but that sample simply mixes 2 PCM audio files 
>> and plays them.
>> 
>> My application, on the other hand, converts the mp3 stream into PCM in real 
>> time using the AudioConverterFillComplexBuffer API and plays it with an 
>> AudioUnit.
>> 
>> Does the audio conversion introduce any delay in audio packet generation, 
>> which could lead to the glitches in audio playback (when the device is in 
>> sleep mode)?
>> 
>> Should I set a different sample value (4096) for different hardware (iPod 
>> touch 2nd/3rd/4th gen, etc.)?
>> 
>> How do I derive the number of samples for a different sampling rate? (As per 
>> the Apple document, it is 4096 samples @ 44.1 kHz.)
>> 
>> Thanks
>> Sasikumar JP
>> 
>> On 26-Apr-2011, at 8:44 PM, Hank Heijink (Mailinglists) wrote:
>> 
>>> You're thinking of kAudioUnitProperty_MaximumFramesPerSlice 
>>> (http://developer.apple.com/library/ios/#qa/qa1606/_index.html). You have 
>>> to set that to 4096 frames per slice for every audio unit that's not the 
>>> remoteIO unit.
>>> 
>>> Hank
>>> 
>>> On Apr 26, 2011, at 11:05 AM, Gregory Wieber wrote:
>>> 
>>>> In the iOS docs on audio sessions there's a section on apps becoming 
>>>> background tasks. I don't have them in front of me now, but you have to 
>>>> set a property for buffer size when the app goes into the background for 
>>>> it to work.
>>>> 
>>>> Sent from my iPhone
>>>> 
>>>> On Apr 25, 2011, at 9:38 PM, Sasikumar JP <jps...@gmail.com> wrote:
>>>> 
>>>>> Hi,
>>>>> I am working on a streaming audio player application for iOS. Currently I 
>>>>> am experimenting with AudioQueue and AudioUnit for playback.
>>>>> 
>>>>> Both work fine under normal conditions, but AudioUnit playback stops 
>>>>> when the device (iPod touch 2nd gen) goes to sleep mode, whereas it 
>>>>> works fine with AudioQueue.
>>>>> 
>>>>> I have set the "kAudioSessionCategory_MediaPlayback" session category in 
>>>>> both cases. I assume this session category should continue the playback 
>>>>> even in sleep mode.
>>>>> 
>>>>> I am not sure what I am missing here. 
>>>>> 
>>>>> I want to add pan/effects features to my application, so I am trying 
>>>>> AudioUnit.
>>>>> 
>>>>> Any help is highly appreciated.
>>>>> 
>>>>> Thanks
>>>>> Sasikumar JP
>>>> 
>>> 
>> 
>> 
> 
