Note that there are some explanatory texts on larger screens.

plurals
  1. Can anybody help me record iPhone output sound through an Audio Unit?
    primarykey
    data
    text
    <p>This is my code: I use this code to record the iPhone output audio by using an Audio Unit, then save the output in output.caf, but the output.caf file is empty. Does anybody have an idea of what I should do? The output audio file is empty.</p> <p>This is the initialization of the audio unit:</p> <pre><code>-(void) initializaeOutputUnit { OSStatus status; // Describe audio component AudioComponentDescription desc; desc.componentType = kAudioUnitType_Output; desc.componentSubType = kAudioUnitSubType_RemoteIO; desc.componentFlags = 0; desc.componentFlagsMask = 0; desc.componentManufacturer = kAudioUnitManufacturer_Apple; // Get component AudioComponent inputComponent = AudioComponentFindNext(NULL, &amp;desc); // Get audio units status = AudioComponentInstanceNew(inputComponent, &amp;audioUnit); // Enable IO for recording UInt32 flag = 1; status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &amp;flag, sizeof(flag)); // Enable IO for playback status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &amp;flag, sizeof(flag)); // Describe format AudioStreamBasicDescription audioFormat={0}; audioFormat.mSampleRate = 44100.00; audioFormat.mFormatID = kAudioFormatLinearPCM; audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked; audioFormat.mFramesPerPacket = 1; audioFormat.mChannelsPerFrame = 1; audioFormat.mBitsPerChannel = 16; audioFormat.mBytesPerPacket = 2; audioFormat.mBytesPerFrame = 2; // Apply format status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &amp;audioFormat, sizeof(audioFormat)); status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, kOutputBus, &amp;audioFormat, sizeof(audioFormat)); // Set input callback AURenderCallbackStruct callbackStruct; callbackStruct.inputProc = recordingCallback; callbackStruct.inputProcRefCon = self; 
status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, kInputBus, &amp;callbackStruct, sizeof(callbackStruct)); // Set output callback callbackStruct.inputProc = playbackCallback; callbackStruct.inputProcRefCon = self; status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, kOutputBus, &amp;callbackStruct, sizeof(callbackStruct)); // Disable buffer allocation for the recorder (optional - do this if we want to pass in our own) flag = 0; status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_ShouldAllocateBuffer, kAudioUnitScope_Output, kInputBus, &amp;flag, sizeof(flag)); AudioUnitInitialize(audioUnit); AudioOutputUnitStart(audioUnit); // On initialise le fichier audio NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); NSString *documentsDirectory = [paths objectAtIndex:0]; NSString *destinationFilePath = [[[NSString alloc] initWithFormat: @"%@/output.caf", documentsDirectory] autorelease]; NSLog(@"&gt;&gt;&gt; %@", destinationFilePath); CFURLRef destinationURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (CFStringRef)destinationFilePath, kCFURLPOSIXPathStyle, false); OSStatus setupErr = ExtAudioFileCreateWithURL(destinationURL, kAudioFileWAVEType, &amp;audioFormat, NULL, kAudioFileFlags_EraseFile, &amp;effectState.audioFileRef); CFRelease(destinationURL); NSAssert(setupErr == noErr, @"Couldn't create file for writing"); setupErr = ExtAudioFileSetProperty(effectState.audioFileRef, kExtAudioFileProperty_ClientDataFormat, sizeof(AudioStreamBasicDescription), &amp;audioFormat); NSAssert(setupErr == noErr, @"Couldn't create file for format"); setupErr = ExtAudioFileWriteAsync(effectState.audioFileRef, 0, NULL); NSAssert(setupErr == noErr, @"Couldn't initialize write buffers for audio file"); } </code></pre> <p>the recording call back </p> <pre><code>static OSStatus recordingCallback (void * inRefCon, 
AudioUnitRenderActionFlags * ioActionFlags, const AudioTimeStamp * inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList * ioData) { NSLog(@"callback"); if (*ioActionFlags == kAudioUnitRenderAction_PostRender&amp;&amp;inBusNumber==0) { AudioBufferList *bufferList; // &lt;- Fill this up with buffers (you will want to malloc it, as it's a dynamic-length list) EffectState *effectState = (EffectState *)inRefCon; AudioUnit rioUnit =[(MixerHostAudio*)inRefCon getAudioUnit]; OSStatus status; NSLog(@"de5eal el call back "); // BELOW I GET THE ERROR status = AudioUnitRender( rioUnit, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, bufferList); if (noErr != status) { NSLog(@"AudioUnitRender error"); return noErr;} // Now, we have the samples we just read sitting in buffers in bufferList ExtAudioFileWriteAsync(effectState-&gt;audioFileRef, inNumberFrames, bufferList); } return noErr; } // then stop Recording - (void) stopRecord { AudioOutputUnitStop(audioUnit); AudioUnitUninitialize(audioUnit); } </code></pre>
    singulars
    1. This table or related slice is empty.
    plurals
    1. This table or related slice is empty.
 

Querying!

 
Guidance

SQuiL has stopped working due to an internal error.

If you are curious you may find further information in the browser console, which is accessible through the devtools (F12).

Reload