How to read and write an audio file using NSInputStream and NSOutputStream
I record and save a CAF audio file using AVAudioRecorder. When I convert the 300 MB .caf file to .wav format, the application crashes with memory warnings ("Received memory warning. Level=1" and "Received memory warning. Level=2"). How can I use NSInputStream to read the audio file and NSOutputStream to write it?

<pre><code>-(void) convertToWav:(NSNumber *) numIndex
{
    // set up an AVAssetReader to read from the iPod Library
    int index = [numIndex integerValue];

    // look up the name stored in the plist, if any
    NSString *strName;
    NSString *strFilePath1 = [delegate.strCassettePathSide stringByAppendingPathComponent:@"audio_list.plist"];
    bool bTapeInfoFileExists = [[NSFileManager defaultManager] fileExistsAtPath:strFilePath1];
    if (bTapeInfoFileExists) {
        NSMutableDictionary *dictInfo = [[NSMutableDictionary alloc] initWithContentsOfFile:strFilePath1];
        if ([dictInfo valueForKey:@"lastName"])
            strName = [dictInfo valueForKey:@"lastName"];
        else
            strName = [delegate.arrNameList objectAtIndex:0];
    } else {
        strName = [delegate.arrNameList objectAtIndex:0];
    }

    // reader for the recorded .caf file
    NSString *cafFilePath = [[delegate.arrSessionList objectAtIndex:index] valueForKey:@"path"];
    NSURL *assetURL = [NSURL fileURLWithPath:cafFilePath];
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];

    NSError *assetError = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset error:&assetError];
    if (assetError) {
        NSLog(@"error: %@", assetError);
        return;
    }

    AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
                                                                                                      audioSettings:nil];
    if (![assetReader canAddOutput:assetReaderOutput]) {
        NSLog(@"can't add reader output... die!");
        return;
    }
    [assetReader addOutput:assetReaderOutput];

    // writer that produces the .wav file
    NSString *strWavFileName = [NSString stringWithFormat:@"%@.wav", [[cafFilePath lastPathComponent] stringByDeletingPathExtension]];
    NSString *wavFilePath = [delegate.strCassettePathSide stringByAppendingPathComponent:strWavFileName];
    if ([[NSFileManager defaultManager] fileExistsAtPath:wavFilePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:wavFilePath error:nil];
    }
    NSURL *exportURL = [NSURL fileURLWithPath:wavFilePath];
    AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
                                                          fileType:AVFileTypeWAVE
                                                             error:&assetError];
    if (assetError) {
        NSLog(@"error: %@", assetError);
        return;
    }

    AppDelegate *appDelegate = [[UIApplication sharedApplication] delegate];
    int nSampleRate = [[appDelegate.dictWAVQuality valueForKey:@"samplerate"] integerValue];

    // 16-bit interleaved stereo linear PCM output settings
    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(AudioChannelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
    NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                    [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
                                    [NSNumber numberWithFloat:nSampleRate], AVSampleRateKey,
                                    [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
                                    [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
                                    [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
                                    [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
                                    [NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
                                    [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
                                    nil];

    AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                              outputSettings:outputSettings];
    if ([assetWriter canAddInput:assetWriterInput]) {
        [assetWriter addInput:assetWriterInput];
    } else {
        NSLog(@"can't add asset writer input... die!");
        return;
    }

    assetWriterInput.expectsMediaDataInRealTime = NO;

    [assetWriter startWriting];
    [assetReader startReading];

    AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
    CMTime startTime = CMTimeMake(0, soundTrack.naturalTimeScale);
    [assetWriter startSessionAtSourceTime:startTime];

    __block UInt64 convertedByteCount = 0;

    // pull sample buffers from the reader and hand them to the writer
    dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
    [assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
        while (assetWriterInput.readyForMoreMediaData) {
            CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
            if (nextBuffer) {
                // append buffer
                [assetWriterInput appendSampleBuffer:nextBuffer];
                convertedByteCount += CMSampleBufferGetTotalSampleSize(nextBuffer);
            } else {
                // reader is drained: finish up and record the new path
                [assetWriterInput markAsFinished];
                // [assetWriter finishWriting];
                [assetReader cancelReading];
                [dictTemp setValue:wavFilePath forKey:@"path"];
                [dictTemp setValue:nil forKey:@"progress"];
                [delegate.arrSessionList replaceObjectAtIndex:index withObject:dictTemp];
                NSString *strListFilePath = [delegate.strCassettePathSide stringByAppendingPathComponent:@"audiolist.plist"];
                [delegate.arrSessionList writeToFile:strListFilePath atomically:YES];
                break;
            }
        }
    }];
}
</code></pre>