
How can I save the data re-encoded by AVAssetWriterInput in memory?
<pre><code>- (void)setUpSession
{
    m_session = [[AVCaptureSession alloc] init];
    m_session.sessionPreset = AVCaptureSessionPresetLow;

    // Video input
    NSError *error = nil;
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (videoDevice != nil) {
        AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        if (videoInput != nil) {
            if ([m_session canAddInput:videoInput]) {
                [m_session addInput:videoInput];
            }
        } else {
            // NSLog(@"%@", [error localizedFailureReason]);
        }
    }

    // Audio input
    if (![videoDevice hasMediaType:AVMediaTypeAudio] && ![videoDevice hasMediaType:AVMediaTypeMuxed]) {
        AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        if (audioDevice != nil) {
            AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
            if (audioInput != nil) { // original rechecked audioDevice here, masking input-creation failures
                if ([m_session canAddInput:audioInput]) {
                    [m_session addInput:audioInput];
                }
            } else {
                // NSLog(@"%@", [error localizedFailureReason]);
            }
        }
    } else {
        NSLog(@"videoDevice has audioMedia or muxed");
    }

    // Video output
    m_videoOut = [[AVCaptureVideoDataOutput alloc] init];
    m_videoOut.alwaysDiscardsLateVideoFrames = NO;
    m_videoOut.videoSettings = [NSDictionary dictionaryWithObject:
                                    [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
                                                            forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    if ([m_session canAddOutput:m_videoOut]) {
        [m_session addOutput:m_videoOut];
    }

    AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:m_session];
    layer.frame = m_videoView.frame;
    layer.videoGravity = AVLayerVideoGravityResize;
    [m_videoView.layer addSublayer:layer];

    // Audio output
    m_audioOut = [[AVCaptureAudioDataOutput alloc] init];
    if ([m_session canAddOutput:m_audioOut]) {
        [m_session addOutput:m_audioOut];
    }

    // Deliver both sample-buffer streams on one serial queue
    dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
    [m_videoOut setSampleBufferDelegate:self queue:queue];
    [m_audioOut setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);
}

- (void)setUpWriter
{
    NSError *error = nil;
    NSURL *url = [self recordFilePath];
    m_writer = [[AVAssetWriter alloc] initWithURL:url fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(m_writer);

    // Video writer input (re-encodes to H.264 at 640x480)
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                       AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:640], AVVideoWidthKey,
                                       [NSNumber numberWithInt:480], AVVideoHeightKey,
                                       nil];
    m_videoWriteInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                            outputSettings:videoSettings] retain];
    NSParameterAssert(m_videoWriteInput);
    m_videoWriteInput.expectsMediaDataInRealTime = YES;

    // Audio writer input (re-encodes to mono AAC)
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                             [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                             [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                             [NSNumber numberWithFloat:16000.0], AVSampleRateKey,
                                             [NSNumber numberWithInt:32000], AVEncoderBitRateKey,
                                             [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                             nil];
    m_audioWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                             outputSettings:audioOutputSettings] retain];
    m_audioWriterInput.expectsMediaDataInRealTime = YES;

    [m_writer addInput:m_videoWriteInput];
    [m_writer addInput:m_audioWriterInput];
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        NSLog(@"sample buffer is not ready. Skipping sample");
        return;
    }

    if (m_bRecording == YES) {
        CMTime lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        if (m_writer.status != AVAssetWriterStatusWriting) {
            if (m_writer.status == AVAssetWriterStatusFailed) {
                NSLog(@"ERROR: %@\r\n", m_writer.error);
            } else {
                [m_writer startWriting];
                [m_writer startSessionAtSourceTime:lastSampleTime];
            }
        }

        if (captureOutput == m_videoOut) {
            [self newVideoSample:sampleBuffer];
            NSLog(@"i am video");
        } else {
            NSParameterAssert(captureOutput == m_audioOut);
            [self newAudioSample:sampleBuffer];
            NSLog(@"i am audio");
        }
    }
}

- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer
{
    if (m_bRecording) {
        if (m_writer.status > AVAssetWriterStatusWriting) {
            NSLog(@"Warning: writer status is %d\r\n", (int)m_writer.status);
            if (m_writer.status == AVAssetWriterStatusFailed) {
                NSLog(@"Error: %@\r\n", m_writer.error);
            }
            return;
        }
        if (m_videoWriteInput.readyForMoreMediaData) {
            if (![m_videoWriteInput appendSampleBuffer:sampleBuffer]) {
                NSLog(@"Unable to write to video input\r\n");
            }
        } else {
            NSLog(@"video input readyForMoreMediaData is NO\r\n");
        }
    }
}
</code></pre>

As we know, AVAssetWriterInput re-encodes the captured data and then saves it to a local file. But if I want to keep the re-encoded data in memory instead, how can I get that data? What should I do? Any suggestions?
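For context: `AVAssetWriter` as used here only accepts a file URL via `initWithURL:fileType:error:`; it has no supported in-memory destination. One common workaround is to let the writer finish, then load the finished movie file back into an `NSData`. Below is a minimal sketch of that approach, not a definitive answer. It reuses `m_writer`, `m_bRecording`, `m_videoWriteInput`, `m_audioWriterInput`, and `-recordFilePath` from the question's code; the `-stopRecording` method name is hypothetical.

<pre><code>// A minimal sketch, assuming the ivars and -recordFilePath declared above.
// -stopRecording is a hypothetical method name.
- (void)stopRecording
{
    m_bRecording = NO;
    [m_videoWriteInput markAsFinished];
    [m_audioWriterInput markAsFinished];

    // -finishWriting blocks until the writer finalizes the file; only after
    // it returns YES is the movie complete and safe to read back.
    if ([m_writer finishWriting]) {
        NSData *movieData = [NSData dataWithContentsOfURL:[self recordFilePath]];
        NSLog(@"re-encoded movie is now in memory: %lu bytes",
              (unsigned long)[movieData length]);
        // ... hand movieData to an uploader, socket, etc. ...
    } else {
        NSLog(@"finishWriting failed: %@", m_writer.error);
    }
}
</code></pre>

Note that reading the file while the writer is still running is generally not useful for `AVFileTypeQuickTimeMovie` output, because the movie header is only written at finalization. If the actual goal is per-frame encoded data in memory (e.g. for streaming), `AVAssetWriter` is the wrong tool; on later systems, VideoToolbox's `VTCompressionSession` can deliver encoded `CMSampleBuffer`s directly to a callback without touching the file system.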