Note that there are some explanatory texts on larger screens.

plurals
  1. Screen Recording on iPad
    primarykey
    data
    text
<p>I have been facing a problem for the last two weeks. I am working on an iPad app in which I want to do annotation and also make a screen recording of those annotations. The annotation part is working fine, but there is a problem when I start recording: the drawing loses its smoothness and lags during screen recording. For the screen recording I am using AVAssetWriter. The code works for both annotation and screen recording, but I don't know where the problem is.</p> <p><strong>My screenshot size is (1050, 650).</strong> </p> <p>Should I use Grand Central Dispatch to solve this problem? Can anybody help me solve it? Please help.</p> <p><strong>MY CODE</strong></p> <pre><code>// For Annotation - (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event { mouseSwiped = NO; UITouch *touch = [touches anyObject]; if ([touch tapCount] == 2) { drawImage.image = nil; //Double click to undo drawing. return; } lastPoint = [touch locationInView:self.view]; lastPoint.y -= 20; } - (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event { mouseSwiped = YES; UITouch *touch = [touches anyObject]; CGPoint currentPoint = [touch locationInView:self.view]; currentPoint.y -= 20; // UIGraphicsBeginImageContext(canvasView.frame.size); UIGraphicsBeginImageContext(drawImage.frame.size); [drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width,drawImage.frame.size.height)]; CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound); CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 10.0); CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0); CGContextBeginPath(UIGraphicsGetCurrentContext()); CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y); CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, currentPoint.y); CGContextStrokePath(UIGraphicsGetCurrentContext()); drawImage.image = UIGraphicsGetImageFromCurrentImageContext(); 
UIGraphicsEndImageContext(); lastPoint = currentPoint; } - (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event { UITouch *touch = [touches anyObject]; if ([touch tapCount] == 2) { drawImage.image = nil; return; } if(!mouseSwiped) { UIGraphicsBeginImageContext(drawImage.frame.size); [drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width,drawImage.frame.size.height)]; CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound); CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 5.0); CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0); CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y); CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y); CGContextStrokePath(UIGraphicsGetCurrentContext()); CGContextFlush(UIGraphicsGetCurrentContext()); drawImage.image = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); } } //For Screen Recording #define FRAME_WIDTH 1024 #define FRAME_HEIGHT 650 #define TIME_SCALE 600 - (UIImage*)screenshot { UIGraphicsBeginImageContext(drawImage.frame.size); [self.view.layer renderInContext:UIGraphicsGetCurrentContext()]; UIImage *viewImage = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); return viewImage; } -(NSURL*) pathToDocumentsDirectory { NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mov"]; outputURL = [[NSURL alloc] initFileURLWithPath:outputPath]; NSFileManager* fileManager = [NSFileManager defaultManager]; if ([fileManager fileExistsAtPath:outputPath]) { NSError* error; if ([fileManager removeItemAtPath:outputPath error:&amp;error] == NO) { NSLog(@"Could not delete old recording file at path: %@", outputPath); } } [outputPath release]; return [outputURL autorelease]; } -(void) writeSample: (NSTimer*) _timer { if 
(assetWriterInput.readyForMoreMediaData) { // CMSampleBufferRef sample = nil; CVReturn cvErr = kCVReturnSuccess; // get screenshot image! CGImageRef image = (CGImageRef) [[self screenshot] CGImage]; NSLog (@"made screenshot"); // prepare the pixel buffer CVPixelBufferRef pixelBuffer = NULL; CFDataRef imageData= CGDataProviderCopyData(CGImageGetDataProvider(image)); NSLog (@"copied image data"); cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault, FRAME_WIDTH, FRAME_HEIGHT, kCVPixelFormatType_32BGRA, (void*)CFDataGetBytePtr(imageData), CGImageGetBytesPerRow(image), NULL, NULL, NULL, &amp;pixelBuffer); NSLog (@"CVPixelBufferCreateWithBytes returned %d", cvErr); // calculate the time CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent(); CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime; NSLog (@"elapsedTime: %f", elapsedTime); CMTime presentationTime = CMTimeMake (elapsedTime * TIME_SCALE, TIME_SCALE); // write the sample BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime]; if (appended) { NSLog (@"appended sample at time %lf", CMTimeGetSeconds(presentationTime)); } else { NSLog (@"failed to append"); [self stopRecording]; } } } -(void) startRecording { movieURL = [self pathToDocumentsDirectory]; NSLog(@"path=%@",movieURL); movieError = nil; [assetWriter release]; assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType: AVFileTypeQuickTimeMovie error: &amp;movieError]; [self writer]; // start writing samples to it NSDate* start = [NSDate date]; frameRate=40.0f; float processingSeconds = [[NSDate date] timeIntervalSinceDate:start]; delayRemaining = (1.0 / self.frameRate) - processingSeconds; [assetWriterTimer release]; assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:delayRemaining &gt; 0.0 ? 
delayRemaining : 0.01 target:self selector:@selector (writeSample:) userInfo:nil repeats:YES] ; } -(void)writer { NSDictionary *assetWriterInputSettings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey, [NSNumber numberWithInt:FRAME_WIDTH], AVVideoWidthKey, [NSNumber numberWithInt:FRAME_HEIGHT], AVVideoHeightKey, nil]; assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeVideo outputSettings:assetWriterInputSettings]; assetWriterInput.expectsMediaDataInRealTime = YES; [assetWriter addInput:assetWriterInput]; [assetWriterPixelBufferAdaptor release]; assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:assetWriterInput sourcePixelBufferAttributes:nil]; [assetWriter startWriting]; firstFrameWallClockTime = CFAbsoluteTimeGetCurrent(); [assetWriter startSessionAtSourceTime: CMTimeMake(0, TIME_SCALE)]; } -(void) stopRecording { [assetWriterTimer invalidate]; assetWriterTimer = nil; [assetWriter finishWriting]; NSLog (@"finished writing"); } </code></pre>
    singulars
    1. This table or related slice is empty.
    1. This table or related slice is empty.
    plurals
    1. This table or related slice is empty.
    1. This table or related slice is empty.
    1. This table or related slice is empty.
    1. This table or related slice is empty.
 

Querying!

 
Guidance

SQuiL has stopped working due to an internal error.

If you are curious you may find further information in the browser console, which is accessible through the devtools (F12).

Reload