I am trying to overlay an image on a video. Everything works except for one thing: the image comes out distorted.
Here is the code:
// Capture the image
UIGraphicsBeginImageContextWithOptions(self.captureView.bounds.size, false, UIScreen.main.scale)
self.captureView.layer.render(in: UIGraphicsGetCurrentContext()!)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()

let watermarkVideo = WatermakVideo()

// Video file
let videoFile = Bundle.main.path(forResource: "videoTrim", ofType: "mp4")
let videoURL = URL(fileURLWithPath: videoFile!)

let imageFrame = captureView.frame
watermarkVideo.createWatermark(image, frame: imageFrame, video: videoURL)
Here is the WatermakVideo class: https://www.dropbox.com/s/0d6i7ap9qu4klp5/WatermakVideo.zip
I would be grateful if you could help me fix this issue.
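For reference, here is a minimal Swift sketch (not part of WatermakVideo, whose implementation is only linked above) of how the overlay frame could be mapped from view points into the video's pixel coordinates before it is handed to the compositor. `videoSpaceFrame` is a hypothetical helper, and the sketch assumes the distortion comes from passing a frame in screen points where the compositor works in video pixels.

// A minimal sketch, assuming the distortion is a coordinate-space mismatch.
// `videoSpaceFrame` is a hypothetical helper, not part of WatermakVideo.
import AVFoundation
import UIKit

func videoSpaceFrame(for viewFrame: CGRect,
                     in containerBounds: CGRect,
                     videoURL: URL) -> CGRect? {
    let asset = AVURLAsset(url: videoURL)
    guard let track = asset.tracks(withMediaType: .video).first else { return nil }

    // Apply the preferred transform so portrait clips report their display size.
    let transformed = track.naturalSize.applying(track.preferredTransform)
    let videoSize = CGSize(width: abs(transformed.width), height: abs(transformed.height))

    // One uniform scale factor (view width -> video width) keeps the overlay undistorted.
    let scale = videoSize.width / containerBounds.width
    return CGRect(x: viewFrame.origin.x * scale,
                  y: viewFrame.origin.y * scale,
                  width: viewFrame.width * scale,
                  height: viewFrame.height * scale)
}

Usage, under the same assumption, would be something like:

let imageFrame = videoSpaceFrame(for: captureView.frame, in: view.bounds, videoURL: videoURL) ?? captureView.frame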
1 Answer
Copy the code below into your file. I had the same issue and solved it two weeks ago:
- (void)forStackOverflow:(NSURL *)url {
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    // If you need audio as well, add the asset track for audio here
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
    [compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];

    // Scale factor from view points to video pixels
    CGSize sizeOfVideo = compositionVideoTrack.naturalSize;
    CGFloat scaleWidth = sizeOfVideo.height / self.view.frame.size.width;

    // Add the image layer
    // (birdSize, questionString and currentUrl below are properties of my own view controller)
    UIImage *myImage = [UIImage imageNamed:@"YOUR IMAGE PATH"];
    CALayer *layerCa = [CALayer layer];
    layerCa.contents = (id)myImage.CGImage;
    layerCa.frame = CGRectMake(5 * scaleWidth, 0, self.birdSize.width * scaleWidth, self.birdSize.height * scaleWidth);
    layerCa.opacity = 1.0;

    // Add text on top of the image
    CATextLayer *textOfvideo = [[CATextLayer alloc] init];
    textOfvideo.alignmentMode = kCAAlignmentLeft;
    [textOfvideo setFont:(__bridge CFTypeRef)([UIFont fontWithName:@"Arial" size:64.00])];
    [textOfvideo setFrame:CGRectMake(layerCa.frame.size.width / 6,
                                     layerCa.frame.size.height / 8 * 7 - layerCa.frame.size.height / 3,
                                     layerCa.frame.size.width / 1.5,
                                     layerCa.frame.size.height / 3)];
    [textOfvideo setAlignmentMode:kCAAlignmentCenter];
    [textOfvideo setForegroundColor:[[UIColor redColor] CGColor]];

    UILabel *label = [[UILabel alloc] init];
    [label setText:self.questionString];
    label.textAlignment = NSTextAlignmentCenter;
    label.numberOfLines = 4;
    label.adjustsFontSizeToFitWidth = YES;
    [label setFont:[UIFont fontWithName:@"Arial" size:64.00]];
    //[label.layer setBackgroundColor:[[UIColor blackColor] CGColor]];
    [label.layer setFrame:CGRectMake(0, 0, textOfvideo.frame.size.width, textOfvideo.frame.size.height)];
    [textOfvideo addSublayer:label.layer];
    [layerCa addSublayer:textOfvideo];

    // Build the layer tree for the animation tool
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.height, sizeOfVideo.width);
    [parentLayer addSublayer:videoLayer];
    //[parentLayer addSublayer:optionalLayer];
    [parentLayer addSublayer:layerCa];
    [parentLayer setBackgroundColor:[UIColor blueColor].CGColor];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [layerInstruction setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];

    // Swap width and height for the render size (portrait footage)
    CGSize naturalSize = videoTrack.naturalSize;
    videoComposition.renderSize = CGSizeMake(naturalSize.height, naturalSize.width);
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];

    // Export to a timestamped file in Documents
    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
    NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exportSession.videoComposition = videoComposition;
    exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export OK");
                if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
                    UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
                }
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"AVAssetExportSessionStatusFailed: %@", exportSession.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export Cancelled");
                break;
            default:
                break;
        }
        self.currentUrl = exportSession.outputURL;
        dispatch_async(dispatch_get_main_queue(), ^{
        });
    }];
}
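For anyone working in Swift like the asker, here is a condensed sketch of the same AVVideoCompositionCoreAnimationTool technique the answer uses. `makeWatermarkComposition` and `frameInVideoPixels` are illustrative names, the text layer is omitted, and the portrait render-size swap from the answer is left out for brevity; treat it as a sketch under those assumptions, not a drop-in replacement for the answer's method.

// A condensed Swift sketch of the overlay technique above (illustrative names,
// no text layer, no portrait width/height swap).
import AVFoundation
import UIKit

func makeWatermarkComposition(for composition: AVMutableComposition,
                              watermark: UIImage,
                              frameInVideoPixels: CGRect) -> AVMutableVideoComposition? {
    guard let videoTrack = composition.tracks(withMediaType: .video).first else { return nil }
    let renderSize = videoTrack.naturalSize

    // Watermark layer, laid out directly in video pixels.
    let overlay = CALayer()
    overlay.contents = watermark.cgImage
    overlay.frame = frameInVideoPixels

    // The video is rendered into videoLayer; parentLayer stacks the overlay on top.
    let videoLayer = CALayer()
    videoLayer.frame = CGRect(origin: .zero, size: renderSize)
    let parentLayer = CALayer()
    parentLayer.frame = CGRect(origin: .zero, size: renderSize)
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(overlay)

    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = renderSize
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(
        postProcessingAsVideoLayer: videoLayer, in: parentLayer)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: .zero, duration: composition.duration)
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    layerInstruction.setTransform(videoTrack.preferredTransform, at: .zero)
    instruction.layerInstructions = [layerInstruction]
    videoComposition.instructions = [instruction]
    return videoComposition
}

The key design point is the same as in the answer: the parent and video layers are sized to the composition's render size, so the overlay frame must be expressed in video pixels rather than screen points, which is exactly where the distortion in the question comes from.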