// FffmpegPlugin.m

#import "FffmpegPlugin.h"
#import <AVFoundation/AVFoundation.h>

@implementation FffmpegPlugin

+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
  FlutterMethodChannel* channel = [FlutterMethodChannel
      methodChannelWithName:@"fffmpeg"
            binaryMessenger:[registrar messenger]];
  FffmpegPlugin* instance = [[FffmpegPlugin alloc] init];
  [registrar addMethodCallDelegate:instance channel:channel];
}
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
  if ([@"getPlatformVersion" isEqualToString:call.method]) {
    result([@"iOS " stringByAppendingString:[[UIDevice currentDevice] systemVersion]]);
  } else if ([@"exeCommand" isEqualToString:call.method]) {
    NSDictionary* arguments = call.arguments[@"arguments"];
    NSString *input = arguments[@"inputPath"];
    NSString *output = arguments[@"outputPath"];
    NSLog(@"%@", input);
    NSLog(@"%@", output);
    // test hook; the watermark call is still commented out, so no result is sent back yet
    // [self addWaterPicWithVideoPath1:input outPath:output result:result];
  } else if ([@"addwaterMark" isEqualToString:call.method]) {
    NSDictionary* arguments = call.arguments[@"arguments"];
    NSString *input = arguments[@"inputPath"];
    NSString *output = arguments[@"outputPath"];
    NSLog(@"%@", input);
    NSLog(@"%@", output);
    NSString *watermarkPath = arguments[@"watermarkPath"];
    // LeftTop / RightTop / RightBottom / LeftBottom
    NSString *position = arguments[@"position"];
    [self addWaterPicWithVideoPath:input outPath:output result:result watermarkPath:watermarkPath position:position];
  } else {
    result(FlutterMethodNotImplemented);
  }
}
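
// Method channel contract implemented above:
//   "getPlatformVersion" -> returns "iOS <system version>"
//   "exeCommand"         -> expects {arguments: {inputPath, outputPath}}; test stub only
//   "addwaterMark"       -> expects {arguments: {inputPath, outputPath, watermarkPath, position}}
//                           where position is LeftTop, RightTop, RightBottom or LeftBottom;
//                           returns outputPath on success, "notok" on failure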
- (void)addWaterPicWithVideoPath:(NSString*)path outPath:(NSString*)outPath result:(FlutterResult)result watermarkPath:(NSString*)watermarkPath position:(NSString*)position
{
  // 1 - Create the AVAsset instance and an empty composition
  AVURLAsset *videoAsset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:path]];
  AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
  // 2 - Video track
  AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                       preferredTrackID:kCMPersistentTrackID_Invalid];
  [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                      ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject]
                       atTime:kCMTimeZero error:nil];
  // 3 - Audio track (guard against source videos that have no audio)
  AVAssetTrack *sourceAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
  if (sourceAudioTrack != nil) {
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                        ofTrack:sourceAudioTrack
                         atTime:kCMTimeZero error:nil];
  }
  // 3.1 - AVMutableVideoCompositionInstruction: a single instruction covering the whole
  //       timeline; scaling, rotation, etc. would be applied through it
  AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
  mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
  // 3.2 - AVMutableVideoCompositionLayerInstruction: per-track instruction covering all
  //       the material on that track
  AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
  [videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
  // 3.3 - Add instructions
  mainInstruction.layerInstructions = @[videolayerInstruction];
  // AVMutableVideoComposition: manages all video tracks; the watermark is added on top of it
  AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
  AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
  CGSize naturalSize = videoAssetTrack.naturalSize;
  mainCompositionInst.renderSize = CGSizeMake(naturalSize.width, naturalSize.height);
  mainCompositionInst.instructions = @[mainInstruction];
  mainCompositionInst.frameDuration = CMTimeMake(1, 30);
  [self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize watermarkPath:watermarkPath position:position];
  // 4 - Output URL (the caller-supplied outPath)
  NSURL *videoUrl = [NSURL fileURLWithPath:outPath];
  // 5 - Export the composed video file
  AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                    presetName:AVAssetExportPresetHighestQuality];
  exporter.outputURL = videoUrl;
  exporter.outputFileType = AVFileTypeMPEG4;
  exporter.shouldOptimizeForNetworkUse = YES;
  exporter.videoComposition = mainCompositionInst;
  [exporter exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
      if (exporter.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"ios-addmark-ok");
        // Save the exported file to the photo album
        UISaveVideoAtPathToSavedPhotosAlbum(outPath, nil, nil, nil);
        result(outPath);
      } else if (exporter.status == AVAssetExportSessionStatusFailed) {
        NSLog(@"not-ok");
        NSLog(@"%@", exporter.error.localizedDescription);
        result(@"notok");
      }
    });
  }];
}
/**
 Places the watermark layer over the video.

 @param composition   The video composition to attach the animation tool to
 @param size          The render size of the video
 @param watermarkPath Path of the watermark image on disk
 @param position      One of LeftTop / RightTop / RightBottom / LeftBottom
 */
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size watermarkPath:(NSString*)watermarkPath position:(NSString*)position
{
  // Optional text overlay, kept for reference:
  // CATextLayer *subtitle1Text = [[CATextLayer alloc] init];
  // [subtitle1Text setFont:@"Helvetica-Bold"];
  // [subtitle1Text setFontSize:36];
  // [subtitle1Text setFrame:CGRectMake(10, size.height-10-100, size.width, 100)];
  // [subtitle1Text setString:@"ZHIMABAOBAO"];
  // [subtitle1Text setAlignmentMode:kCAAlignmentCenter];
  // [subtitle1Text setForegroundColor:[[UIColor whiteColor] CGColor]];

  // Watermark image layer
  NSData *imageData = [NSData dataWithContentsOfFile:watermarkPath];
  UIImage *image2 = [UIImage imageWithData:imageData];
  CALayer *picLayer = [CALayer layer];
  picLayer.contents = (id)image2.CGImage;
  NSUInteger width = image2.size.width;
  NSUInteger height = image2.size.height;
  // TODO: consider passing this padding in from the Dart side
  NSInteger logoPadding = 20;
  // Layer coordinates here have a bottom-left origin, so the "Top" positions flip the y value.
  // Supported positions: LeftTop / RightTop / RightBottom / LeftBottom
  if ([@"LeftTop" isEqualToString:position]) {
    picLayer.frame = CGRectMake(logoPadding, size.height-height-logoPadding, width, height);
  } else if ([@"RightTop" isEqualToString:position]) {
    picLayer.frame = CGRectMake(size.width-width-logoPadding, size.height-height-logoPadding, width, height);
  } else if ([@"RightBottom" isEqualToString:position]) {
    picLayer.frame = CGRectMake(size.width-width-logoPadding, logoPadding, width, height);
  } else if ([@"LeftBottom" isEqualToString:position]) {
    picLayer.frame = CGRectMake(logoPadding, logoPadding, width, height);
  }
  // 2 - The usual overlay
  CALayer *overlayLayer = [CALayer layer];
  [overlayLayer addSublayer:picLayer];
  overlayLayer.frame = CGRectMake(0, 0, size.width, size.height);
  [overlayLayer setMasksToBounds:YES];

  CALayer *parentLayer = [CALayer layer];
  CALayer *videoLayer = [CALayer layer];
  parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
  videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
  [parentLayer addSublayer:videoLayer];
  [parentLayer addSublayer:overlayLayer];

  composition.animationTool = [AVVideoCompositionCoreAnimationTool
      videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}

@end
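
// Usage sketch (illustrative only, not part of the plugin): driving the handler
// directly with a hand-built FlutterMethodCall. In a real app the Dart side sends
// this over the "fffmpeg" channel; the paths below are placeholders.
//
//   FffmpegPlugin *instance = [[FffmpegPlugin alloc] init];
//   FlutterMethodCall *call =
//       [FlutterMethodCall methodCallWithMethodName:@"addwaterMark"
//                                         arguments:@{ @"arguments" : @{
//                                             @"inputPath"     : @"/path/to/in.mp4",
//                                             @"outputPath"    : @"/path/to/out.mp4",
//                                             @"watermarkPath" : @"/path/to/logo.png",
//                                             @"position"      : @"RightBottom" } }];
//   [instance handleMethodCall:call result:^(id value) { NSLog(@"%@", value); }];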