//  SwiftDubbingLibPlugin.swift
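//
//  iOS side of the `dubbing_lib` Flutter plugin. The Dart layer talks to this
//  class over the "dubbing_lib" method channel to record AAC clips
//  ("startRecord"), play them back ("playRecordAudio" / "pauseRecordAudio"),
//  and mix the recordings with a video track and background music through
//  `DubbingComposer` ("startMixinAudio" / "startMixinPaintedAudio").
//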

import Flutter
import UIKit
import AVFoundation
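// Timing constants (seconds). `preLag` is applied as the composer's `preTime`
// offset during mixing; `timerInterval` and `preTime` are not referenced in this file.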
private let timerInterval: Double = 0.03
private let preTime: Double = 0.3
private let preLag: Double = 0.15
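/// Coarse recording/playback state of the dubbing screen.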
enum DubbingStatus {
    case stop, record, playRecord
}
public class SwiftDubbingLibPlugin: NSObject, FlutterPlugin {
    var registrar: FlutterPluginRegistrar!
    var channel: FlutterMethodChannel!
    var result: FlutterResult? = nil
    /// Dubbing status of the current screen
    var status: DubbingStatus = .stop
    var isRecording = false
    var audioRecorder: AVAudioRecorder?
    var audioPlayer: AVAudioPlayer?
    var initFlag: Bool = false

    public static func register(with registrar: FlutterPluginRegistrar) {
        let channel = FlutterMethodChannel(name: "dubbing_lib", binaryMessenger: registrar.messenger())
        let instance = SwiftDubbingLibPlugin()
        registrar.addMethodCallDelegate(instance, channel: channel)
        instance.channel = channel
        instance.registrar = registrar
    }
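    /// Entry point for all calls arriving on the "dubbing_lib" method channel;
    /// unpacks the argument map and dispatches to the matching helper.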
    public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
        let args = call.arguments as? [String: Any]
        self.result = result
        switch call.method {
        case "getPlatformVersion":
            result("iOS " + UIDevice.current.systemVersion)
        case "startRecord":
            let duration = args!["duration"] as! Int
            let fileName = args!["fileName"] as! String
            let index = args!["index"] as! Int
            let pathAudio = args!["pathAudio"] as! String
            startRecord(pathAudio: pathAudio, index: index, duration: duration, fileName: fileName, result: result)
        case "playRecordAudio":
            let filePath = args!["fileName"] as! String
            playRecord(filePath: filePath, result: result)
        case "pauseRecordAudio":
            audioPlayer?.pause()
            result(true)
        case "startMixinAudio":
            let videoId = args!["videoId"] as! String
            let videoPath = args!["videoPath"] as! String
            let bgmPath = args!["bgmPath"] as! String
            let audioPathList = args!["audioDecodePaths"] as! [String]
            let startTimeList = args!["startTimeList"] as! [Double]
            let pathVideoMixinDir = args!["pathVideoMixin"] as! String
            // Parenthesized: without the parentheses the cast applies to the whole
            // concatenation ("String + Any?"), which does not compile.
            let outPath = pathVideoMixinDir + (args!["mixinName"] as! String)
            startMixinAudio(videoPath: videoPath, bgmPath: bgmPath, audioPathList: audioPathList, startTimeList: startTimeList, outPath: outPath, result: result)
        case "startMixinPaintedAudio":
            let bgmPath = args!["bgmPath"] as! String
            let audioPathList = args!["audioPaths"] as! [String]
            let startTimeList = args!["startTimeList"] as! [Double]
            let pathVideoMixinDir = args!["encodePath"] as! String
            startMixinAudio(videoPath: nil, bgmPath: bgmPath, audioPathList: audioPathList, startTimeList: startTimeList, outPath: pathVideoMixinDir, result: result)
        case "getIsMediaPlayPause":
            result(audioPlayer?.isPlaying == true)
        case "cleanAudioData":
            break
        case "findIsExistCacheVideo":
            result("")
        case "setExtraFullScreen":
            result("")
        default:
            result(FlutterMethodNotImplemented)
        }
    }
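    /// Puts the shared AVAudioSession into play-and-record mode, routed to the
    /// speaker, before recording or playback starts.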
    func initAudioSession() {
        do {
            // The original `initFlag` guard is disabled, so the session is
            // (re)configured on every call.
            try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
            try AVAudioSession.sharedInstance().setActive(true)
            initFlag = true
        } catch {
            initFlag = false
        }
    }
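    /// Completes the pending FlutterResult with a FlutterError carrying the
    /// given code and message.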
    func resultError(code: String, message: String) -> Void {
        self.result!(FlutterError(code: code, message: message, details: nil))
    }
    /// Mix: combine the recorded clips (and optional video track) with the background music into `outPath`.
    func startMixinAudio(videoPath: String?, bgmPath: String, audioPathList: [String], startTimeList: [Double], outPath: String, result: @escaping FlutterResult) {
        var videoUrl: URL? = nil
        if !(videoPath?.isBlank ?? true) {
            videoUrl = URL(fileURLWithPath: videoPath!)
        }
        let musicUrl = URL(fileURLWithPath: bgmPath)
        let composer = DubbingComposer(timeline: startTimeList, videoUrl: videoUrl, musicUrl: musicUrl, recordsUrl: audioPathList)
        composer.preTime = preLag
        let outputUrl = URL(fileURLWithPath: outPath)
        DispatchQueue.global().async {
            composer.compose(outputUrl, onSuccess: {
                self.result!(outPath)
            }) { (message) in
                print("Mixing failed:", message)
                self.resultError(code: "1005", message: "mix video and audio failed")
            }
        }
    }
}
// MARK: - Recording control
extension SwiftDubbingLibPlugin {
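    /// Starts an AAC recording at `pathAudio + fileName + ".aac"` and reports
    /// progress to Dart via "recordProgress" every 5 ms until `duration`
    /// (milliseconds) is reached, then answers the pending result with the file path.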
    @objc func startRecord(pathAudio: String, index: Int, duration: Int, fileName: String, result: @escaping FlutterResult) {
        initAudioSession()
        endPlay()
        status = .record
        let filePath = pathAudio + fileName + ".aac"
        do {
            let settings: [String: Any] = [
                AVNumberOfChannelsKey: 1,             // channel count
                AVFormatIDKey: kAudioFormatMPEG4AAC,  // recording format
                //AVSampleRateKey: 16000,             // sample rate
                //AVLinearPCMBitDepthKey: 16,         // bits per sample: 8, 16, 24 or 32
                //AVLinearPCMIsFloatKey: true,        // whether to use floating-point samples
            ]
            let url = URL(fileURLWithPath: filePath)
            audioRecorder = try AVAudioRecorder(url: url, settings: settings)
            audioRecorder?.prepareToRecord()
            audioRecorder?.record()
            isRecording = true
        } catch {
            stopRecord()
            resultError(code: "1002", message: "start record failed")
        }
        if isRecording {
            var piecePosition = 0
            let queue = DispatchQueue.global(qos: .default)
            // makeTimerSource already returns a DispatchSourceTimer; the original
            // force-cast to DispatchSource lost access to schedule(deadline:repeating:).
            let timer: DispatchSourceTimer = DispatchSource.makeTimerSource(flags: [], queue: queue)
            timer.schedule(deadline: .now(), repeating: .milliseconds(5))
            timer.setEventHandler {
                self.isRecording = true
                piecePosition += 1
                // Method-channel calls have to be made on the main thread.
                DispatchQueue.main.async {
                    self.channel.invokeMethod("recordProgress", arguments: ["progress": piecePosition * 5])
                }
                if piecePosition * 5 >= duration {
                    timer.cancel()
                    DispatchQueue.main.async {
                        self.result!(filePath)
                    }
                    self.stopRecord()
                }
            }
            timer.resume()
        } else {
            resultError(code: "1002", message: "start record failed")
        }
    }
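    /// Plays back a previously recorded file; completion is reported through the
    /// AVAudioPlayerDelegate callbacks below.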
    @objc func playRecord(filePath: String, result: @escaping FlutterResult) {
        status = .playRecord
        initAudioSession()
        do {
            let url = URL(fileURLWithPath: filePath)
            audioPlayer = try AVAudioPlayer(contentsOf: url)
            audioPlayer?.prepareToPlay()
            audioPlayer?.volume = 1
            audioPlayer?.delegate = self
            DispatchQueue.global().async {
                self.audioPlayer?.play()
            }
        } catch {
            stopPlayRecord()
        }
    }
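    /// Stops whichever activity (recording or playback) is currently running.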
    @objc func endPlay() {
        switch status {
        case .stop:
            break
        case .record:
            stopRecord()
        case .playRecord:
            stopPlayRecord()
        }
    }
    // Stop recording
    @objc func stopRecord() {
        status = .stop
        audioRecorder?.stop()
        audioRecorder = nil
        isRecording = false
    }

    // Stop playing back the recording
    func stopPlayRecord() {
        status = .stop
        if audioPlayer?.isPlaying == true {
            audioPlayer?.stop()
        }
        audioPlayer = nil
    }
}
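// MARK: - AVAudioPlayerDelegate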
extension SwiftDubbingLibPlugin: AVAudioPlayerDelegate {
    public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        stopPlayRecord()
        self.result!(flag)
    }

    public func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
        stopPlayRecord()
        // self.result!(false)
    }
}
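// MARK: - String helpers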
extension String {
    /// True when every character in the string is whitespace (an empty string counts as blank).
    var isBlank: Bool {
        return allSatisfy({ $0.isWhitespace })
    }
}

extension Optional where Wrapped == String {
    /// True when the optional is nil or wraps a blank string.
    var isBlank: Bool {
        return self?.isBlank ?? true
    }
}
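// Illustrative usage of the helpers above:
//   "   ".isBlank            // true
//   "abc".isBlank            // false
//   (nil as String?).isBlank // true (via the Optional overload)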