SwiftDubbingLibPlugin.swift

import Flutter
import UIKit
import AVFoundation
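// Timing constants in seconds. Only preLag is referenced in this file, as the
// pre-roll offset applied to the DubbingComposer below.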
private let timerInterval: Double = 0.03
private let preTime: Double = 0.3
private let preLag: Double = 0.15
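/// Recording / playback state of the plugin.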
enum DubbingStatus {
    case stop, record, playRecord
}
public class SwiftDubbingLibPlugin: NSObject, FlutterPlugin {
    var registrar: FlutterPluginRegistrar!
    var channel: FlutterMethodChannel!
    var result: FlutterResult? = nil
    /// Current dubbing state of the screen
    var status: DubbingStatus = .stop
    var isRecording = false
    var audioRecorder: AVAudioRecorder?
    var audioPlayer: AVAudioPlayer?
    var initFlag: Bool = false
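    /// Registers the plugin with Flutter and wires the "dubbing_lib" method channel to a plugin instance.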
    public static func register(with registrar: FlutterPluginRegistrar) {
        let channel = FlutterMethodChannel(name: "dubbing_lib", binaryMessenger: registrar.messenger())
        let instance = SwiftDubbingLibPlugin()
        registrar.addMethodCallDelegate(instance, channel: channel)
        instance.channel = channel
        instance.registrar = registrar
    }
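    /// Entry point for all method-channel calls from Dart; dispatches on the method name
    /// and replies through the provided FlutterResult.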
    public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
        let args = call.arguments as? [String: Any]
        self.result = result
        switch call.method {
        case "getPlatformVersion":
            result("iOS " + UIDevice.current.systemVersion)
        case "startRecord":
            let duration = args!["duration"] as! Int
            let fileName = args!["fileName"] as! String
            let index = args!["index"] as! Int
            let pathAudio = args!["pathAudio"] as! String
            startRecord(pathAudio: pathAudio, index: index, duration: duration, fileName: fileName, result: result)
        case "playRecordAudio":
            let filePath = args!["fileName"] as! String
            playRecord(filePath: filePath, result: result)
        case "pauseRecordAudio":
            audioPlayer?.pause()
            result(true)
        case "startMixinAudio":
            let videoId = args!["videoId"] as! String
            let videoPath = args!["videoPath"] as! String
            let bgmPath = args!["bgmPath"] as! String
            let audioPathList = args!["audioPathList"] as! [String]
            let startTimeList = args!["startTimeList"] as! [Double]
            let pathVideoMixinDir = args!["pathVideoMixin"] as! String
            let outPath = pathVideoMixinDir + "\(videoId)_mix.mp4"
            startMixinAudio(videoPath: videoPath, bgmPath: bgmPath, audioPathList: audioPathList, startTimeList: startTimeList, outPath: outPath, result: result)
        case "startMixinPaintedAudio":
            // Audio-only mix: no video track, the output goes straight to the encode path.
            let bgmPath = args!["bgmPath"] as! String
            let audioPathList = args!["audioPaths"] as! [String]
            let startTimeList = args!["durationList"] as! [Double]
            let encodePath = args!["encodePath"] as! String
            startMixinAudio(videoPath: nil, bgmPath: bgmPath, audioPathList: audioPathList, startTimeList: startTimeList, outPath: encodePath, result: result)
        case "getIsMediaPlayPause":
            result(audioPlayer?.isPlaying ?? false)
        case "cleanAudioData":
            // No-op on iOS. Note that `result` is never invoked here, so the Dart future does not complete.
            break
        case "findIsExistCacheVideo":
            result("")
        case "setExtraFullScreen":
            result("")
        default:
            result(FlutterMethodNotImplemented)
        }
    }
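    /// Lazily configures the shared AVAudioSession for simultaneous playback and recording,
    /// routed to the speaker. If activation fails, the flag stays false so the next call retries.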
    func initAudioSession() {
        do {
            if !initFlag {
                try AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
                try AVAudioSession.sharedInstance().setActive(true)
                initFlag = true
            }
        } catch {
            initFlag = false
        }
    }
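    /// Completes the pending Flutter call with a FlutterError.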
    func resultError(code: String, message: String) {
        self.result?(FlutterError(code: code, message: message, details: nil))
    }
    /// Mixes the background music and the recorded clips (and, when present, the source video)
    /// into a single file at outPath.
    func startMixinAudio(videoPath: String?, bgmPath: String, audioPathList: [String], startTimeList: [Double], outPath: String, result: @escaping FlutterResult) {
        // A blank or nil video path means audio-only mixing; DubbingComposer is assumed to accept a nil video URL for that case.
        let videoUrl: URL? = videoPath.isBlank ? nil : URL(fileURLWithPath: videoPath!)
        let musicUrl = URL(fileURLWithPath: bgmPath)
        let composer = DubbingComposer(timeline: startTimeList, videoUrl: videoUrl, musicUrl: musicUrl, recordsUrl: audioPathList)
        composer.preTime = preLag
        let outputUrl = URL(fileURLWithPath: outPath)
        DispatchQueue.global().async {
            composer.compose(outputUrl, onSuccess: {
                // Reply to Dart on the main thread.
                DispatchQueue.main.async { self.result?(outPath) }
            }) { (message) in
                print("Mixing failed:", message)
                DispatchQueue.main.async { self.resultError(code: "1005", message: "mix video and audio failed") }
            }
        }
    }
}
// MARK: - Recording control
extension SwiftDubbingLibPlugin {
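    /// Starts an AAC recording at `pathAudio + fileName + ".aac"`, reports progress to Dart
    /// via the "recordProgress" callback, and completes the call with the file path once
    /// `duration` (milliseconds) has elapsed.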
    @objc func startRecord(pathAudio: String, index: Int, duration: Int, fileName: String, result: @escaping FlutterResult) {
        initAudioSession()
        endPlay()
        status = .record
        let filePath = pathAudio + fileName + ".aac"
        do {
            let settings: [String: Any] = [
                AVNumberOfChannelsKey: 1,              // channel count
                AVFormatIDKey: kAudioFormatMPEG4AAC,   // recording format
                //AVSampleRateKey: 16000,              // sample rate
                //AVLinearPCMBitDepthKey: 16,          // bits per sample: 8, 16, 24 or 32
                //AVLinearPCMIsFloatKey: true,         // whether to use floating-point samples
            ]
            let url = URL(fileURLWithPath: filePath)
            audioRecorder = try AVAudioRecorder(url: url, settings: settings)
            audioRecorder?.prepareToRecord()
            audioRecorder?.record()
            isRecording = true
        } catch {
            stopRecord()
            resultError(code: "1002", message: "start record failed")
            return
        }
        if isRecording {
            // Drive progress with a 5 ms GCD timer: notify Dart on every tick and finish
            // once the requested duration (in milliseconds) has elapsed.
            var piecePosition = 0
            let queue = DispatchQueue.global(qos: .default)
            let timer = DispatchSource.makeTimerSource(queue: queue)
            timer.schedule(deadline: .now(), repeating: .milliseconds(5))
            timer.setEventHandler {
                self.isRecording = true
                piecePosition += 1
                let progress = piecePosition * 5
                DispatchQueue.main.async {
                    // Method-channel calls must be made on the main thread.
                    self.channel.invokeMethod("recordProgress", arguments: ["progress": progress])
                }
                if progress >= duration {
                    timer.cancel()
                    DispatchQueue.main.async { self.result?(filePath) }
                    self.stopRecord()
                }
            }
            timer.resume()
        } else {
            resultError(code: "1002", message: "start record failed")
        }
    }
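    /// Plays back a recorded clip from `filePath`; the pending FlutterResult is completed from
    /// the AVAudioPlayerDelegate callbacks below rather than from this method.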
    @objc func playRecord(filePath: String, result: @escaping FlutterResult) {
        status = .playRecord
        initAudioSession()
        do {
            let url = URL(fileURLWithPath: filePath)
            audioPlayer = try AVAudioPlayer(contentsOf: url)
            audioPlayer?.prepareToPlay()
            audioPlayer?.volume = 1
            audioPlayer?.delegate = self
            DispatchQueue.global().async {
                self.audioPlayer?.play()
            }
        } catch {
            stopPlayRecord()
        }
    }
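    /// Stops whichever activity is currently in progress, if any.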
    @objc func endPlay() {
        switch status {
        case .stop:
            break
        case .record:
            stopRecord()
        case .playRecord:
            stopPlayRecord()
        }
    }
    // Stop recording
    @objc func stopRecord() {
        status = .stop
        audioRecorder?.stop()
        audioRecorder = nil
        isRecording = false
    }
    // Stop playing back the recording
    func stopPlayRecord() {
        status = .stop
        if audioPlayer?.isPlaying == true {
            audioPlayer?.stop()
        }
        audioPlayer = nil
    }
}
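// MARK: - AVAudioPlayerDelegate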
extension SwiftDubbingLibPlugin: AVAudioPlayerDelegate {
    public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        stopPlayRecord()
        self.result?(flag)
    }
    public func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
        stopPlayRecord()
        // self.result!(false)
    }
}
extension String {
    /// True when the string contains only whitespace (an empty string counts as blank).
    var isBlank: Bool {
        return allSatisfy { $0.isWhitespace }
    }
}

extension Optional where Wrapped == String {
    /// Treats nil the same as a blank string.
    var isBlank: Bool {
        return self?.isBlank ?? true
    }
}