@@ -1,14 +1,222 @@
 import Flutter
 import UIKit
+import AVFoundation
+
+private let timerInterval: Double = 0.03
+private let preTime: Double = 0.3
+private let preLag: Double = 0.15
+
+enum DubbingStatus {
+    case stop, record, playRecord
+}
 
 public class SwiftDubbingLibPlugin: NSObject, FlutterPlugin {
-  public static func register(with registrar: FlutterPluginRegistrar) {
-    let channel = FlutterMethodChannel(name: "dubbing_lib", binaryMessenger: registrar.messenger())
-    let instance = SwiftDubbingLibPlugin()
-    registrar.addMethodCallDelegate(instance, channel: channel)
-  }
-
-  public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
-    result("iOS " + UIDevice.current.systemVersion)
-  }
+
+    var registrar: FlutterPluginRegistrar!
+    var channel: FlutterMethodChannel!
+
+    var result: FlutterResult? = nil
+
+    /// Dubbing status of the current screen
+    var status: DubbingStatus = .stop
+    var isRecording = false
+
+    var audioRecorder: AVAudioRecorder?
+    var audioPlayer: AVAudioPlayer?
+
+    var initFlag: Bool = false
+
+    public static func register(with registrar: FlutterPluginRegistrar) {
+        let channel = FlutterMethodChannel(name: "dubbing_lib", binaryMessenger: registrar.messenger())
+        let instance = SwiftDubbingLibPlugin()
+        registrar.addMethodCallDelegate(instance, channel: channel)
+        instance.channel = channel
+        instance.registrar = registrar
+    }
+
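+    /// Routes method calls arriving on the "dubbing_lib" channel to the native recording, playback, and mixing helpers below.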
+    public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+        let args = call.arguments as? [String: Any]
+        self.result = result
+        switch call.method {
+        case "getPlatformVersion":
+            result("iOS " + UIDevice.current.systemVersion)
+            break
+        case "startRecord":
+            let duration = args!["duration"] as! Int
+            let fileName = args!["fileName"] as! String
+            let index = args!["index"] as! Int
+            let pathAudio = args!["pathAudio"] as! String
+            startRecord(pathAudio: pathAudio, index: index, duration: duration, fileName: fileName, result: result)
+            break
+        case "playRecordAudio":
+            let filePath = args!["fileName"] as! String
+            playRecord(filePath: filePath, result: result)
+            break
+        case "pauseRecordAudio":
+            audioPlayer?.pause()
+            result(true)
+            break
+        case "startMixinAudio":
+            let videoId = args!["videoId"] as! String
+            let videoPath = args!["videoPath"] as! String
+            let bgmPath = args!["bgmPath"] as! String
+            let audioPathList = args!["audioPathList"] as! [String]
+            let startTimeList = args!["startTimeList"] as! [Double]
+            let pathVideoMixinDir = args!["pathVideoMixin"] as! String
+            let outPath = pathVideoMixinDir + "\(videoId)_mix.mp4"
+            startMixinAudio(videoPath: videoPath, bgmPath: bgmPath, audioPathList: audioPathList, startTimeList: startTimeList, outPath: outPath, result: result)
+            break
+        case "getIsMediaPlayPause":
+            result(audioPlayer != nil && audioPlayer!.isPlaying)
+            break
+        case "cleanAudioData":
+            break
+        case "findIsExistCacheVideo":
+            result("")
+            break
+        case "setExtraFullScreen":
+            result("")
+            break
+        default:
+            result(FlutterMethodNotImplemented)
+        }
+    }
+
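+    /// Activates a shared play-and-record audio session once; if activation fails, the flag is cleared so the next call retries.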
+    func initAudioSession() {
+        do {
+            if !initFlag {
+                try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
+                try AVAudioSession.sharedInstance().setActive(true)
+                initFlag = true
+            }
+        } catch {
+            initFlag = false
+        }
+    }
+
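+    /// Completes the pending Flutter call with a FlutterError.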
+    func resultError(code: String, message: String) {
+        self.result!(FlutterError(code: code, message: message, details: nil))
+    }
+
+    /// Mixes the original video, the background music, and the recorded clips into a single output file.
+    func startMixinAudio(videoPath: String, bgmPath: String, audioPathList: [String], startTimeList: [Double], outPath: String, result: @escaping FlutterResult) {
+        let videoUrl = URL(fileURLWithPath: videoPath)
+        let musicUrl = URL(fileURLWithPath: bgmPath)
+        let composer = DubbingComposer(timeline: startTimeList, videoUrl: videoUrl, musicUrl: musicUrl, recordsUrl: audioPathList)
+        composer.preTime = preLag
+        let outputUrl = URL(fileURLWithPath: outPath)
+        DispatchQueue.global().async {
+            composer.compose(outputUrl, onSuccess: {
+                self.result!(outPath)
+            }) { (message) in
+                print("compose failed", message)
+                self.resultError(code: "1005", message: "mix video and audio failed")
+            }
+        }
+    }
+}
+
+
+// MARK: - Recording control
+extension SwiftDubbingLibPlugin {
+
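+    /// Records an AAC clip to pathAudio + fileName + ".aac" and streams "recordProgress" callbacks to Dart until `duration` milliseconds have elapsed.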
+    @objc func startRecord(pathAudio: String, index: Int, duration: Int, fileName: String, result: @escaping FlutterResult) {
+        initAudioSession()
+        endPlay()
+        status = .record
+        let filePath = pathAudio + fileName + ".aac"
+        do {
+            let settings: [String: Any] = [
+                AVNumberOfChannelsKey: 1,             // number of channels
+                AVFormatIDKey: kAudioFormatMPEG4AAC,  // recording format
+                // AVSampleRateKey: 16000,            // sample rate
+                // AVLinearPCMBitDepthKey: 16,        // bits per sample: 8, 16, 24, or 32
+                // AVLinearPCMIsFloatKey: true,       // whether to use floating-point samples
+            ]
+            let url = URL(fileURLWithPath: filePath)
+            audioRecorder = try AVAudioRecorder(url: url, settings: settings)
+            audioRecorder?.prepareToRecord()
+            audioRecorder?.record()
+            isRecording = true
+        } catch {
+            stopRecord()
+            resultError(code: "1002", message: "start record failed")
+        }
+        if isRecording {
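+            // Repeating GCD timer: report progress to Dart every 5 ms and finish once the requested duration is reached.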
+            var piecePosition = 0
+            let queue = DispatchQueue.global(qos: .default)
+            let _timer = DispatchSource.makeTimerSource(flags: [], queue: queue)
+            _timer.schedule(deadline: DispatchTime.now(), repeating: .milliseconds(5))
+            _timer.setEventHandler(handler: { () -> Void in
+                self.isRecording = true
+                piecePosition += 1
+                self.channel.invokeMethod("recordProgress", arguments: ["progress": piecePosition * 5])
+                if piecePosition * 5 >= duration {
+                    _timer.cancel()
+                    self.result!(filePath)
+                    self.stopRecord()
+                }
+            })
+            _timer.resume()
+        } else {
+            resultError(code: "1002", message: "start record failed")
+        }
+    }
+
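+    /// Plays back the recorded clip at filePath; completion and decode errors are reported through the AVAudioPlayerDelegate callbacks.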
+    @objc func playRecord(filePath: String, result: @escaping FlutterResult) {
+        status = .playRecord
+        initAudioSession()
+        do {
+            let url = URL(fileURLWithPath: filePath)
+            audioPlayer = try AVAudioPlayer(contentsOf: url)
+            audioPlayer?.prepareToPlay()
+            audioPlayer?.volume = 1
+            audioPlayer?.delegate = self
+            DispatchQueue.global().async {
+                self.audioPlayer?.play()
+            }
+        } catch {
+            stopPlayRecord()
+        }
+    }
+
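+    /// Stops whichever activity is currently in progress (recording or playback) before a new one starts.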
+    @objc func endPlay() {
+        switch status {
+        case .stop:
+            break
+        case .record:
+            stopRecord()
+        case .playRecord:
+            stopPlayRecord()
+        }
+    }
+
+    // Stop recording
+    @objc func stopRecord() {
+        status = .stop
+        audioRecorder?.stop()
+        audioRecorder = nil
+        isRecording = false
+    }
+
+    // Stop playing back the recording
+    func stopPlayRecord() {
+        status = .stop
+        if audioPlayer?.isPlaying == true {
+            audioPlayer?.stop()
+        }
+        audioPlayer = nil
+    }
+}
+
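+// MARK: - AVAudioPlayerDelegate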
+extension SwiftDubbingLibPlugin: AVAudioPlayerDelegate {
+    public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
+        stopPlayRecord()
+        self.result!(flag)
+    }
+
+    public func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
+        stopPlayRecord()
+        // self.result!(false)
+    }
 }