- import Flutter
- import UIKit
- import AVFoundation
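- // Timing constants; `preLag` is assigned to the composer's `preTime` before mixing.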
- private let timerInterval: Double = 0.03
- private let preTime: Double = 0.3
- private let preLag: Double = 0.15
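- /// State of the dubbing page: stopped, recording, or playing back a recording.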
- enum DubbingStatus {
- case stop, record, playRecord
- }
- public class SwiftDubbingLibPlugin: NSObject, FlutterPlugin {
-
- var registrar: FlutterPluginRegistrar!
- var channel: FlutterMethodChannel!
-
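- /// Pending result callback of the current method-channel call, completed asynchronously.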
- var result: FlutterResult? = nil
-
- /// Current dubbing state of the page
- var status: DubbingStatus = .stop
- var isRecording = false
-
- var audioRecorder: AVAudioRecorder?
- var audioPlayer: AVAudioPlayer?
-
- var initFlag: Bool = false
-
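- /// Registers the plugin with Flutter and wires the "dubbing_lib" method channel to a new plugin instance.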
- public static func register(with registrar: FlutterPluginRegistrar) {
- let channel = FlutterMethodChannel(name: "dubbing_lib", binaryMessenger: registrar.messenger())
- let instance = SwiftDubbingLibPlugin()
- registrar.addMethodCallDelegate(instance, channel: channel)
- instance.channel = channel
- instance.registrar = registrar
- }
-
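- /// Dispatches incoming method-channel calls from the Dart side.
- /// Argument extraction uses force-unwrapping, so the Dart caller must supply every key a given method expects.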
- public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
- let args = call.arguments as? [String: Any];
- self.result = result;
- switch call.method {
- case "getPlatformVersion":
- result("iOS " + UIDevice.current.systemVersion)
- break
- case "startRecord":
- let duration = args!["duration"] as! Int;
- let fileName = args!["fileName"] as! String;
- let index = args!["index"] as! Int;
- let pathAudio = args!["pathAudio"] as! String;
- startRecord(pathAudio: pathAudio, index: index, duration: duration, fileName: fileName, result: result)
- break
- case "playRecordAudio":
- let filePath = args!["fileName"] as! String;
- playRecord(filePath: filePath, result: result)
- break
- case "pauseRecordAudio":
- audioPlayer?.pause()
- result(true)
- break
- case "startMixinAudio":
- let videoId = args!["videoId"] as! String;
- let videoPath = args!["videoPath"] as! String;
- let bgmPath = args!["bgmPath"] as! String;
- let audioPathList = args!["audioDecodePaths"] as! [String];
- let startTimeList = args!["startTimeList"] as! [Double];
- let pathVideoMixinDir = args!["pathVideoMixin"] as! String;
- let outPath = pathVideoMixinDir + (args!["mixinName"] as! String);
- startMixinAudio(videoPath: videoPath, bgmPath: bgmPath, audioPathList: audioPathList, startTimeList: startTimeList, outPath: outPath, result: result)
- break
- case "startMixinPaintedAudio":
- let bgmPath = args!["bgmPath"] as! String;
- let audioPathList = args!["audioPaths"] as! [String];
- let startTimeList = args!["startTimeList"] as! [Double];
- let pathVideoMixinDir = args!["encodePath"] as! String;
- startMixinAudio(videoPath: nil, bgmPath: bgmPath, audioPathList: audioPathList, startTimeList: startTimeList, outPath: pathVideoMixinDir, result: result)
- break;
- case "getIsMediaPlayPause":
- result(audioPlayer != nil && audioPlayer!.isPlaying)
- break
- case "cleanAudioData":
- break
- case "findIsExistCacheVideo":
- result("")
- break
- case "setExtraFullScreen":
- result("")
- break
- default:
- result(FlutterMethodNotImplemented)
- }
- }
-
- func initAudioSession() {
- do {
- // Configure the shared session for simultaneous playback and recording,
- // routing output to the speaker; initFlag records the last outcome.
- try AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
- try AVAudioSession.sharedInstance().setActive(true)
- initFlag = true
- } catch {
- initFlag = false
- }
- }
-
- func resultError(code: String, message: String) {
- // Optional chaining so a missing result callback does not crash the plugin.
- result?(FlutterError(code: code, message: message, details: nil))
- }
-
- /// Mixes the recorded clips and background music (and, when present, the video track) into a single output file.
- func startMixinAudio(videoPath: String?, bgmPath: String, audioPathList: [String], startTimeList: [Double], outPath: String, result: @escaping FlutterResult) {
- var videoUrl: URL? = nil
- if !videoPath.isBlank {
- videoUrl = URL(fileURLWithPath: videoPath!)
- }
- let musicUrl = URL(fileURLWithPath: bgmPath)
- let composer = DubbingComposer(timeline: startTimeList, videoUrl: videoUrl, musicUrl: musicUrl, recordsUrl: audioPathList)
-
- composer.preTime = preLag
- let outputUrl = URL(fileURLWithPath: outPath)
- DispatchQueue.global().async {
- composer.compose(outputUrl, onSuccess: {
- // Flutter results must be delivered on the main (platform) thread.
- DispatchQueue.main.async { result(outPath) }
- }) { (message) in
- print("compose failed", message)
- DispatchQueue.main.async {
- result(FlutterError(code: "1005", message: "mix video and audio failed", details: nil))
- }
- }
- }
- }
- }
- // MARK: - Recording control
- extension SwiftDubbingLibPlugin {
-
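- /// Records mono AAC audio to `pathAudio + fileName + ".aac"` for up to `duration`
- /// milliseconds, reporting progress to Dart through the `recordProgress` callback.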
- @objc func startRecord(pathAudio: String, index: Int, duration: Int, fileName: String, result: @escaping FlutterResult) {
- initAudioSession()
- endPlay()
- status = .record
- let filePath = pathAudio + fileName + ".aac";
- do {
- let settings: [String: Any] = [
- AVNumberOfChannelsKey: 1, // record a single (mono) channel
- AVFormatIDKey: kAudioFormatMPEG4AAC, // AAC encoding
- //AVSampleRateKey: 16000, // sample rate in Hz
- //AVLinearPCMBitDepthKey: 16, // bits per sample: 8, 16, 24 or 32
- //AVLinearPCMIsFloatKey: true, // whether samples are stored as floating point
- ]
- let url = URL(fileURLWithPath: filePath)
-
- audioRecorder = try AVAudioRecorder(url: url, settings: settings)
- audioRecorder?.prepareToRecord()
- audioRecorder?.record()
- isRecording = true
- } catch {
- stopRecord()
- resultError(code: "1002", message: "start record failed")
- return // the failure has already been reported; skip the progress timer below
- }
- if isRecording {
- var piecePosition = 0
- let queue = DispatchQueue.global(qos: .default)
- let timer = DispatchSource.makeTimerSource(queue: queue)
- // Tick every 5 ms and report the elapsed time (in milliseconds) to Dart.
- timer.schedule(deadline: .now(), repeating: .milliseconds(5))
- timer.setEventHandler {
- piecePosition += 1
- let elapsed = piecePosition * 5
- // Channel calls and results must be delivered on the main (platform) thread.
- DispatchQueue.main.async {
- self.channel.invokeMethod("recordProgress", arguments: ["progress": elapsed])
- }
- if elapsed >= duration {
- timer.cancel()
- DispatchQueue.main.async {
- result(filePath)
- self.stopRecord()
- }
- }
- }
- timer.resume()
- } else {
- resultError(code: "1002", message: "start record failed")
- }
- }
-
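- /// Plays back a recorded file at `filePath`; the pending Flutter result is completed
- /// from the AVAudioPlayerDelegate callback when playback finishes.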
- @objc func playRecord(filePath: String, result: @escaping FlutterResult) {
- status = .playRecord
- initAudioSession()
- do {
- let url = URL(fileURLWithPath: filePath)
- audioPlayer = try AVAudioPlayer(contentsOf: url)
- audioPlayer?.prepareToPlay()
- audioPlayer?.volume = 1
- audioPlayer?.delegate = self
- DispatchQueue.global().async {
- self.audioPlayer?.play()
- }
- } catch {
- stopPlayRecord()
- // Report playback failure to Dart so the call does not hang (error code 1003 assumed here).
- resultError(code: "1003", message: "play record failed")
- }
- }
-
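- /// Stops whichever activity is currently in progress (recording or playback).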
- @objc func endPlay() {
- switch status {
- case .stop:
- break
- case .record:
- stopRecord()
- case .playRecord:
- stopPlayRecord()
- }
- }
-
- // Stop recording
- @objc func stopRecord() {
- status = .stop
- audioRecorder?.stop()
- audioRecorder = nil
- isRecording = false
- }
-
- // Stop playing back the recorded audio
- func stopPlayRecord() {
- status = .stop
- if audioPlayer?.isPlaying == true {
- audioPlayer?.stop()
- }
- audioPlayer = nil
- }
- }
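- // MARK: - AVAudioPlayerDelegate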
- extension SwiftDubbingLibPlugin: AVAudioPlayerDelegate {
- public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
- stopPlayRecord()
- // Optional chaining avoids a crash if no call is awaiting this playback.
- self.result?(flag)
- }
-
- public func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
- stopPlayRecord()
- // self.result!(false)
- }
- }
- extension String {
-
- /// `true` when the string is empty or contains only whitespace characters.
- var isBlank: Bool {
- return allSatisfy { $0.isWhitespace }
- }
- }
-
- extension Optional where Wrapped == String {
-
- /// `true` when the optional is nil, empty, or contains only whitespace.
- var isBlank: Bool {
- return self?.isBlank ?? true
- }
- }