Browse Source

ios语音功能

gaoyf 6 years ago
parent
commit
4add610659

+ 105 - 0
ios/Classes/DubbingComposer.swift

@@ -0,0 +1,105 @@
+//
+//  DubbingComposer.swift
+//  dubit
+//
+//  Created by zack on 16/12/20.
+//  Copyright © 2016年 Chengdu Aitu Education Technology Ltd. All rights reserved.
+//
+
+import Foundation
+import AVFoundation
+
+/// Mixes a video track, a background-music track and a list of recorded
+/// dubbing clips into a single MP4 file.
+class DubbingComposer {
+    /// Start time (seconds) of each recording, parallel to `recordsUrl`.
+    var timeline: [Double]
+    var videoUrl: URL
+    var musicUrl: URL
+    /// File-system paths of the recorded dubbing clips.
+    var recordsUrl: [String]
+    /// Seconds each clip is shifted earlier to compensate for recording lag.
+    var preTime: Double = 0
+    
+    init(timeline: [Double], videoUrl: URL, musicUrl: URL, recordsUrl: [String]){
+        self.timeline = timeline
+        self.videoUrl = videoUrl
+        self.musicUrl = musicUrl
+        self.recordsUrl = recordsUrl
+    }
+    
+    /// Composes the video, the background music and the dubbing clips into `output`.
+    /// - Parameters:
+    ///   - output: Destination URL; an existing file there is removed first.
+    ///   - successBlock: Called on the main queue when the export completes.
+    ///   - failBlock: Called on the main queue with a message on failure.
+    func compose(_ output: URL, onSuccess successBlock: (()->())?, onFail failBlock:((_ message: String)->())? ) {
+        
+        // Container for all tracks.
+        let composition = AVMutableComposition()
+        
+        // Video track.
+        let videoTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
+        let videoAsset = AVURLAsset(url: videoUrl, options: nil)
+        let videoTimeRange = CMTimeRangeMake(start: CMTime.zero, duration: videoAsset.duration)
+        // Fix: guard the source track instead of force-unwrapping `.first!`,
+        // which crashed on assets without a video track.
+        guard let videoAssetTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else {
+            DispatchQueue.main.async(execute: {
+                failBlock?("Fail on load video")
+            })
+            return
+        }
+        do {
+            try videoTrack?.insertTimeRange(videoTimeRange, of: videoAssetTrack, at: CMTime.zero)
+        }
+        catch {
+            DispatchQueue.main.async(execute: {
+                failBlock?("Fail on load video")
+            })
+            return
+        }
+        
+        // Background-music track (best effort — failure is logged, not fatal).
+        let musicTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
+        let musicAsset = AVURLAsset(url: musicUrl, options: nil)
+        let musicTimeRange = videoTimeRange
+        if let musicAssetTrack = musicAsset.tracks(withMediaType: AVMediaType.audio).first {
+            do {
+                try musicTrack?.insertTimeRange(musicTimeRange, of: musicAssetTrack, at: CMTime.zero)
+                print("创建音乐音轨成功")
+            }
+            catch {
+                print("创建音乐音轨失败")
+            }
+        } else {
+            // Fix: a music file without an audio track previously crashed via `.first!`.
+            print("创建音乐音轨失败")
+        }
+        
+        // Two alternating dubbing tracks, so consecutive clips may overlap.
+        let recordTrackA = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
+        let recordTrackB = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
+        
+        // Place clips alternately on the two dubbing tracks.
+        for (i, url) in recordsUrl.enumerated() {
+            // Fix: skip (and log) clips whose start time or audio track is
+            // missing instead of crashing on `timeline[i]` / `.first!`.
+            guard i < timeline.count else {
+                print("Composer error on record: \(i)")
+                continue
+            }
+            let recordUrl = URL(fileURLWithPath: url)
+            let recordAsset = AVURLAsset(url: recordUrl, options: nil)
+            guard let recordAssetTrack = recordAsset.tracks(withMediaType: AVMediaType.audio).first else {
+                print("Composer error on record: \(i)")
+                continue
+            }
+            let recordRange = CMTimeRangeMake(start: CMTime.zero, duration: recordAsset.duration)
+            let currentSec = timeline[i]
+            // Shift the clip `preTime` seconds earlier, but never before t = 0.
+            let beginSec = currentSec > preTime ? currentSec - preTime : currentSec
+            let beginTime = CMTimeMakeWithSeconds(beginSec, preferredTimescale: 100)
+            let recordTrack = i % 2 == 0 ? recordTrackA : recordTrackB
+            do {
+                try recordTrack?.insertTimeRange(recordRange, of: recordAssetTrack, at: beginTime)
+            }
+            catch{
+                print("Composer error on record: \(i)")
+            }
+        }
+        
+        // Remove a stale output file, if any (best effort).
+        let manager = FileManager.default
+        do {
+            try manager.removeItem(at: output)
+            print("删除旧输出成功")
+        }
+        catch {
+            print("删除旧输出失败")
+        }
+        
+        // Export. Fix: check the session status before reporting success —
+        // the original invoked successBlock even when the export failed.
+        if let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetMediumQuality) {
+            assetExport.outputFileType = AVFileType.mp4
+            assetExport.outputURL = output
+            assetExport.shouldOptimizeForNetworkUse = true
+            assetExport.exportAsynchronously(completionHandler: {
+                DispatchQueue.main.async(execute: {
+                    if assetExport.status == .completed {
+                        successBlock?()
+                    } else {
+                        failBlock?("Something wrong on composition")
+                    }
+                })
+            })
+        }
+        else {
+            DispatchQueue.main.async(execute: {
+                failBlock?("Something wrong on composition")
+            })
+        }
+    }
+}

+ 2 - 0
ios/Classes/DubbingLibPlugin.m

@@ -1,8 +1,10 @@
 #import "DubbingLibPlugin.h"
 #import <dubbing_lib/dubbing_lib-Swift.h>
+// NOTE(review): Swift sources cannot be #imported directly; DubbingComposer is
+// exposed through the generated dubbing_lib-Swift.h umbrella header above.
 
 @implementation DubbingLibPlugin
+// Registers the Swift implementation as the actual plugin delegate.
 + (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
   [SwiftDubbingLibPlugin registerWithRegistrar:registrar];
 }
 @end
+

+ 217 - 9
ios/Classes/SwiftDubbingLibPlugin.swift

@@ -1,14 +1,222 @@
 import Flutter
 import UIKit
+import AVFoundation
+
+// NOTE(review): timerInterval is unused — the record-progress timer below uses
+// a hard-coded 5 ms interval; confirm before removing.
+private let timerInterval: Double = 0.03
+// NOTE(review): preTime is unused — `preLag` is the value actually assigned to
+// the composer's preTime in startMixinAudio.
+private let preTime: Double = 0.3
+// Seconds each dubbing clip is shifted earlier when mixing.
+private let preLag: Double = 0.15
+
+/// Current recording / playback state of the plugin.
+enum DubbingStatus {
+    case stop, record, playRecord
+}
 
 public class SwiftDubbingLibPlugin: NSObject, FlutterPlugin {
-  public static func register(with registrar: FlutterPluginRegistrar) {
-    let channel = FlutterMethodChannel(name: "dubbing_lib", binaryMessenger: registrar.messenger())
-    let instance = SwiftDubbingLibPlugin()
-    registrar.addMethodCallDelegate(instance, channel: channel)
-  }
-
-  public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
-    result("iOS " + UIDevice.current.systemVersion)
-  }
+    
+    var registrar: FlutterPluginRegistrar!
+    var channel: FlutterMethodChannel!
+    
+    /// Pending reply for the most recent method call. NOTE(review): replies
+    /// delivered from delegate callbacks use this property, so interleaved
+    /// calls can overwrite each other's reply — confirm the Dart side never
+    /// issues overlapping calls.
+    var result: FlutterResult? = nil
+    
+    /// Current dubbing state of the screen.
+    var status: DubbingStatus = .stop
+    var isRecording = false
+    
+    var audioRecorder: AVAudioRecorder?
+    var audioPlayer: AVAudioPlayer?
+    
+    /// Whether the shared AVAudioSession has been configured.
+    var initFlag: Bool = false
+    
+    public static func register(with registrar: FlutterPluginRegistrar) {
+        let channel = FlutterMethodChannel(name: "dubbing_lib", binaryMessenger: registrar.messenger())
+        let instance = SwiftDubbingLibPlugin()
+        registrar.addMethodCallDelegate(instance, channel: channel)
+        instance.channel = channel
+        instance.registrar = registrar
+    }
+    
+    /// Dispatches Flutter method calls to the matching implementation.
+    /// NOTE(review): arguments are force-unwrapped; a malformed call from the
+    /// Dart side will crash — consider guard-let with a FlutterError instead.
+    public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+        let args = call.arguments as? [String: Any]
+        self.result = result
+        switch call.method {
+        case "getPlatformVersion":
+            result("iOS " + UIDevice.current.systemVersion)
+        case "startRecord":
+            let duration = args!["duration"] as! Int
+            let fileName = args!["fileName"] as! String
+            let index = args!["index"] as! Int
+            let pathAudio = args!["pathAudio"] as! String
+            startRecord(pathAudio: pathAudio, index: index, duration: duration, fileName: fileName, result: result)
+        case "playRecordAudio":
+            let filePath = args!["fileName"] as! String
+            playRecord(filePath: filePath, result: result)
+        case "pauseRecordAudio":
+            audioPlayer?.pause()
+            result(true)
+        case "startMixinAudio":
+            let videoId = args!["videoId"] as! String
+            let videoPath = args!["videoPath"] as! String
+            let bgmPath = args!["bgmPath"] as! String
+            let audioPathList = args!["audioPathList"] as! [String]
+            let startTimeList = args!["startTimeList"] as! [Double]
+            let pathVideoMixinDir = args!["pathVideoMixin"] as! String
+            let outPath = pathVideoMixinDir + "\(videoId)_mix.mp4"
+            startMixinAudio(videoPath: videoPath, bgmPath: bgmPath, audioPathList: audioPathList, startTimeList: startTimeList, outPath: outPath, result: result)
+        case "getIsMediaPlayPause":
+            result(audioPlayer != nil && audioPlayer!.isPlaying)
+        case "cleanAudioData":
+            // NOTE(review): not implemented — the Dart caller never receives a
+            // reply for this method; confirm whether that is intentional.
+            break
+        case "findIsExistCacheVideo":
+            result("")
+        case "setExtraFullScreen":
+            result("")
+        default:
+            result(FlutterMethodNotImplemented)
+        }
+    }
+    
+    /// Configures the shared audio session once for simultaneous playback and
+    /// recording through the speaker.
+    func initAudioSession(){
+        do {
+            if (!initFlag) {
+                try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
+                try AVAudioSession.sharedInstance().setActive(true)
+                initFlag = true
+            }
+        } catch {
+            initFlag = false
+        }
+    }
+    
+    /// Replies to the pending Flutter call with an error.
+    /// Fix: optional call instead of force unwrap — `result` may be nil.
+    func resultError(code: String, message: String) -> Void {
+        self.result?(FlutterError(code: code, message: message, details: nil))
+    }
+    
+    /// Mixes the recordings, background music and video into one MP4 file and
+    /// replies with the output path (or error code 1005 on failure).
+    func startMixinAudio(videoPath: String, bgmPath: String, audioPathList: [String], startTimeList: [Double], outPath: String, result: @escaping FlutterResult) {
+        let videoUrl = URL(fileURLWithPath: videoPath)
+        let musicUrl = URL(fileURLWithPath: bgmPath)
+        let composer = DubbingComposer(timeline: startTimeList, videoUrl: videoUrl, musicUrl: musicUrl, recordsUrl: audioPathList)
+        composer.preTime = preLag
+        let outputUrl = URL(fileURLWithPath: outPath)
+        DispatchQueue.global().async {
+            // Fix: reply via the captured `result` rather than self.result!,
+            // which may have been replaced by a later call (and whose force
+            // unwrap would crash if it were nil).
+            composer.compose(outputUrl, onSuccess: {
+                result(outPath)
+            }) { (message) in
+                print("合成失败", message)
+                result(FlutterError(code: "1005", message: "mix video and audio failed", details: nil))
+            }
+        }
+    }
+}
+
+
+// MARK: - Recording control
+extension SwiftDubbingLibPlugin {
+    
+    /// Records `duration` ms of mono AAC audio to `pathAudio + fileName + ".aac"`,
+    /// reporting progress to Flutter every 5 ms via "recordProgress" and
+    /// replying with the file path once the duration has elapsed.
+    @objc func startRecord(pathAudio: String, index: Int, duration: Int, fileName: String, result: @escaping FlutterResult) {
+        initAudioSession()
+        endPlay()
+        status = .record
+        let filePath = pathAudio + fileName + ".aac";
+        do {
+            let settings: [String:Any] = [
+                AVNumberOfChannelsKey : 1,           // mono
+                AVFormatIDKey : kAudioFormatMPEG4AAC // AAC encoding
+            ]
+            let url = URL(fileURLWithPath: filePath)
+            audioRecorder = try AVAudioRecorder(url: url, settings: settings)
+            audioRecorder?.prepareToRecord()
+            audioRecorder?.record()
+            isRecording = true
+        } catch {
+            stopRecord()
+            resultError(code: "1002", message: "start record failed")
+            // Fix: the original fell through to an `else` branch below and
+            // replied a second time for the same call.
+            return
+        }
+        var elapsedMs = 0
+        let queue = DispatchQueue.global(qos: .default)
+        // Fix: makeTimerSource already returns a DispatchSourceTimer; the
+        // original `as! DispatchSource` force cast was unnecessary and fragile.
+        let progressTimer = DispatchSource.makeTimerSource(flags: [], queue: queue)
+        progressTimer.schedule(deadline: DispatchTime.now(), repeating: .milliseconds(5))
+        progressTimer.setEventHandler(handler: { [weak self] in
+            guard let self = self else {
+                progressTimer.cancel()
+                return
+            }
+            elapsedMs += 5
+            let progress = elapsedMs
+            // Fix: platform-channel traffic and the reply must run on the
+            // main thread.
+            DispatchQueue.main.async {
+                self.channel.invokeMethod("recordProgress", arguments: ["progress": progress])
+            }
+            if progress >= duration {
+                progressTimer.cancel()
+                DispatchQueue.main.async {
+                    result(filePath)
+                    self.stopRecord()
+                }
+            }
+        })
+        progressTimer.resume()
+    }
+    
+    /// Plays back a recorded file; the success reply is delivered from the
+    /// AVAudioPlayerDelegate callback when playback finishes.
+    @objc func playRecord(filePath: String, result: @escaping FlutterResult) {
+        status = .playRecord
+        initAudioSession()
+        do {
+            let url = URL(fileURLWithPath: filePath)
+            audioPlayer = try AVAudioPlayer(contentsOf: url)
+            audioPlayer?.prepareToPlay()
+            audioPlayer?.volume = 1
+            audioPlayer?.delegate = self
+            DispatchQueue.global().async {
+                self.audioPlayer?.play()
+            }
+        } catch {
+            stopPlayRecord()
+            // Fix: reply with failure instead of swallowing the error and
+            // leaving the Dart future pending forever.
+            result(false)
+        }
+    }
+    
+    /// Stops whatever is currently active (recording or playback).
+    @objc func endPlay() {
+        switch status {
+        case .stop:
+            break
+        case .record:
+            stopRecord()
+        case .playRecord:
+            stopPlayRecord()
+        }
+    }
+    
+    /// Stop recording and release the recorder.
+    @objc func stopRecord() {
+        status = .stop
+        audioRecorder?.stop()
+        audioRecorder = nil
+        isRecording = false
+    }
+    
+    /// Stop playback and release the player.
+    func stopPlayRecord() {
+        status = .stop
+        if audioPlayer?.isPlaying == true {
+            audioPlayer?.stop()
+        }
+        audioPlayer = nil
+    }
+}
+
+// MARK: - AVAudioPlayerDelegate
+extension SwiftDubbingLibPlugin: AVAudioPlayerDelegate {
+    /// Playback finished: tear down the player and reply to Flutter with the
+    /// success flag.
+    public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
+        stopPlayRecord()
+        // Fix: optional call instead of self.result! — `result` can be nil
+        // (no pending call), which would have crashed here.
+        self.result?(flag)
+    }
+    
+    public func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
+        stopPlayRecord()
+        // NOTE(review): no reply is sent on decode errors (the reply was
+        // deliberately commented out), so the Dart future for playRecordAudio
+        // never completes in this case — confirm whether that is intentional.
+    }
}

+ 6 - 1
ios/dubbing_lib.podspec

@@ -16,8 +16,13 @@ A new Flutter plugin.
   s.source_files = 'Classes/**/*'
   s.public_header_files = 'Classes/**/*.h'
   s.dependency 'Flutter'
-  s.platform = :ios, '8.0'
 
+  # System frameworks and libraries required by the recording / mixing code
+  # (AVFoundation for record, playback and AVAssetExportSession mixing).
+  s.frameworks = 'AVFoundation', 'SystemConfiguration', 'Foundation', 'CoreTelephony', 'AudioToolbox', 'UIKit', 'CoreLocation', 'QuartzCore', 'CoreGraphics'
+  s.libraries = 'z', 'c++'
+  
+  s.platform = :ios, '8.0'
+  # Build as a static framework so the Swift pod links cleanly into the host app.
+  s.static_framework = true
+	
   # Flutter.framework does not contain a i386 slice. Only x86_64 simulators are supported.
   s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' }
 end