Ver código fonte

ios评测功能

gaoyf 6 anos atrás
pai
commit
41028cc802
100 arquivos alterados com 5443 adições e 511 exclusões
  1. 21 11
      .idea/libraries/Dart_SDK.xml
  2. 2 2
      .idea/runConfigurations/example_lib_main_dart.xml
  3. 93 44
      .idea/workspace.xml
  4. 26 26
      example/ios/Flutter/AppFrameworkInfo.plist
  5. 2 1
      example/ios/Flutter/Debug.xcconfig
  6. 2 1
      example/ios/Flutter/Release.xcconfig
  7. 87 0
      example/ios/Podfile
  8. 91 22
      example/ios/Runner.xcodeproj/project.pbxproj
  9. 7 7
      example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata
  10. 3 7
      example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme
  11. 3 0
      example/ios/Runner.xcworkspace/contents.xcworkspacedata
  12. 6 0
      example/ios/Runner/AppDelegate.h
  13. 13 0
      example/ios/Runner/AppDelegate.m
  14. 0 13
      example/ios/Runner/AppDelegate.swift
  15. 122 122
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json
  16. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png
  17. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png
  18. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png
  19. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png
  20. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png
  21. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png
  22. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png
  23. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png
  24. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png
  25. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png
  26. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png
  27. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png
  28. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png
  29. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png
  30. 0 0
      example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png
  31. 23 23
      example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json
  32. 0 0
      example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png
  33. 0 0
      example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png
  34. 0 0
      example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png
  35. 4 4
      example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md
  36. 37 37
      example/ios/Runner/Base.lproj/LaunchScreen.storyboard
  37. 26 26
      example/ios/Runner/Base.lproj/Main.storyboard
  38. 45 45
      example/ios/Runner/Info.plist
  39. 0 1
      example/ios/Runner/Runner-Bridging-Header.h
  40. 9 0
      example/ios/Runner/main.m
  41. 68 19
      example/pubspec.lock
  42. 36 36
      ios/.gitignore
  43. 0 0
      ios/Assets/.gitkeep
  44. 20 0
      ios/Classes/M4aToPcmHelper.h
  45. 108 0
      ios/Classes/M4aToPcmHelper.m
  46. 20 0
      ios/Classes/Mp4ToPcmHelper.h
  47. 121 0
      ios/Classes/Mp4ToPcmHelper.m
  48. 68 0
      ios/Classes/Results/ISEResult.h
  49. 17 0
      ios/Classes/Results/ISEResult.m
  50. 17 0
      ios/Classes/Results/ISEResultFinal.h
  51. 18 0
      ios/Classes/Results/ISEResultFinal.m
  52. 47 0
      ios/Classes/Results/ISEResultPhone.h
  53. 27 0
      ios/Classes/Results/ISEResultPhone.m
  54. 13 0
      ios/Classes/Results/ISEResultReadSentence.h
  55. 48 0
      ios/Classes/Results/ISEResultReadSentence.m
  56. 13 0
      ios/Classes/Results/ISEResultReadSyllable.h
  57. 35 0
      ios/Classes/Results/ISEResultReadSyllable.m
  58. 13 0
      ios/Classes/Results/ISEResultReadWord.h
  59. 48 0
      ios/Classes/Results/ISEResultReadWord.m
  60. 56 0
      ios/Classes/Results/ISEResultSentence.h
  61. 13 0
      ios/Classes/Results/ISEResultSentence.m
  62. 56 0
      ios/Classes/Results/ISEResultSyll.h
  63. 29 0
      ios/Classes/Results/ISEResultSyll.m
  64. 61 0
      ios/Classes/Results/ISEResultTools.h
  65. 254 0
      ios/Classes/Results/ISEResultTools.m
  66. 66 0
      ios/Classes/Results/ISEResultWord.h
  67. 13 0
      ios/Classes/Results/ISEResultWord.m
  68. 29 0
      ios/Classes/Results/ISEResultXmlParser.h
  69. 278 0
      ios/Classes/Results/ISEResultXmlParser.m
  70. 4 4
      ios/Classes/SpeechPlugin.h
  71. 155 8
      ios/Classes/SpeechPlugin.m
  72. 0 14
      ios/Classes/SwiftSpeechPlugin.swift
  73. 35 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyAudioSession.h
  74. 54 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyDataUploader.h
  75. 37 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyDebugLog.h
  76. 48 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyISVDelegate.h
  77. 132 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyISVRecognizer.h
  78. 38 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyMSC.h
  79. 116 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyPcmRecorder.h
  80. 134 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyRecognizerView.h
  81. 36 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyRecognizerViewDelegate.h
  82. 90 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyResourceUtil.h
  83. 88 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlySetting.h
  84. 903 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechConstant.h
  85. 58 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechError.h
  86. 101 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechEvaluator.h
  87. 64 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechEvaluatorDelegate.h
  88. 151 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechEvent.h
  89. 175 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechRecognizer.h
  90. 111 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechRecognizerDelegate.h
  91. 123 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechSynthesizer.h
  92. 81 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechSynthesizerDelegate.h
  93. 184 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechUtility.h
  94. 74 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyUserWords.h
  95. 83 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyVoiceWakeuper.h
  96. 60 0
      ios/Frameworks/iflyMSC.framework/Headers/IFlyVoiceWakeuperDelegate.h
  97. BIN
      ios/Frameworks/iflyMSC.framework/iflyMSC
  98. 26 20
      ios/speech_plugin.podspec
  99. 67 18
      pubspec.lock
  100. 1 0
      speech_plugin.iml

+ 21 - 11
.idea/libraries/Dart_SDK.xml

@@ -1,17 +1,27 @@
 <component name="libraryTable">
   <library name="Dart SDK">
     <CLASSES>
-      <root url="file://C:/flutter/flutter/bin/cache/dart-sdk/lib/async" />
-      <root url="file://C:/flutter/flutter/bin/cache/dart-sdk/lib/collection" />
-      <root url="file://C:/flutter/flutter/bin/cache/dart-sdk/lib/convert" />
-      <root url="file://C:/flutter/flutter/bin/cache/dart-sdk/lib/core" />
-      <root url="file://C:/flutter/flutter/bin/cache/dart-sdk/lib/developer" />
-      <root url="file://C:/flutter/flutter/bin/cache/dart-sdk/lib/html" />
-      <root url="file://C:/flutter/flutter/bin/cache/dart-sdk/lib/io" />
-      <root url="file://C:/flutter/flutter/bin/cache/dart-sdk/lib/isolate" />
-      <root url="file://C:/flutter/flutter/bin/cache/dart-sdk/lib/math" />
-      <root url="file://C:/flutter/flutter/bin/cache/dart-sdk/lib/mirrors" />
-      <root url="file://C:/flutter/flutter/bin/cache/dart-sdk/lib/typed_data" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/async" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/cli" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/collection" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/convert" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/core" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/developer" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/ffi" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/html" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/indexed_db" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/io" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/isolate" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/js" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/js_util" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/math" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/mirrors" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/svg" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/typed_data" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/wasm" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/web_audio" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/web_gl" />
+      <root url="file://$PROJECT_DIR$/../flutter/bin/cache/dart-sdk/lib/web_sql" />
     </CLASSES>
     <JAVADOC />
     <SOURCES />

+ 2 - 2
.idea/runConfigurations/example_lib_main_dart.xml

@@ -1,6 +1,6 @@
 <component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="example/lib/main.dart" type="FlutterRunConfigurationType" factoryName="Flutter">
+  <configuration default="false" name="example/lib/main.dart" type="FlutterRunConfigurationType" factoryName="Flutter" singleton="false">
     <option name="filePath" value="$PROJECT_DIR$/example/lib/main.dart" />
-    <method />
+    <method v="2" />
   </configuration>
 </component>

+ 93 - 44
.idea/workspace.xml

@@ -1,7 +1,53 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project version="4">
   <component name="ChangeListManager">
-    <list default="true" id="91fb8cbe-3946-4eca-a4cf-05dab52b73c6" name="Default Changelist" comment="" />
+    <list default="true" id="91fb8cbe-3946-4eca-a4cf-05dab52b73c6" name="Default Changelist" comment="">
+      <change beforePath="$PROJECT_DIR$/.idea/libraries/Dart_SDK.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/libraries/Dart_SDK.xml" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/.idea/libraries/Flutter_Plugins.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/libraries/Flutter_Plugins.xml" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Flutter/AppFrameworkInfo.plist" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Flutter/AppFrameworkInfo.plist" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Flutter/Debug.xcconfig" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Flutter/Debug.xcconfig" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Flutter/Release.xcconfig" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Flutter/Release.xcconfig" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner.xcodeproj/project.pbxproj" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner.xcodeproj/project.pbxproj" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner.xcworkspace/contents.xcworkspacedata" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner.xcworkspace/contents.xcworkspacedata" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/AppDelegate.swift" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Base.lproj/LaunchScreen.storyboard" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Base.lproj/LaunchScreen.storyboard" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Base.lproj/Main.storyboard" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Base.lproj/Main.storyboard" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Info.plist" beforeDir="false" afterPath="$PROJECT_DIR$/example/ios/Runner/Info.plist" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/ios/Runner/Runner-Bridging-Header.h" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/example/pubspec.lock" beforeDir="false" afterPath="$PROJECT_DIR$/example/pubspec.lock" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/ios/.gitignore" beforeDir="false" afterPath="$PROJECT_DIR$/ios/.gitignore" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/ios/Assets/.gitkeep" beforeDir="false" afterPath="$PROJECT_DIR$/ios/Assets/.gitkeep" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/ios/Classes/SpeechPlugin.h" beforeDir="false" afterPath="$PROJECT_DIR$/ios/Classes/SpeechPlugin.h" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/ios/Classes/SpeechPlugin.m" beforeDir="false" afterPath="$PROJECT_DIR$/ios/Classes/SpeechPlugin.m" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/ios/Classes/SwiftSpeechPlugin.swift" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/ios/speech_plugin.podspec" beforeDir="false" afterPath="$PROJECT_DIR$/ios/speech_plugin.podspec" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/pubspec.lock" beforeDir="false" afterPath="$PROJECT_DIR$/pubspec.lock" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/speech_plugin.iml" beforeDir="false" afterPath="$PROJECT_DIR$/speech_plugin.iml" afterDir="false" />
+    </list>
     <ignored path="$PROJECT_DIR$/.dart_tool/" />
     <ignored path="$PROJECT_DIR$/.idea/" />
     <ignored path="$PROJECT_DIR$/.pub/" />
@@ -14,20 +60,23 @@
     <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
     <option name="LAST_RESOLUTION" value="IGNORE" />
   </component>
-  <component name="ExecutionTargetManager" SELECTED_TARGET="792QAESFTC6MD" />
+  <component name="ExecutionTargetManager" SELECTED_TARGET="Nexus_5X_API_29_x86" />
   <component name="FileEditorManager">
     <leaf SIDE_TABS_SIZE_LIMIT_KEY="300">
       <file pinned="false" current-in-tab="true">
         <entry file="file://$PROJECT_DIR$/lib/speech_plugin.dart">
           <provider selected="true" editor-type-id="text-editor">
-            <state relative-caret-position="513">
-              <caret line="29" column="2" lean-forward="true" selection-start-line="29" selection-start-column="2" selection-end-line="29" selection-end-column="2" />
+            <state relative-caret-position="405">
+              <caret line="29" column="2" selection-start-line="29" selection-start-column="2" selection-end-line="29" selection-end-column="2" />
             </state>
           </provider>
         </entry>
       </file>
     </leaf>
   </component>
+  <component name="Git.Settings">
+    <option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
+  </component>
   <component name="IdeDocumentHistory">
     <option name="CHANGED_PATHS">
       <list>
@@ -36,31 +85,28 @@
     </option>
   </component>
   <component name="ProjectFrameBounds" extendedState="6">
-    <option name="width" value="1440" />
-    <option name="height" value="900" />
+    <option name="x" value="293" />
+    <option name="y" value="159" />
+    <option name="width" value="1634" />
+    <option name="height" value="812" />
   </component>
   <component name="ProjectView">
     <navigator currentView="ProjectPane" proportions="" version="1">
       <foldersAlwaysOnTop value="true" />
     </navigator>
     <panes>
-      <pane id="AndroidView" />
       <pane id="ProjectPane">
         <subPane>
           <expand>
             <path>
-              <item name="speech_plugin" type="b2602c69:ProjectViewProjectNode" />
-              <item name="speech_plugin" type="462c0819:PsiDirectoryNode" />
+              <item name="speech_lib" type="b2602c69:ProjectViewProjectNode" />
+              <item name="speech_lib" type="462c0819:PsiDirectoryNode" />
             </path>
             <path>
-              <item name="speech_plugin" type="b2602c69:ProjectViewProjectNode" />
-              <item name="speech_plugin" type="462c0819:PsiDirectoryNode" />
+              <item name="speech_lib" type="b2602c69:ProjectViewProjectNode" />
+              <item name="speech_lib" type="462c0819:PsiDirectoryNode" />
               <item name="lib" type="462c0819:PsiDirectoryNode" />
             </path>
-            <path>
-              <item name="speech_plugin" type="b2602c69:ProjectViewProjectNode" />
-              <item name="External Libraries" type="cb654da1:ExternalLibrariesNode" />
-            </path>
           </expand>
           <select />
         </subPane>
@@ -70,8 +116,10 @@
     </panes>
   </component>
   <component name="PropertiesComponent">
+    <property name="SHARE_PROJECT_CONFIGURATION_FILES" value="true" />
     <property name="dart.analysis.tool.window.force.activate" value="false" />
-    <property name="last_opened_file_path" value="$PROJECT_DIR$/android" />
+    <property name="last_opened_file_path" value="$PROJECT_DIR$" />
+    <property name="settings.editor.selected.configurable" value="flutter.settings" />
     <property name="show.migrate.to.gradle.popup" value="false" />
   </component>
   <component name="RunDashboard">
@@ -100,34 +148,35 @@
     <servers />
   </component>
   <component name="ToolWindowManager">
-    <frame x="-8" y="-8" width="1936" height="1056" extended-state="6" />
+    <frame x="0" y="23" width="1920" height="977" extended-state="6" />
     <editor active="true" />
     <layout>
-      <window_info id="Capture Tool" />
-      <window_info id="Structure" side_tool="true" />
-      <window_info id="Favorites" side_tool="true" />
-      <window_info id="Build Variants" side_tool="true" />
-      <window_info id="Image Layers" />
-      <window_info id="Designer" />
-      <window_info id="Captures" side_tool="true" />
-      <window_info id="Resources Explorer" />
-      <window_info active="true" content_ui="combo" id="Project" order="0" visible="true" weight="0.19989339" />
-      <window_info anchor="bottom" id="Run" />
-      <window_info anchor="bottom" id="Dart Analysis" weight="0.32936078" />
-      <window_info anchor="bottom" id="Logcat" />
-      <window_info anchor="bottom" id="TODO" />
-      <window_info anchor="bottom" id="Debug" />
-      <window_info anchor="bottom" id="Terminal" />
-      <window_info anchor="bottom" id="Event Log" side_tool="true" />
-      <window_info anchor="bottom" id="Flutter Performance" side_tool="true" />
-      <window_info anchor="bottom" id="Version Control" />
-      <window_info anchor="bottom" id="Android Profiler" show_stripe_button="false" />
-      <window_info anchor="right" id="Device File Explorer" side_tool="true" />
-      <window_info anchor="right" id="Capture Analysis" />
-      <window_info anchor="right" id="Theme Preview" />
-      <window_info anchor="right" id="Flutter Inspector" />
-      <window_info anchor="right" id="Flutter Outline" />
-      <window_info anchor="right" id="Palette&#9;" />
+      <window_info content_ui="combo" id="Project" order="0" visible="true" weight="0.20181043" />
+      <window_info id="Captures" order="1" side_tool="true" />
+      <window_info id="Structure" order="2" side_tool="true" />
+      <window_info id="Image Layers" order="3" />
+      <window_info id="Designer" order="4" />
+      <window_info id="Build Variants" order="5" side_tool="true" />
+      <window_info id="Resources Explorer" order="6" />
+      <window_info id="Capture Tool" order="7" />
+      <window_info id="Favorites" order="8" side_tool="true" />
+      <window_info anchor="bottom" id="Messages" weight="0.3299435" />
+      <window_info anchor="bottom" id="Dart Analysis" order="0" weight="0.32936078" />
+      <window_info active="true" anchor="bottom" id="Run" order="1" visible="true" weight="0.3299435" />
+      <window_info anchor="bottom" id="TODO" order="2" />
+      <window_info anchor="bottom" id="Android Profiler" order="3" show_stripe_button="false" />
+      <window_info anchor="bottom" id="Logcat" order="4" />
+      <window_info anchor="bottom" id="Debug" order="5" />
+      <window_info anchor="bottom" id="Terminal" order="6" />
+      <window_info anchor="bottom" id="Event Log" order="7" side_tool="true" />
+      <window_info anchor="bottom" id="Flutter Performance" order="8" side_tool="true" />
+      <window_info anchor="bottom" id="Version Control" order="9" />
+      <window_info anchor="right" id="Device File Explorer" order="0" side_tool="true" />
+      <window_info anchor="right" id="Capture Analysis" order="1" />
+      <window_info anchor="right" id="Theme Preview" order="2" />
+      <window_info anchor="right" id="Flutter Inspector" order="3" />
+      <window_info anchor="right" id="Flutter Outline" order="4" />
+      <window_info anchor="right" id="Palette&#9;" order="5" />
     </layout>
   </component>
   <component name="editorHistoryManager">
@@ -147,8 +196,8 @@
     </entry>
     <entry file="file://$PROJECT_DIR$/lib/speech_plugin.dart">
       <provider selected="true" editor-type-id="text-editor">
-        <state relative-caret-position="513">
-          <caret line="29" column="2" lean-forward="true" selection-start-line="29" selection-start-column="2" selection-end-line="29" selection-end-column="2" />
+        <state relative-caret-position="405">
+          <caret line="29" column="2" selection-start-line="29" selection-start-column="2" selection-end-line="29" selection-end-column="2" />
         </state>
       </provider>
     </entry>

+ 26 - 26
example/ios/Flutter/AppFrameworkInfo.plist

@@ -1,26 +1,26 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-  <key>CFBundleDevelopmentRegion</key>
-  <string>$(DEVELOPMENT_LANGUAGE)</string>
-  <key>CFBundleExecutable</key>
-  <string>App</string>
-  <key>CFBundleIdentifier</key>
-  <string>io.flutter.flutter.app</string>
-  <key>CFBundleInfoDictionaryVersion</key>
-  <string>6.0</string>
-  <key>CFBundleName</key>
-  <string>App</string>
-  <key>CFBundlePackageType</key>
-  <string>FMWK</string>
-  <key>CFBundleShortVersionString</key>
-  <string>1.0</string>
-  <key>CFBundleSignature</key>
-  <string>????</string>
-  <key>CFBundleVersion</key>
-  <string>1.0</string>
-  <key>MinimumOSVersion</key>
-  <string>8.0</string>
-</dict>
-</plist>
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>CFBundleDevelopmentRegion</key>
+  <string>$(DEVELOPMENT_LANGUAGE)</string>
+  <key>CFBundleExecutable</key>
+  <string>App</string>
+  <key>CFBundleIdentifier</key>
+  <string>io.flutter.flutter.app</string>
+  <key>CFBundleInfoDictionaryVersion</key>
+  <string>6.0</string>
+  <key>CFBundleName</key>
+  <string>App</string>
+  <key>CFBundlePackageType</key>
+  <string>FMWK</string>
+  <key>CFBundleShortVersionString</key>
+  <string>1.0</string>
+  <key>CFBundleSignature</key>
+  <string>????</string>
+  <key>CFBundleVersion</key>
+  <string>1.0</string>
+  <key>MinimumOSVersion</key>
+  <string>8.0</string>
+</dict>
+</plist>

+ 2 - 1
example/ios/Flutter/Debug.xcconfig

@@ -1 +1,2 @@
-#include "Generated.xcconfig"
+#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
+#include "Generated.xcconfig"

+ 2 - 1
example/ios/Flutter/Release.xcconfig

@@ -1 +1,2 @@
-#include "Generated.xcconfig"
+#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
+#include "Generated.xcconfig"

+ 87 - 0
example/ios/Podfile

@@ -0,0 +1,87 @@
+# Uncomment this line to define a global platform for your project
+# platform :ios, '9.0'
+
+# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
+ENV['COCOAPODS_DISABLE_STATS'] = 'true'
+
+project 'Runner', {
+  'Debug' => :debug,
+  'Profile' => :release,
+  'Release' => :release,
+}
+
+def parse_KV_file(file, separator='=')
+  file_abs_path = File.expand_path(file)
+  if !File.exists? file_abs_path
+    return [];
+  end
+  generated_key_values = {}
+  skip_line_start_symbols = ["#", "/"]
+  File.foreach(file_abs_path) do |line|
+    next if skip_line_start_symbols.any? { |symbol| line =~ /^\s*#{symbol}/ }
+    plugin = line.split(pattern=separator)
+    if plugin.length == 2
+      podname = plugin[0].strip()
+      path = plugin[1].strip()
+      podpath = File.expand_path("#{path}", file_abs_path)
+      generated_key_values[podname] = podpath
+    else
+      puts "Invalid plugin specification: #{line}"
+    end
+  end
+  generated_key_values
+end
+
+target 'Runner' do
+  # Flutter Pod
+
+  copied_flutter_dir = File.join(__dir__, 'Flutter')
+  copied_framework_path = File.join(copied_flutter_dir, 'Flutter.framework')
+  copied_podspec_path = File.join(copied_flutter_dir, 'Flutter.podspec')
+  unless File.exist?(copied_framework_path) && File.exist?(copied_podspec_path)
+    # Copy Flutter.framework and Flutter.podspec to Flutter/ to have something to link against if the xcode backend script has not run yet.
+    # That script will copy the correct debug/profile/release version of the framework based on the currently selected Xcode configuration.
+    # CocoaPods will not embed the framework on pod install (before any build phases can generate) if the dylib does not exist.
+
+    generated_xcode_build_settings_path = File.join(copied_flutter_dir, 'Generated.xcconfig')
+    unless File.exist?(generated_xcode_build_settings_path)
+      raise "Generated.xcconfig must exist. If you're running pod install manually, make sure flutter pub get is executed first"
+    end
+    generated_xcode_build_settings = parse_KV_file(generated_xcode_build_settings_path)
+    cached_framework_dir = generated_xcode_build_settings['FLUTTER_FRAMEWORK_DIR'];
+
+    unless File.exist?(copied_framework_path)
+      FileUtils.cp_r(File.join(cached_framework_dir, 'Flutter.framework'), copied_flutter_dir)
+    end
+    unless File.exist?(copied_podspec_path)
+      FileUtils.cp(File.join(cached_framework_dir, 'Flutter.podspec'), copied_flutter_dir)
+    end
+  end
+
+  # Keep pod path relative so it can be checked into Podfile.lock.
+  pod 'Flutter', :path => 'Flutter'
+
+  # Plugin Pods
+
+  # Prepare symlinks folder. We use symlinks to avoid having Podfile.lock
+  # referring to absolute paths on developers' machines.
+  system('rm -rf .symlinks')
+  system('mkdir -p .symlinks/plugins')
+  plugin_pods = parse_KV_file('../.flutter-plugins')
+  plugin_pods.each do |name, path|
+    symlink = File.join('.symlinks', 'plugins', name)
+    File.symlink(path, symlink)
+    pod name, :path => File.join(symlink, 'ios')
+  end
+end
+
+# Prevent Cocoapods from embedding a second Flutter framework and causing an error with the new Xcode build system.
+install! 'cocoapods', :disable_input_output_paths => true
+
+post_install do |installer|
+  installer.pods_project.targets.each do |target|
+    target.build_configurations.each do |config|
+      config.build_settings['ENABLE_BITCODE'] = 'NO'
+    end
+  end
+end

+ 91 - 22
example/ios/Runner.xcodeproj/project.pbxproj

@@ -11,10 +11,11 @@
 		3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
 		3B80C3941E831B6300D905FE /* App.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3B80C3931E831B6300D905FE /* App.framework */; };
 		3B80C3951E831B6300D905FE /* App.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 3B80C3931E831B6300D905FE /* App.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
-		74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74858FAE1ED2DC5600515810 /* AppDelegate.swift */; };
+		4E7EECE9383C572DE041AD20 /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = C1E6CD67FAE6488382D7C86E /* libPods-Runner.a */; };
 		9705A1C61CF904A100538489 /* Flutter.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 9740EEBA1CF902C7004384FC /* Flutter.framework */; };
 		9705A1C71CF904A300538489 /* Flutter.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 9740EEBA1CF902C7004384FC /* Flutter.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
-		9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */ = {isa = PBXBuildFile; fileRef = 9740EEB21CF90195004384FC /* Debug.xcconfig */; };
+		978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; };
+		97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; };
 		97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
 		97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; };
 		97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; };
@@ -40,17 +41,22 @@
 		1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = "<group>"; };
 		3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
 		3B80C3931E831B6300D905FE /* App.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = App.framework; path = Flutter/App.framework; sourceTree = "<group>"; };
-		74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Runner-Bridging-Header.h"; sourceTree = "<group>"; };
-		74858FAE1ED2DC5600515810 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
+		6EBBD70825B0DB46C1295F01 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = "<group>"; };
 		7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
+		7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
+		7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
+		8E8F1967063E48A728361464 /* Pods-Runner.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.profile.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.profile.xcconfig"; sourceTree = "<group>"; };
 		9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; };
 		9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; };
 		9740EEBA1CF902C7004384FC /* Flutter.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Flutter.framework; path = Flutter/Flutter.framework; sourceTree = "<group>"; };
 		97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; };
+		97C146F21CF9000F007C117D /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
 		97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
 		97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
 		97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
 		97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
+		C1E6CD67FAE6488382D7C86E /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; };
+		C5DADEA3CDCFFFFA299F58AB /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
 /* Begin PBXFrameworksBuildPhase section */
@@ -60,12 +66,23 @@
 			files = (
 				9705A1C61CF904A100538489 /* Flutter.framework in Frameworks */,
 				3B80C3941E831B6300D905FE /* App.framework in Frameworks */,
+				4E7EECE9383C572DE041AD20 /* libPods-Runner.a in Frameworks */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
 /* End PBXFrameworksBuildPhase section */
 
 /* Begin PBXGroup section */
+		0F3987F49D13048B9349AB59 /* Pods */ = {
+			isa = PBXGroup;
+			children = (
+				C5DADEA3CDCFFFFA299F58AB /* Pods-Runner.debug.xcconfig */,
+				6EBBD70825B0DB46C1295F01 /* Pods-Runner.release.xcconfig */,
+				8E8F1967063E48A728361464 /* Pods-Runner.profile.xcconfig */,
+			);
+			path = Pods;
+			sourceTree = "<group>";
+		};
 		9740EEB11CF90186004384FC /* Flutter */ = {
 			isa = PBXGroup;
 			children = (
@@ -85,6 +102,8 @@
 				9740EEB11CF90186004384FC /* Flutter */,
 				97C146F01CF9000F007C117D /* Runner */,
 				97C146EF1CF9000F007C117D /* Products */,
+				0F3987F49D13048B9349AB59 /* Pods */,
+				EDEBEDABD58FB1779098877E /* Frameworks */,
 			);
 			sourceTree = "<group>";
 		};
@@ -99,6 +118,8 @@
 		97C146F01CF9000F007C117D /* Runner */ = {
 			isa = PBXGroup;
 			children = (
+				7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */,
+				7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */,
 				97C146FA1CF9000F007C117D /* Main.storyboard */,
 				97C146FD1CF9000F007C117D /* Assets.xcassets */,
 				97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */,
@@ -106,8 +127,6 @@
 				97C146F11CF9000F007C117D /* Supporting Files */,
 				1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */,
 				1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */,
-				74858FAE1ED2DC5600515810 /* AppDelegate.swift */,
-				74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */,
 			);
 			path = Runner;
 			sourceTree = "<group>";
@@ -115,10 +134,19 @@
 		97C146F11CF9000F007C117D /* Supporting Files */ = {
 			isa = PBXGroup;
 			children = (
+				97C146F21CF9000F007C117D /* main.m */,
 			);
 			name = "Supporting Files";
 			sourceTree = "<group>";
 		};
+		EDEBEDABD58FB1779098877E /* Frameworks */ = {
+			isa = PBXGroup;
+			children = (
+				C1E6CD67FAE6488382D7C86E /* libPods-Runner.a */,
+			);
+			name = Frameworks;
+			sourceTree = "<group>";
+		};
 /* End PBXGroup section */
 
 /* Begin PBXNativeTarget section */
@@ -126,12 +154,14 @@
 			isa = PBXNativeTarget;
 			buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */;
 			buildPhases = (
+				8AABC13A4AE969899678E386 /* [CP] Check Pods Manifest.lock */,
 				9740EEB61CF901F6004384FC /* Run Script */,
 				97C146EA1CF9000F007C117D /* Sources */,
 				97C146EB1CF9000F007C117D /* Frameworks */,
 				97C146EC1CF9000F007C117D /* Resources */,
 				9705A1C41CF9048500538489 /* Embed Frameworks */,
 				3B06AD1E1E4923F5004D2608 /* Thin Binary */,
+				C9816D6ACD4932443EF75500 /* [CP] Embed Pods Frameworks */,
 			);
 			buildRules = (
 			);
@@ -148,12 +178,12 @@
 		97C146E61CF9000F007C117D /* Project object */ = {
 			isa = PBXProject;
 			attributes = {
-				LastUpgradeCheck = 1020;
+				LastUpgradeCheck = 1110;
 				ORGANIZATIONNAME = "The Chromium Authors";
 				TargetAttributes = {
 					97C146ED1CF9000F007C117D = {
 						CreatedOnToolsVersion = 7.3.1;
-						LastSwiftMigration = 0910;
+						DevelopmentTeam = LRXRX75D5X;
 					};
 				};
 			};
@@ -182,7 +212,6 @@
 			files = (
 				97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */,
 				3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */,
-				9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */,
 				97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */,
 				97C146FC1CF9000F007C117D /* Main.storyboard in Resources */,
 			);
@@ -205,6 +234,28 @@
 			shellPath = /bin/sh;
 			shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" thin";
 		};
+		8AABC13A4AE969899678E386 /* [CP] Check Pods Manifest.lock */ = {
+			isa = PBXShellScriptBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+			);
+			inputFileListPaths = (
+			);
+			inputPaths = (
+				"${PODS_PODFILE_DIR_PATH}/Podfile.lock",
+				"${PODS_ROOT}/Manifest.lock",
+			);
+			name = "[CP] Check Pods Manifest.lock";
+			outputFileListPaths = (
+			);
+			outputPaths = (
+				"$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt",
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+			shellPath = /bin/sh;
+			shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n    # print error to STDERR\n    echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n    exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
+			showEnvVarsInLog = 0;
+		};
 		9740EEB61CF901F6004384FC /* Run Script */ = {
 			isa = PBXShellScriptBuildPhase;
 			buildActionMask = 2147483647;
@@ -219,6 +270,21 @@
 			shellPath = /bin/sh;
 			shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
 		};
+		C9816D6ACD4932443EF75500 /* [CP] Embed Pods Frameworks */ = {
+			isa = PBXShellScriptBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+			);
+			inputPaths = (
+			);
+			name = "[CP] Embed Pods Frameworks";
+			outputPaths = (
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+			shellPath = /bin/sh;
+			shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
+			showEnvVarsInLog = 0;
+		};
 /* End PBXShellScriptBuildPhase section */
 
 /* Begin PBXSourcesBuildPhase section */
@@ -226,7 +292,8 @@
 			isa = PBXSourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
-				74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */,
+				978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */,
+				97C146F31CF9000F007C117D /* main.m in Sources */,
 				1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
@@ -257,6 +324,7 @@
 			isa = XCBuildConfiguration;
 			baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
 			buildSettings = {
+				ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
 				ALWAYS_SEARCH_USER_PATHS = NO;
 				CLANG_ANALYZER_NONNULL = YES;
 				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
@@ -298,6 +366,7 @@
 				IPHONEOS_DEPLOYMENT_TARGET = 8.0;
 				MTL_ENABLE_DEBUG_INFO = NO;
 				SDKROOT = iphoneos;
+				SUPPORTED_PLATFORMS = iphoneos;
 				TARGETED_DEVICE_FAMILY = "1,2";
 				VALIDATE_PRODUCT = YES;
 			};
@@ -307,14 +376,16 @@
 			isa = XCBuildConfiguration;
 			baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
 			buildSettings = {
+				ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
 				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
-				CLANG_ENABLE_MODULES = YES;
 				CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
+				DEVELOPMENT_TEAM = LRXRX75D5X;
 				ENABLE_BITCODE = NO;
 				FRAMEWORK_SEARCH_PATHS = (
 					"$(inherited)",
 					"$(PROJECT_DIR)/Flutter",
 				);
+				GCC_NO_COMMON_BLOCKS = NO;
 				INFOPLIST_FILE = Runner/Info.plist;
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
 				LIBRARY_SEARCH_PATHS = (
@@ -323,8 +394,6 @@
 				);
 				PRODUCT_BUNDLE_IDENTIFIER = cn.i2edu.speechPluginExample;
 				PRODUCT_NAME = "$(TARGET_NAME)";
-				SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
-				SWIFT_VERSION = 4.0;
 				VERSIONING_SYSTEM = "apple-generic";
 			};
 			name = Profile;
@@ -333,6 +402,7 @@
 			isa = XCBuildConfiguration;
 			baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */;
 			buildSettings = {
+				ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
 				ALWAYS_SEARCH_USER_PATHS = NO;
 				CLANG_ANALYZER_NONNULL = YES;
 				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
@@ -389,6 +459,7 @@
 			isa = XCBuildConfiguration;
 			baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
 			buildSettings = {
+				ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
 				ALWAYS_SEARCH_USER_PATHS = NO;
 				CLANG_ANALYZER_NONNULL = YES;
 				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
@@ -430,7 +501,7 @@
 				IPHONEOS_DEPLOYMENT_TARGET = 8.0;
 				MTL_ENABLE_DEBUG_INFO = NO;
 				SDKROOT = iphoneos;
-				SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
+				SUPPORTED_PLATFORMS = iphoneos;
 				TARGETED_DEVICE_FAMILY = "1,2";
 				VALIDATE_PRODUCT = YES;
 			};
@@ -440,14 +511,16 @@
 			isa = XCBuildConfiguration;
 			baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */;
 			buildSettings = {
+				ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
 				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
-				CLANG_ENABLE_MODULES = YES;
 				CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
+				DEVELOPMENT_TEAM = LRXRX75D5X;
 				ENABLE_BITCODE = NO;
 				FRAMEWORK_SEARCH_PATHS = (
 					"$(inherited)",
 					"$(PROJECT_DIR)/Flutter",
 				);
+				GCC_NO_COMMON_BLOCKS = NO;
 				INFOPLIST_FILE = Runner/Info.plist;
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
 				LIBRARY_SEARCH_PATHS = (
@@ -456,9 +529,6 @@
 				);
 				PRODUCT_BUNDLE_IDENTIFIER = cn.i2edu.speechPluginExample;
 				PRODUCT_NAME = "$(TARGET_NAME)";
-				SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
-				SWIFT_OPTIMIZATION_LEVEL = "-Onone";
-				SWIFT_VERSION = 4.0;
 				VERSIONING_SYSTEM = "apple-generic";
 			};
 			name = Debug;
@@ -467,14 +537,16 @@
 			isa = XCBuildConfiguration;
 			baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
 			buildSettings = {
+				ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
 				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
-				CLANG_ENABLE_MODULES = YES;
 				CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
+				DEVELOPMENT_TEAM = LRXRX75D5X;
 				ENABLE_BITCODE = NO;
 				FRAMEWORK_SEARCH_PATHS = (
 					"$(inherited)",
 					"$(PROJECT_DIR)/Flutter",
 				);
+				GCC_NO_COMMON_BLOCKS = NO;
 				INFOPLIST_FILE = Runner/Info.plist;
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
 				LIBRARY_SEARCH_PATHS = (
@@ -483,8 +555,6 @@
 				);
 				PRODUCT_BUNDLE_IDENTIFIER = cn.i2edu.speechPluginExample;
 				PRODUCT_NAME = "$(TARGET_NAME)";
-				SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
-				SWIFT_VERSION = 4.0;
 				VERSIONING_SYSTEM = "apple-generic";
 			};
 			name = Release;
@@ -513,7 +583,6 @@
 			defaultConfigurationName = Release;
 		};
 /* End XCConfigurationList section */
-
 	};
 	rootObject = 97C146E61CF9000F007C117D /* Project object */;
 }

+ 7 - 7
example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata

@@ -1,7 +1,7 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Workspace
-   version = "1.0">
-   <FileRef
-      location = "group:Runner.xcodeproj">
-   </FileRef>
-</Workspace>
+<?xml version="1.0" encoding="UTF-8"?>
+<Workspace
+   version = "1.0">
+   <FileRef
+      location = "group:Runner.xcodeproj">
+   </FileRef>
+</Workspace>

+ 3 - 7
example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme

@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <Scheme
-   LastUpgradeVersion = "1020"
+   LastUpgradeVersion = "1110"
    version = "1.3">
    <BuildAction
       parallelizeBuildables = "YES"
@@ -27,8 +27,6 @@
       selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
       selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
       shouldUseLaunchSchemeArgsEnv = "YES">
-      <Testables>
-      </Testables>
       <MacroExpansion>
          <BuildableReference
             BuildableIdentifier = "primary"
@@ -38,8 +36,8 @@
             ReferencedContainer = "container:Runner.xcodeproj">
          </BuildableReference>
       </MacroExpansion>
-      <AdditionalOptions>
-      </AdditionalOptions>
+      <Testables>
+      </Testables>
    </TestAction>
    <LaunchAction
       buildConfiguration = "Debug"
@@ -61,8 +59,6 @@
             ReferencedContainer = "container:Runner.xcodeproj">
          </BuildableReference>
       </BuildableProductRunnable>
-      <AdditionalOptions>
-      </AdditionalOptions>
    </LaunchAction>
    <ProfileAction
       buildConfiguration = "Profile"

+ 3 - 0
example/ios/Runner.xcworkspace/contents.xcworkspacedata

@@ -4,4 +4,7 @@
    <FileRef
       location = "group:Runner.xcodeproj">
    </FileRef>
+   <FileRef
+      location = "group:Pods/Pods.xcodeproj">
+   </FileRef>
 </Workspace>

+ 6 - 0
example/ios/Runner/AppDelegate.h

@@ -0,0 +1,6 @@
+#import <Flutter/Flutter.h>
+#import <UIKit/UIKit.h>
+
+@interface AppDelegate : FlutterAppDelegate
+
+@end

+ 13 - 0
example/ios/Runner/AppDelegate.m

@@ -0,0 +1,13 @@
+#import "AppDelegate.h"
+#import "GeneratedPluginRegistrant.h"
+
+@implementation AppDelegate
+
+- (BOOL)application:(UIApplication *)application
+    didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+  [GeneratedPluginRegistrant registerWithRegistry:self];
+  // Override point for customization after application launch.
+  return [super application:application didFinishLaunchingWithOptions:launchOptions];
+}
+
+@end

+ 0 - 13
example/ios/Runner/AppDelegate.swift

@@ -1,13 +0,0 @@
-import UIKit
-import Flutter
-
-@UIApplicationMain
-@objc class AppDelegate: FlutterAppDelegate {
-  override func application(
-    _ application: UIApplication,
-    didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
-  ) -> Bool {
-    GeneratedPluginRegistrant.register(with: self)
-    return super.application(application, didFinishLaunchingWithOptions: launchOptions)
-  }
-}

+ 122 - 122
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json

@@ -1,122 +1,122 @@
-{
-  "images" : [
-    {
-      "size" : "20x20",
-      "idiom" : "iphone",
-      "filename" : "Icon-App-20x20@2x.png",
-      "scale" : "2x"
-    },
-    {
-      "size" : "20x20",
-      "idiom" : "iphone",
-      "filename" : "Icon-App-20x20@3x.png",
-      "scale" : "3x"
-    },
-    {
-      "size" : "29x29",
-      "idiom" : "iphone",
-      "filename" : "Icon-App-29x29@1x.png",
-      "scale" : "1x"
-    },
-    {
-      "size" : "29x29",
-      "idiom" : "iphone",
-      "filename" : "Icon-App-29x29@2x.png",
-      "scale" : "2x"
-    },
-    {
-      "size" : "29x29",
-      "idiom" : "iphone",
-      "filename" : "Icon-App-29x29@3x.png",
-      "scale" : "3x"
-    },
-    {
-      "size" : "40x40",
-      "idiom" : "iphone",
-      "filename" : "Icon-App-40x40@2x.png",
-      "scale" : "2x"
-    },
-    {
-      "size" : "40x40",
-      "idiom" : "iphone",
-      "filename" : "Icon-App-40x40@3x.png",
-      "scale" : "3x"
-    },
-    {
-      "size" : "60x60",
-      "idiom" : "iphone",
-      "filename" : "Icon-App-60x60@2x.png",
-      "scale" : "2x"
-    },
-    {
-      "size" : "60x60",
-      "idiom" : "iphone",
-      "filename" : "Icon-App-60x60@3x.png",
-      "scale" : "3x"
-    },
-    {
-      "size" : "20x20",
-      "idiom" : "ipad",
-      "filename" : "Icon-App-20x20@1x.png",
-      "scale" : "1x"
-    },
-    {
-      "size" : "20x20",
-      "idiom" : "ipad",
-      "filename" : "Icon-App-20x20@2x.png",
-      "scale" : "2x"
-    },
-    {
-      "size" : "29x29",
-      "idiom" : "ipad",
-      "filename" : "Icon-App-29x29@1x.png",
-      "scale" : "1x"
-    },
-    {
-      "size" : "29x29",
-      "idiom" : "ipad",
-      "filename" : "Icon-App-29x29@2x.png",
-      "scale" : "2x"
-    },
-    {
-      "size" : "40x40",
-      "idiom" : "ipad",
-      "filename" : "Icon-App-40x40@1x.png",
-      "scale" : "1x"
-    },
-    {
-      "size" : "40x40",
-      "idiom" : "ipad",
-      "filename" : "Icon-App-40x40@2x.png",
-      "scale" : "2x"
-    },
-    {
-      "size" : "76x76",
-      "idiom" : "ipad",
-      "filename" : "Icon-App-76x76@1x.png",
-      "scale" : "1x"
-    },
-    {
-      "size" : "76x76",
-      "idiom" : "ipad",
-      "filename" : "Icon-App-76x76@2x.png",
-      "scale" : "2x"
-    },
-    {
-      "size" : "83.5x83.5",
-      "idiom" : "ipad",
-      "filename" : "Icon-App-83.5x83.5@2x.png",
-      "scale" : "2x"
-    },
-    {
-      "size" : "1024x1024",
-      "idiom" : "ios-marketing",
-      "filename" : "Icon-App-1024x1024@1x.png",
-      "scale" : "1x"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
+{
+  "images" : [
+    {
+      "size" : "20x20",
+      "idiom" : "iphone",
+      "filename" : "Icon-App-20x20@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "size" : "20x20",
+      "idiom" : "iphone",
+      "filename" : "Icon-App-20x20@3x.png",
+      "scale" : "3x"
+    },
+    {
+      "size" : "29x29",
+      "idiom" : "iphone",
+      "filename" : "Icon-App-29x29@1x.png",
+      "scale" : "1x"
+    },
+    {
+      "size" : "29x29",
+      "idiom" : "iphone",
+      "filename" : "Icon-App-29x29@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "size" : "29x29",
+      "idiom" : "iphone",
+      "filename" : "Icon-App-29x29@3x.png",
+      "scale" : "3x"
+    },
+    {
+      "size" : "40x40",
+      "idiom" : "iphone",
+      "filename" : "Icon-App-40x40@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "size" : "40x40",
+      "idiom" : "iphone",
+      "filename" : "Icon-App-40x40@3x.png",
+      "scale" : "3x"
+    },
+    {
+      "size" : "60x60",
+      "idiom" : "iphone",
+      "filename" : "Icon-App-60x60@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "size" : "60x60",
+      "idiom" : "iphone",
+      "filename" : "Icon-App-60x60@3x.png",
+      "scale" : "3x"
+    },
+    {
+      "size" : "20x20",
+      "idiom" : "ipad",
+      "filename" : "Icon-App-20x20@1x.png",
+      "scale" : "1x"
+    },
+    {
+      "size" : "20x20",
+      "idiom" : "ipad",
+      "filename" : "Icon-App-20x20@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "size" : "29x29",
+      "idiom" : "ipad",
+      "filename" : "Icon-App-29x29@1x.png",
+      "scale" : "1x"
+    },
+    {
+      "size" : "29x29",
+      "idiom" : "ipad",
+      "filename" : "Icon-App-29x29@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "size" : "40x40",
+      "idiom" : "ipad",
+      "filename" : "Icon-App-40x40@1x.png",
+      "scale" : "1x"
+    },
+    {
+      "size" : "40x40",
+      "idiom" : "ipad",
+      "filename" : "Icon-App-40x40@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "size" : "76x76",
+      "idiom" : "ipad",
+      "filename" : "Icon-App-76x76@1x.png",
+      "scale" : "1x"
+    },
+    {
+      "size" : "76x76",
+      "idiom" : "ipad",
+      "filename" : "Icon-App-76x76@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "size" : "83.5x83.5",
+      "idiom" : "ipad",
+      "filename" : "Icon-App-83.5x83.5@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "size" : "1024x1024",
+      "idiom" : "ios-marketing",
+      "filename" : "Icon-App-1024x1024@1x.png",
+      "scale" : "1x"
+    }
+  ],
+  "info" : {
+    "version" : 1,
+    "author" : "xcode"
+  }
+}

+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png


+ 23 - 23
example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json

@@ -1,23 +1,23 @@
-{
-  "images" : [
-    {
-      "idiom" : "universal",
-      "filename" : "LaunchImage.png",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "universal",
-      "filename" : "LaunchImage@2x.png",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "universal",
-      "filename" : "LaunchImage@3x.png",
-      "scale" : "3x"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
+{
+  "images" : [
+    {
+      "idiom" : "universal",
+      "filename" : "LaunchImage.png",
+      "scale" : "1x"
+    },
+    {
+      "idiom" : "universal",
+      "filename" : "LaunchImage@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "idiom" : "universal",
+      "filename" : "LaunchImage@3x.png",
+      "scale" : "3x"
+    }
+  ],
+  "info" : {
+    "version" : 1,
+    "author" : "xcode"
+  }
+}

+ 0 - 0
example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png


+ 0 - 0
example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png


+ 4 - 4
example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md

@@ -1,5 +1,5 @@
-# Launch Screen Assets
-
-You can customize the launch screen with your own desired assets by replacing the image files in this directory.
-
+# Launch Screen Assets
+
+You can customize the launch screen with your own desired assets by replacing the image files in this directory.
+
 You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images.

+ 37 - 37
example/ios/Runner/Base.lproj/LaunchScreen.storyboard

@@ -1,37 +1,37 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="12121" systemVersion="16G29" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
-    <dependencies>
-        <deployment identifier="iOS"/>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="12089"/>
-    </dependencies>
-    <scenes>
-        <!--View Controller-->
-        <scene sceneID="EHf-IW-A2E">
-            <objects>
-                <viewController id="01J-lp-oVM" sceneMemberID="viewController">
-                    <layoutGuides>
-                        <viewControllerLayoutGuide type="top" id="Ydg-fD-yQy"/>
-                        <viewControllerLayoutGuide type="bottom" id="xbc-2k-c8Z"/>
-                    </layoutGuides>
-                    <view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
-                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
-                        <subviews>
-                            <imageView opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" image="LaunchImage" translatesAutoresizingMaskIntoConstraints="NO" id="YRO-k0-Ey4">
-                            </imageView>
-                        </subviews>
-                        <color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
-                        <constraints>
-                            <constraint firstItem="YRO-k0-Ey4" firstAttribute="centerX" secondItem="Ze5-6b-2t3" secondAttribute="centerX" id="1a2-6s-vTC"/>
-                            <constraint firstItem="YRO-k0-Ey4" firstAttribute="centerY" secondItem="Ze5-6b-2t3" secondAttribute="centerY" id="4X2-HB-R7a"/>
-                        </constraints>
-                    </view>
-                </viewController>
-                <placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
-            </objects>
-            <point key="canvasLocation" x="53" y="375"/>
-        </scene>
-    </scenes>
-    <resources>
-        <image name="LaunchImage" width="168" height="185"/>
-    </resources>
-</document>
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="12121" systemVersion="16G29" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
+    <dependencies>
+        <deployment identifier="iOS"/>
+        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="12089"/>
+    </dependencies>
+    <scenes>
+        <!--View Controller-->
+        <scene sceneID="EHf-IW-A2E">
+            <objects>
+                <viewController id="01J-lp-oVM" sceneMemberID="viewController">
+                    <layoutGuides>
+                        <viewControllerLayoutGuide type="top" id="Ydg-fD-yQy"/>
+                        <viewControllerLayoutGuide type="bottom" id="xbc-2k-c8Z"/>
+                    </layoutGuides>
+                    <view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
+                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+                        <subviews>
+                            <imageView opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" image="LaunchImage" translatesAutoresizingMaskIntoConstraints="NO" id="YRO-k0-Ey4">
+                            </imageView>
+                        </subviews>
+                        <color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
+                        <constraints>
+                            <constraint firstItem="YRO-k0-Ey4" firstAttribute="centerX" secondItem="Ze5-6b-2t3" secondAttribute="centerX" id="1a2-6s-vTC"/>
+                            <constraint firstItem="YRO-k0-Ey4" firstAttribute="centerY" secondItem="Ze5-6b-2t3" secondAttribute="centerY" id="4X2-HB-R7a"/>
+                        </constraints>
+                    </view>
+                </viewController>
+                <placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
+            </objects>
+            <point key="canvasLocation" x="53" y="375"/>
+        </scene>
+    </scenes>
+    <resources>
+        <image name="LaunchImage" width="168" height="185"/>
+    </resources>
+</document>

+ 26 - 26
example/ios/Runner/Base.lproj/Main.storyboard

@@ -1,26 +1,26 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="10117" systemVersion="15F34" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
-    <dependencies>
-        <deployment identifier="iOS"/>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
-    </dependencies>
-    <scenes>
-        <!--Flutter View Controller-->
-        <scene sceneID="tne-QT-ifu">
-            <objects>
-                <viewController id="BYZ-38-t0r" customClass="FlutterViewController" sceneMemberID="viewController">
-                    <layoutGuides>
-                        <viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
-                        <viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
-                    </layoutGuides>
-                    <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
-                        <rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
-                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
-                        <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
-                    </view>
-                </viewController>
-                <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
-            </objects>
-        </scene>
-    </scenes>
-</document>
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="10117" systemVersion="15F34" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
+    <dependencies>
+        <deployment identifier="iOS"/>
+        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
+    </dependencies>
+    <scenes>
+        <!--Flutter View Controller-->
+        <scene sceneID="tne-QT-ifu">
+            <objects>
+                <viewController id="BYZ-38-t0r" customClass="FlutterViewController" sceneMemberID="viewController">
+                    <layoutGuides>
+                        <viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
+                        <viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
+                    </layoutGuides>
+                    <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
+                        <rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
+                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+                        <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+                    </view>
+                </viewController>
+                <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
+            </objects>
+        </scene>
+    </scenes>
+</document>

+ 45 - 45
example/ios/Runner/Info.plist

@@ -1,45 +1,45 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>$(DEVELOPMENT_LANGUAGE)</string>
-	<key>CFBundleExecutable</key>
-	<string>$(EXECUTABLE_NAME)</string>
-	<key>CFBundleIdentifier</key>
-	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>speech_plugin_example</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>$(FLUTTER_BUILD_NAME)</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>$(FLUTTER_BUILD_NUMBER)</string>
-	<key>LSRequiresIPhoneOS</key>
-	<true/>
-	<key>UILaunchStoryboardName</key>
-	<string>LaunchScreen</string>
-	<key>UIMainStoryboardFile</key>
-	<string>Main</string>
-	<key>UISupportedInterfaceOrientations</key>
-	<array>
-		<string>UIInterfaceOrientationPortrait</string>
-		<string>UIInterfaceOrientationLandscapeLeft</string>
-		<string>UIInterfaceOrientationLandscapeRight</string>
-	</array>
-	<key>UISupportedInterfaceOrientations~ipad</key>
-	<array>
-		<string>UIInterfaceOrientationPortrait</string>
-		<string>UIInterfaceOrientationPortraitUpsideDown</string>
-		<string>UIInterfaceOrientationLandscapeLeft</string>
-		<string>UIInterfaceOrientationLandscapeRight</string>
-	</array>
-	<key>UIViewControllerBasedStatusBarAppearance</key>
-	<false/>
-</dict>
-</plist>
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>CFBundleDevelopmentRegion</key>
+	<string>$(DEVELOPMENT_LANGUAGE)</string>
+	<key>CFBundleExecutable</key>
+	<string>$(EXECUTABLE_NAME)</string>
+	<key>CFBundleIdentifier</key>
+	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleName</key>
+	<string>speech_plugin_example</string>
+	<key>CFBundlePackageType</key>
+	<string>APPL</string>
+	<key>CFBundleShortVersionString</key>
+	<string>$(FLUTTER_BUILD_NAME)</string>
+	<key>CFBundleSignature</key>
+	<string>????</string>
+	<key>CFBundleVersion</key>
+	<string>$(FLUTTER_BUILD_NUMBER)</string>
+	<key>LSRequiresIPhoneOS</key>
+	<true/>
+	<key>UILaunchStoryboardName</key>
+	<string>LaunchScreen</string>
+	<key>UIMainStoryboardFile</key>
+	<string>Main</string>
+	<key>UISupportedInterfaceOrientations</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+		<string>UIInterfaceOrientationLandscapeLeft</string>
+		<string>UIInterfaceOrientationLandscapeRight</string>
+	</array>
+	<key>UISupportedInterfaceOrientations~ipad</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+		<string>UIInterfaceOrientationPortraitUpsideDown</string>
+		<string>UIInterfaceOrientationLandscapeLeft</string>
+		<string>UIInterfaceOrientationLandscapeRight</string>
+	</array>
+	<key>UIViewControllerBasedStatusBarAppearance</key>
+	<false/>
+</dict>
+</plist>

+ 0 - 1
example/ios/Runner/Runner-Bridging-Header.h

@@ -1 +0,0 @@
-#import "GeneratedPluginRegistrant.h"

+ 9 - 0
example/ios/Runner/main.m

@@ -0,0 +1,9 @@
+#import <Flutter/Flutter.h>
+#import <UIKit/UIKit.h>
+#import "AppDelegate.h"
+
+int main(int argc, char* argv[]) {
+  @autoreleasepool {
+    return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
+  }
+}

+ 68 - 19
example/pubspec.lock

@@ -1,39 +1,67 @@
 # Generated by pub
 # See https://dart.dev/tools/pub/glossary#lockfile
 packages:
+  archive:
+    dependency: transitive
+    description:
+      name: archive
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "2.0.10"
+  args:
+    dependency: transitive
+    description:
+      name: args
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "1.5.2"
   async:
     dependency: transitive
     description:
       name: async
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "2.3.0"
   boolean_selector:
     dependency: transitive
     description:
       name: boolean_selector
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.0.5"
   charcode:
     dependency: transitive
     description:
       name: charcode
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.1.2"
   collection:
     dependency: transitive
     description:
       name: collection
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.14.11"
+  convert:
+    dependency: transitive
+    description:
+      name: convert
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "2.1.1"
+  crypto:
+    dependency: transitive
+    description:
+      name: crypto
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "2.1.3"
   cupertino_icons:
     dependency: "direct main"
     description:
       name: cupertino_icons
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "0.1.2"
   flutter:
@@ -46,39 +74,53 @@ packages:
     description: flutter
     source: sdk
     version: "0.0.0"
+  image:
+    dependency: transitive
+    description:
+      name: image
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "2.1.4"
   matcher:
     dependency: transitive
     description:
       name: matcher
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "0.12.5"
   meta:
     dependency: transitive
     description:
       name: meta
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.1.7"
   path:
     dependency: transitive
     description:
       name: path
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.6.4"
   pedantic:
     dependency: transitive
     description:
       name: pedantic
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.8.0+1"
+  petitparser:
+    dependency: transitive
+    description:
+      name: petitparser
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "2.4.0"
   quiver:
     dependency: transitive
     description:
       name: quiver
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "2.0.5"
   sky_engine:
@@ -90,7 +132,7 @@ packages:
     dependency: transitive
     description:
       name: source_span
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.5.5"
   speech_plugin:
@@ -104,50 +146,57 @@ packages:
     dependency: transitive
     description:
       name: stack_trace
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.9.3"
   stream_channel:
     dependency: transitive
     description:
       name: stream_channel
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "2.0.0"
   string_scanner:
     dependency: transitive
     description:
       name: string_scanner
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.0.5"
   term_glyph:
     dependency: transitive
     description:
       name: term_glyph
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.1.0"
   test_api:
     dependency: transitive
     description:
       name: test_api
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "0.2.5"
   typed_data:
     dependency: transitive
     description:
       name: typed_data
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.1.6"
   vector_math:
     dependency: transitive
     description:
       name: vector_math
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "2.0.8"
+  xml:
+    dependency: transitive
+    description:
+      name: xml
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "3.5.0"
 sdks:
-  dart: ">=2.2.2 <3.0.0"
+  dart: ">=2.4.0 <3.0.0"

+ 36 - 36
ios/.gitignore

@@ -1,37 +1,37 @@
-.idea/
-.vagrant/
-.sconsign.dblite
-.svn/
-
-.DS_Store
-*.swp
-profile
-
-DerivedData/
-build/
-GeneratedPluginRegistrant.h
-GeneratedPluginRegistrant.m
-
-.generated/
-
-*.pbxuser
-*.mode1v3
-*.mode2v3
-*.perspectivev3
-
-!default.pbxuser
-!default.mode1v3
-!default.mode2v3
-!default.perspectivev3
-
-xcuserdata
-
-*.moved-aside
-
-*.pyc
-*sync/
-Icon?
-.tags*
-
-/Flutter/Generated.xcconfig
+.idea/
+.vagrant/
+.sconsign.dblite
+.svn/
+
+.DS_Store
+*.swp
+profile
+
+DerivedData/
+build/
+GeneratedPluginRegistrant.h
+GeneratedPluginRegistrant.m
+
+.generated/
+
+*.pbxuser
+*.mode1v3
+*.mode2v3
+*.perspectivev3
+
+!default.pbxuser
+!default.mode1v3
+!default.mode2v3
+!default.perspectivev3
+
+xcuserdata
+
+*.moved-aside
+
+*.pyc
+*sync/
+Icon?
+.tags*
+
+/Flutter/Generated.xcconfig
 /Flutter/flutter_export_environment.sh

+ 0 - 0
ios/Assets/.gitkeep


+ 20 - 0
ios/Classes/M4aToPcmHelper.h

@@ -0,0 +1,20 @@
+//
+//  M4aToPcmHelper.h
+//  TianyiProSwift
+//
+//  Created by i2国际私塾 on 2017/3/21.
+//  Copyright © 2017年 Chengdu Aitu Education Technology Ltd. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+@interface M4aToPcmHelper : NSObject
+
+/**
+ *  根据m4a音频转化成pcm格式的音频
+ *
+ *  @param url 音频路径
+ */
++ (NSData *) M4aToPcmWithUrl:(NSURL *)url;
+
+@end

+ 108 - 0
ios/Classes/M4aToPcmHelper.m

@@ -0,0 +1,108 @@
+//
+//  M4aToPcmHelper.m
+//  TianyiProSwift
+//
+//  Created by i2国际私塾 on 2017/3/21.
+//  Copyright © 2017年 Chengdu Aitu Education Technology Ltd. All rights reserved.
+//
+
+#import "M4aToPcmHelper.h"
+#import <AudioToolbox/AudioToolbox.h>
+#import <AVFoundation/AVFoundation.h>
+#import <MediaPlayer/MediaPlayer.h>
+
+@implementation M4aToPcmHelper
+
+/**
+ *  根据m4a音频转化成pcm格式的音频
+ *
+ *  @param urlStr 音频路径
+ *
+ *  @return pcm格式的[Int16]的Data
+ */
+
++ (NSData *) M4aToPcmWithUrl:(NSURL *)url {
+    NSArray *dirs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
+    NSString *documentsDirectoryPath = [dirs objectAtIndex:0];
+    NSString *exportPath = [documentsDirectoryPath stringByAppendingPathComponent:@"pcmData.pcm"];
+    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) {
+        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
+    }
+    NSURL *exportURL = [NSURL fileURLWithPath:exportPath];
+    
+    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:url options:nil];
+    NSError *assetError = nil;
+    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset
+                                                                error:&assetError];
+    if (assetError) {
+        NSLog (@"error: %@", assetError);
+        return nil;
+    }
+    
+    AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
+                                                           fileType:AVFileTypeCoreAudioFormat
+                                                              error:&assetError];
+    
+    AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput
+                                               assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
+                                               audioSettings: nil];
+    
+    [assetReader addOutput: assetReaderOutput];
+    
+    if (assetError) {
+        NSLog (@"error: %@", assetError);
+        return nil;
+    }
+    AudioChannelLayout channelLayout;
+    memset(&channelLayout, 0, sizeof(AudioChannelLayout));
+    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
+    NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
+                                    [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
+                                    [NSNumber numberWithFloat:8000.0], AVSampleRateKey,
+                                    [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
+                                    [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
+                                    [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
+                                    [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
+                                    [NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
+                                    [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
+                                    nil];
+    AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
+                                                                               outputSettings:outputSettings];
+    if ([assetWriter canAddInput:assetWriterInput]) {
+        [assetWriter addInput:assetWriterInput];
+    } else {
+        NSLog (@"can't add asset writer input... die!");
+        return nil;
+    }
+    
+    assetWriterInput.expectsMediaDataInRealTime = NO;
+    
+    [assetWriter startWriting];
+    [assetReader startReading];
+    
+    AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
+    CMTime startTime = CMTimeMake (0, soundTrack.naturalTimeScale);
+    [assetWriter startSessionAtSourceTime: startTime];
+    
+    __block UInt64 convertedByteCount = 0;
+    
+    while (assetWriterInput.readyForMoreMediaData) {
+        CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
+        if (nextBuffer) {
+            // append buffer
+            [assetWriterInput appendSampleBuffer: nextBuffer];
+            convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);
+        } else {
+            // done!
+            [assetWriterInput markAsFinished];
+            [assetWriter finishWritingWithCompletionHandler:^{
+            }];
+            [assetReader cancelReading];
+            return [NSData dataWithContentsOfURL:exportURL];
+            break;
+        }
+    }
+    return nil;
+}
+
+@end

+ 20 - 0
ios/Classes/Mp4ToPcmHelper.h

@@ -0,0 +1,20 @@
+//
+//  M4aToPcmHelper.h
+//  TianyiProSwift
+//
+//  Created by i2国际私塾 on 2017/3/21.
+//  Copyright © 2017年 Chengdu Aitu Education Technology Ltd. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+@interface Mp4ToPcmHelper : NSObject
+
+/**
+ *  根据mp4视频转化成pcm格式的音频
+ *
+ *  @param videoUrl 视频路径
+ */
++ (void) Mp4ToPcmWithUrl:(NSURL*)videoUrl completion:(void(^)(NSData*data))completionHandle;
+
+@end

+ 121 - 0
ios/Classes/Mp4ToPcmHelper.m

@@ -0,0 +1,121 @@
+//
+//  M4aToPcmHelper.m
+//  TianyiProSwift
+//
+//  Created by i2国际私塾 on 2017/3/21.
+//  Copyright © 2017年 Chengdu Aitu Education Technology Ltd. All rights reserved.
+//
+
+#import "Mp4ToPcmHelper.h"
+#import "M4aToPcmHelper.h"
+#import <AudioToolbox/AudioToolbox.h>
+#import <AVFoundation/AVFoundation.h>
+#import <MediaPlayer/MediaPlayer.h>
+
+@implementation Mp4ToPcmHelper
+
++ (void)Mp4ToPcmWithUrl:(NSURL*)videoUrl completion:(void(^)(NSData*data))completionHandle
+{
+    NSArray *dirs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
+    NSString *documentsDirectoryPath = [dirs objectAtIndex:0];
+    NSString *exportPath = [documentsDirectoryPath stringByAppendingPathComponent:@"pcmData.pcm"];
+    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) {
+        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
+    }
+    NSURL *exportURL = [NSURL fileURLWithPath:exportPath];
+        
+    AVURLAsset* videoAsset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
+    [videoAsset loadValuesAsynchronouslyForKeys:@[@"tracks"] completionHandler:^{
+        NSError *error = nil;
+        AVKeyValueStatus status = [videoAsset statusOfValueForKey:@"tracks"error:&error];
+        if(status == AVKeyValueStatusLoaded) { //数据加载完成
+            NSError *assetError = nil;
+            AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:videoAsset error:&assetError];
+            if (assetError) {
+                completionHandle(nil);
+                return;
+            }
+    
+            NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
+            [outputSettings setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
+            
+            NSArray *audioTracks = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
+            AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;
+            if ([audioTracks count] > 0)
+            {
+                AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
+                NSDictionary *outputSettings = @{AVFormatIDKey:@(kAudioFormatLinearPCM),
+                                                 AVLinearPCMIsBigEndianKey:@NO,
+                                                 AVLinearPCMIsFloatKey:@NO,
+                                                 AVLinearPCMBitDepthKey :@(16)
+                };
+                readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings: outputSettings];
+                readerAudioTrackOutput.alwaysCopiesSampleData = NO;
+                [assetReader addOutput:readerAudioTrackOutput];
+            }
+            
+            AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL fileType:AVFileTypeCoreAudioFormat   error:&assetError];
+            if (assetError) {
+                completionHandle(nil);
+                return;
+            }
+            
+            AudioChannelLayout channelLayout;
+            memset(&channelLayout, 0, sizeof(AudioChannelLayout));
+            channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
+            NSDictionary *outputSettings2 = [NSDictionary dictionaryWithObjectsAndKeys:
+                                            [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
+                                            [NSNumber numberWithFloat:8000.0], AVSampleRateKey,
+                                            [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
+                                            [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
+                                            [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
+                                            [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
+                                            [NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
+                                            [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
+                                            nil];
+            AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio    outputSettings:outputSettings2];
+            if ([assetWriter canAddInput:assetWriterInput]) {
+                [assetWriter addInput:assetWriterInput];
+            } else {
+                completionHandle(nil);
+                return;
+            }
+            assetWriterInput.expectsMediaDataInRealTime = NO;
+
+            if ([assetReader startReading] == NO){
+                completionHandle(nil);
+                return;
+            }
+            
+            [assetWriter startWriting];
+            
+            AVAssetTrack *soundTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
+            CMTime startTime = CMTimeMake (0, soundTrack.naturalTimeScale);
+            [assetWriter startSessionAtSourceTime: startTime];
+            
+            __block UInt64 convertedByteCount = 0;
+            
+            while (assetWriterInput.readyForMoreMediaData) {
+                CMSampleBufferRef nextBuffer = [readerAudioTrackOutput copyNextSampleBuffer];
+                if (nextBuffer) {
+                    // append buffer
+                    [assetWriterInput appendSampleBuffer: nextBuffer];
+                    convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);
+                } else {
+                    // done!
+                    [assetWriterInput markAsFinished];
+                    [assetWriter finishWritingWithCompletionHandler:^{
+                    }];
+                    [assetReader cancelReading];
+                    completionHandle([NSData dataWithContentsOfURL:exportURL]);
+                    return;
+                    break;
+                }
+            }
+            completionHandle(nil);
+            return;
+        }}
+     ];
+}
+
+@end

+ 68 - 0
ios/Classes/Results/ISEResult.h

@@ -0,0 +1,68 @@
+//
+//  ISEResult.h
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import <Foundation/Foundation.h>
+
+/**
+ *  ISE Result
+ */
+@interface ISEResult : NSObject
+
+/**
+ * Language:English(en)、Chinese(cn)
+ */
+@property(nonatomic,strong)NSString* language;
+
+/**
+ * Category:read_syllable(cn)、read_word、read_sentence
+ */
+@property(nonatomic,strong)NSString* category;
+
+/**
+ * Beginning of frame,10ms per frame
+ */
+@property(nonatomic,assign)int beg_pos;
+
+/**
+ * End of frame
+ */
+@property(nonatomic,assign)int end_pos;
+
+/**
+ * Content of ISE
+ */
+@property(nonatomic,strong)NSString* content;
+
+/**
+ * Total score
+ */
+@property(nonatomic,assign)float total_score;
+
+/**
+ * Duration(cn)
+ */
+@property(nonatomic,assign)int time_len;
+
+/**
+ * Exception info(en)
+ */
+@property(nonatomic,strong)NSString* except_info;
+
+/**
+ * Whether or not dirty read(cn)
+ */
+@property(nonatomic,assign)BOOL is_rejected;
+
+/**
+ * The lable of sentence in xml results
+ */
+@property(nonatomic,strong)NSMutableArray* sentences;
+
+-(NSString*) toString;
+
+@end

+ 17 - 0
ios/Classes/Results/ISEResult.m

@@ -0,0 +1,17 @@
+//
+//  ISEResult.m
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import "ISEResult.h"
+
+@implementation ISEResult
+
+-(NSString*) toString{
+    return @"";
+}
+
+@end

+ 17 - 0
ios/Classes/Results/ISEResultFinal.h

@@ -0,0 +1,17 @@
+//
+//  ISEResultFinal.h
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/7.
+//
+//
+
+#import "ISEResult.h"
+
+@interface ISEResultFinal : ISEResult
+
+@property(nonatomic,assign) int ret;
+
+-(NSString*) toString;
+
+@end

+ 18 - 0
ios/Classes/Results/ISEResultFinal.m

@@ -0,0 +1,18 @@
+//
+//  ISEResultFinal.m
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/7.
+//
+//
+
+#import "ISEResultFinal.h"
+
+@implementation ISEResultFinal
+
+-(NSString*) toString{
+    NSString* resultString=[NSString stringWithFormat:@"Returned Value:%d,Total Score:%f",self.ret,self.total_score];
+    return resultString;
+}
+
+@end

+ 47 - 0
ios/Classes/Results/ISEResultPhone.h

@@ -0,0 +1,47 @@
+//
+//  ISEResultPhone.h
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import <Foundation/Foundation.h>
+
+/**
+ *  The lable of Phone in xml results
+ */
+@interface ISEResultPhone : NSObject
+
+/**
+ * Beginning of frame,10ms per frame
+ */
+@property(nonatomic, assign)int beg_pos;
+
+/**
+ * End of frame
+ */
+@property(nonatomic, assign)int end_pos;
+
+/**
+ * Content of Phone
+ */
+@property(nonatomic, strong)NSString* content;
+
+/**
+ * Read message:0(Right),16(Skip),32(Duplicate),64(Readback),128(Replace)
+ */
+@property(nonatomic, assign)int dp_message;
+
+/**
+ * Duration(cn)
+ */
+@property(nonatomic, assign)int time_len;
+
+/**
+ * Get the standard phonetic symbol of content(en)
+ */
+- (NSString*) getStdSymbol;
+
+
+@end

+ 27 - 0
ios/Classes/Results/ISEResultPhone.m

@@ -0,0 +1,27 @@
+//
+//  ISEResultPhone.m
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import "ISEResultPhone.h"
+#import "ISEResultTools.h"
+
+@implementation ISEResultPhone
+
+/**
+ * Get the standard phonetic symbol of content(en)
+ */
+- (NSString*) getStdSymbol{
+    
+    if(self.content){
+        NSString* stdSymbol=[ISEResultTools toStdSymbol:self.content];
+        return stdSymbol?stdSymbol:self.content;
+    }
+    
+    return self.content;
+}
+
+@end

+ 13 - 0
ios/Classes/Results/ISEResultReadSentence.h

@@ -0,0 +1,13 @@
+//
+//  ISEResultReadSentence.h
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/7.
+//
+//
+
+#import "ISEResult.h"
+
+@interface ISEResultReadSentence : ISEResult
+
+@end

+ 48 - 0
ios/Classes/Results/ISEResultReadSentence.m

@@ -0,0 +1,48 @@
+//
+//  ISEResultReadSentence.m
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/7.
+//
+//
+
+#import "ISEResultReadSentence.h"
+#import "ISEResultTools.h"
+
+@implementation ISEResultReadSentence
+
+-(instancetype)init{
+    if(self=[super init]){
+        self.category=@"read_sentence";
+    }
+    return self;
+}
+
+-(NSString*) toString{
+    NSString* buffer = [[NSString alloc] init];
+    
+    if ([@"cn" isEqualToString:self.language]) {
+        buffer=[buffer stringByAppendingFormat:@"[ISE Results]\n"];
+        buffer=[buffer stringByAppendingFormat:@"Content:%@\n" ,self.content];
+        buffer=[buffer stringByAppendingFormat:@"Duration:%d\n",self.time_len];
+        buffer=[buffer stringByAppendingFormat:@"Total Score:%f\n",self.total_score];
+        buffer=[buffer stringByAppendingFormat:@"[Read Details]:%@\n",[ISEResultTools formatDetailsForLanguageCN:self.sentences]];
+        
+    } else {
+        if (self.is_rejected) {
+             buffer=[buffer stringByAppendingFormat:@"Dirty Read,"];
+            
+             buffer=[buffer stringByAppendingFormat:@"except_info:%@\n\n",self.except_info];
+        }
+        
+        buffer=[buffer stringByAppendingFormat:@"[ISE Results]\n"];
+        buffer=[buffer stringByAppendingFormat:@"Content:%@\n",self.content];
+//        buffer=[buffer stringByAppendingFormat:@"Duration:%d\n",self.time_len];
+        buffer=[buffer stringByAppendingFormat:@"Total Score:%f\n",self.total_score];
+        buffer=[buffer stringByAppendingFormat:@"[Read Details]:%@\n",[ISEResultTools formatDetailsForLanguageEN:self.sentences]];
+    }
+    
+    return buffer;
+}
+
+@end

+ 13 - 0
ios/Classes/Results/ISEResultReadSyllable.h

@@ -0,0 +1,13 @@
+//
+//  ISEResultReadSyllable.h
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/7.
+//
+//
+
+#import "ISEResult.h"
+
+@interface ISEResultReadSyllable : ISEResult
+
+@end

+ 35 - 0
ios/Classes/Results/ISEResultReadSyllable.m

@@ -0,0 +1,35 @@
+//
+//  ISEResultReadSyllable.m
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/7.
+//
+//
+
+#import "ISEResultReadSyllable.h"
+#import "ISEResultTools.h"
+
+@implementation ISEResultReadSyllable
+
+
+-(instancetype)init{
+    if(self=[super init]){
+        self.category = @"read_syllable";
+        self.language = @"cn";
+    }
+    return self;
+}
+
+-(NSString*) toString{
+    NSString* buffer = [[NSString alloc] init];
+
+    buffer=[buffer stringByAppendingFormat:@"[ISE Results]\n"];
+    buffer=[buffer stringByAppendingFormat:@"Content:%@\n" ,self.content];
+    buffer=[buffer stringByAppendingFormat:@"Duration:%d\n",self.time_len];
+    buffer=[buffer stringByAppendingFormat:@"Total Score:%f\n",self.total_score];
+    buffer=[buffer stringByAppendingFormat:@"[Read Details]:%@\n",[ISEResultTools formatDetailsForLanguageCN:self.sentences]];
+
+    return buffer;
+}
+
+@end

+ 13 - 0
ios/Classes/Results/ISEResultReadWord.h

@@ -0,0 +1,13 @@
+//
+//  ISEResultReadWord.h
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/7.
+//
+//
+
+#import "ISEResult.h"
+
+@interface ISEResultReadWord : ISEResult
+
+@end

+ 48 - 0
ios/Classes/Results/ISEResultReadWord.m

@@ -0,0 +1,48 @@
+//
+//  ISEResultReadWord.m
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/7.
+//
+//
+
+#import "ISEResultReadWord.h"
+#import "ISEResultTools.h"
+
+@implementation ISEResultReadWord
+
+-(instancetype)init{
+    if(self=[super init]){
+        self.category=@"read_word";
+    }
+    return self;
+}
+
+-(NSString*) toString{
+    NSString* buffer = [[NSString alloc] init];
+    
+    if ([@"cn" isEqualToString:self.language]) {
+        buffer=[buffer stringByAppendingFormat:@"[ISE Results]\n"];
+        buffer=[buffer stringByAppendingFormat:@"Content:%@\n" ,self.content];
+        buffer=[buffer stringByAppendingFormat:@"Duration:%d\n",self.time_len];
+        buffer=[buffer stringByAppendingFormat:@"Total Score:%f\n",self.total_score];
+        buffer=[buffer stringByAppendingFormat:@"[Read Details]:%@\n",[ISEResultTools formatDetailsForLanguageCN:self.sentences]];
+        
+    } else {
+        if (self.is_rejected) {
+            buffer=[buffer stringByAppendingFormat:@"Dirty Read,"];
+            
+            buffer=[buffer stringByAppendingFormat:@"except_info:%@\n\n",self.except_info];
+        }
+        
+        buffer=[buffer stringByAppendingFormat:@"[ISE Results]\n"];
+        buffer=[buffer stringByAppendingFormat:@"Content:%@\n",self.content];
+//        buffer=[buffer stringByAppendingFormat:@"Duration:%d\n",self.time_len];
+        buffer=[buffer stringByAppendingFormat:@"Total Score:%f\n",self.total_score];
+        buffer=[buffer stringByAppendingFormat:@"[Read Details]:%@\n",[ISEResultTools formatDetailsForLanguageEN:self.sentences]];
+    }
+    
+    return buffer;
+}
+
+@end

+ 56 - 0
ios/Classes/Results/ISEResultSentence.h

@@ -0,0 +1,56 @@
+//
+//  ISEResultSentence.h
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import <Foundation/Foundation.h>
+
+/**
+ *  The label of sentence in xml results
+ */
+@interface ISEResultSentence : NSObject
+
+/**
+ * Beginning of frame,10ms per frame
+ */
+@property(nonatomic, assign)int beg_pos;
+
+/**
+ * End of frame
+ */
+@property(nonatomic, assign)int end_pos;
+
+/**
+ * Content of Sentence
+ */
+@property(nonatomic, strong)NSString* content;
+
+/**
+ * Total score
+ */
+@property(nonatomic, assign)float total_score;
+
+/**
+ * Duration(cn)
+ */
+@property(nonatomic, assign)int time_len;
+
+/**
+ * The index of Sentence(en)
+ */
+@property(nonatomic, assign)int index;
+
+/**
+ * Count of words in Sentence(en)
+ */
+@property(nonatomic, assign)int word_count;
+
+/**
+ * Word array in Sentence
+ */
+@property(nonatomic, strong)NSMutableArray* words;
+
+@end

+ 13 - 0
ios/Classes/Results/ISEResultSentence.m

@@ -0,0 +1,13 @@
+//
+//  ISEResultSentence.m
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import "ISEResultSentence.h"
+
+@implementation ISEResultSentence
+
+@end

+ 56 - 0
ios/Classes/Results/ISEResultSyll.h

@@ -0,0 +1,56 @@
+//
+//  ISEResultSyll.h
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import <Foundation/Foundation.h>
+
+/**
+ *  The label of Syll in xml results
+ */
+@interface ISEResultSyll : NSObject
+
+/**
+ * Beginning of frame,10ms per frame
+ */
+@property(nonatomic, assign)int beg_pos;
+
+/**
+ * End of frame
+ */
+@property(nonatomic, assign)int end_pos;
+
+/**
+ * Content of Syll
+ */
+@property(nonatomic, strong)NSString* content;
+
+/**
+ * Pin Yin(cn),number represents tone,5 represents light tone,for example, fen1
+ */
+@property(nonatomic, strong)NSString* symbol;
+
+/**
+ * Read message:0(Right),16(Skip),32(Duplicate),64(Readback),128(Replace)
+ */
+@property(nonatomic, assign)int dp_message;
+
+/**
+ * Duration(cn)
+ */
+@property(nonatomic, assign)int time_len;
+
+/**
+ * Phonetic array in Syll
+ */
+@property(nonatomic, strong)NSMutableArray* phones;
+
+/**
+ * Get the standard phonetic symbol of content(en)
+ */
+- (NSString*) getStdSymbol;
+
+@end

+ 29 - 0
ios/Classes/Results/ISEResultSyll.m

@@ -0,0 +1,29 @@
+//
+//  ISEResultSyll.m
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import "ISEResultSyll.h"
+#import "ISEResultTools.h"
+
+@implementation ISEResultSyll
+
+/**
+ * Get the standard phonetic symbol of content(en)
+ */
+- (NSString*) getStdSymbol{
+
+    NSArray* symbols=[self.content componentsSeparatedByString:@" "];
+    NSString* stdSymbol=[[NSString alloc] init];
+        
+    for (int i = 0; i < [symbols count]; ++i) {
+        stdSymbol = [stdSymbol stringByAppendingString:[ISEResultTools toStdSymbol:symbols[i]]];
+    }
+    
+    return stdSymbol;
+}
+
+@end

+ 61 - 0
ios/Classes/Results/ISEResultTools.h

@@ -0,0 +1,61 @@
+//
+//  ISEResultTools.h
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import <Foundation/Foundation.h>
+
+
+FOUNDATION_EXPORT NSString* const KCIFlyResultNormal;
+FOUNDATION_EXPORT NSString* const KCIFlyResultMiss;
+FOUNDATION_EXPORT NSString* const KCIFlyResultAdd;
+FOUNDATION_EXPORT NSString* const KCIFlyResultRepeat;
+FOUNDATION_EXPORT NSString* const KCIFlyResultReplace;
+
+FOUNDATION_EXPORT NSString* const KCIFlyResultNoise;
+FOUNDATION_EXPORT NSString* const KCIFlyResultMute;
+
+
+@interface ISEResultTools : NSObject
+
+/*!
+ *  Get the standard phonetic symbol of symbol
+ *
+ *  @param symbol iFlytek phonetic symbol
+ *
+ *  @return the standard symbol if a mapping exists; otherwise returns symbol itself
+ */
++(NSString*) toStdSymbol:(NSString*) symbol;
+
+
+/*!
+ *  Get the message of dpMessage
+ */
++ (NSString*)translateDpMessageInfo:(int)dpMessage;
+
+/*!
+ *  Get the message of content
+ */
++ (NSString*)translateContentInfo:(NSString*) content;
+
+
+/**
+ * Get the format details from sentences in chinese
+ *
+ * @param sentences sentences in chinese
+ * @return the format details
+ */
++ (NSString*)formatDetailsForLanguageCN:(NSArray*) sentences ;
+
+/**
+ * Get the format details from sentences in english
+ *
+ * @param sentences sentences in english
+ * @return the format details
+ */
++ (NSString*)formatDetailsForLanguageEN:(NSArray*) sentences ;
+
+@end

+ 254 - 0
ios/Classes/Results/ISEResultTools.m

@@ -0,0 +1,254 @@
+//
+//  ISEResultTools.m
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import "ISEResultTools.h"
+#import "ISEResultPhone.h"
+#import "ISEResultSyll.h"
+#import "ISEResultWord.h"
+#import "ISEResultSentence.h"
+
+@implementation ISEResultTools
+
+
++(NSString*) toStdSymbol:(NSString*) symbol{
+    
+    if(!symbol){
+        return symbol;
+    }
+    
+    /**
+     * The mapping table between iFlytek phonetic symbol and standard phonetic symbol(en)
+     */
+    static NSDictionary* _gISEResultPhoneHashDic;
+    
+    static dispatch_once_t onceToken;
+    dispatch_once(&onceToken, ^{
+        _gISEResultPhoneHashDic=@{
+            @"aa" : @"ɑ:",
+            @"oo" : @"ɔ",
+            @"ae" : @"æ",
+            @"ah" : @"ʌ",
+            @"ao" : @"ɔ:",
+            @"aw" : @"aʊ",
+            @"ax" : @"ə",
+            @"ay" : @"aɪ",
+            @"eh" : @"e",
+            @"er" : @"ə:",
+            @"ey" : @"eɪ",
+            @"ih" : @"ɪ",
+            @"iy" : @"i:",
+            @"ow" : @"əʊ",
+            @"oy" : @"ɔɪ",
+            @"uh" : @"ʊ",
+            @"uw" : @"ʊ:",
+            @"ch" : @"tʃ",
+            @"dh" : @"ð",
+            @"hh" : @"h",
+            @"jh" : @"dʒ",
+            @"ng" : @"ŋ",
+            @"sh" : @"ʃ",
+            @"th" : @"θ",
+            @"zh" : @"ʒ",
+            @"y" : @"j",
+            @"d" : @"d",
+            @"k" : @"k",
+            @"l" : @"l",
+            @"m" : @"m",
+            @"n" : @"n",
+            @"b" : @"b",
+            @"f" : @"f",
+            @"g" : @"g",
+            @"p" : @"p",
+            @"r" : @"r",
+            @"s" : @"s",
+            @"t" : @"t",
+            @"v" : @"v",
+            @"w" : @"w",
+            @"z" : @"z",
+            @"ar" : @"eə",
+            @"ir" : @"iə",
+            @"ur" : @"ʊə",
+            @"tr" : @"tr",
+            @"dr" : @"dr",
+            @"ts" : @"ts",
+            @"dz" : @"dz"
+        };
+        
+    });
+    
+    NSString* stdsymbol=[_gISEResultPhoneHashDic objectForKey:symbol];
+    return stdsymbol?stdsymbol:symbol;
+    
+}
+
+
+NSString* const KCIFlyResultNormal=@"Right";
+NSString* const KCIFlyResultMiss=@"Skip";
+NSString* const KCIFlyResultAdd=@"Duplicate";
+NSString* const KCIFlyResultRepeat=@"Readback";
+NSString* const KCIFlyResultReplace=@"Replace";
+
+NSString* const KCIFlyResultNoise=@"Noise";
+NSString* const KCIFlyResultMute=@"Mute";
+
++ (NSString*)translateDpMessageInfo:(int)dpMessage {
+    
+    static NSDictionary* _gISEResultDpMessageHashDic;
+    
+    static dispatch_once_t onceToken;
+    dispatch_once(&onceToken, ^{
+        _gISEResultDpMessageHashDic=@{
+            @0 : KCIFlyResultNormal,
+            @16 : KCIFlyResultMiss,
+            @32 : KCIFlyResultAdd,
+            @64 : KCIFlyResultRepeat,
+            @128 : KCIFlyResultReplace
+        };
+    });
+    
+    NSString* transDpMessage=[_gISEResultDpMessageHashDic objectForKey:[NSNumber numberWithInt:dpMessage]];
+    return transDpMessage;
+}
+
++ (NSString*)translateContentInfo:(NSString*) content {
+    
+    if(!content){
+        return nil;
+    }
+    
+    static NSDictionary* _gISEResultContentHashDic;
+    
+    static dispatch_once_t onceToken;
+    dispatch_once(&onceToken, ^{
+        _gISEResultContentHashDic=@{
+            @"sil"  : KCIFlyResultMute,
+            @"silv" : KCIFlyResultMute,
+            @"fil"  : KCIFlyResultNoise
+        };
+    });
+    
+    NSString* transContent=[_gISEResultContentHashDic objectForKey:content];
+    return transContent?transContent:content;
+}
+
+
+/**
+ * Get the format details from sentences in chinese
+ *
+ * @param sentences sentences in chinese
+ * @return the format details
+ */
++ (NSString*)formatDetailsForLanguageCN:(NSArray*) sentences {
+    NSString* buffer =[[NSString alloc] init];
+    if (!sentences) {
+        return nil;
+    }
+    
+    for (ISEResultSentence* sentence in sentences ) {
+        
+        if (nil == sentence.words) {
+            continue;
+        }
+        
+        for (ISEResultWord* word in sentence.words) {
+            NSString* wContent=[ISEResultTools translateContentInfo:word.content];
+            if ([KCIFlyResultNoise isEqualToString:wContent] || [KCIFlyResultMute isEqualToString:wContent]){
+                continue;
+            }
+            buffer=[buffer stringByAppendingFormat:@"\nWord[%@] %@  Dur:%d",wContent,word.symbol,word.time_len];
+            
+            if (!word.sylls) {
+                continue;
+            }
+            
+            for (ISEResultSyll* syll in word.sylls) {
+                NSString* syContent=[ISEResultTools translateContentInfo:[syll content]];
+                if ([KCIFlyResultNoise isEqualToString:syContent] || [KCIFlyResultMute isEqualToString:syContent]){
+                    continue;
+                }
+                
+                buffer=[buffer stringByAppendingFormat:@"\n└Syllable[%@] %@  Dur:%d",syContent,syll.symbol,syll.time_len];
+                if (!syll.phones) {
+                    continue;
+                }
+                
+                for (ISEResultPhone* phone in syll.phones) {
+                    NSString* pContent=[ISEResultTools translateContentInfo:[phone content]];
+                    NSString* pDpMessage=[ISEResultTools translateDpMessageInfo:phone.dp_message];
+                    buffer=[buffer stringByAppendingFormat:@"\n\t└Phoneme[%@] Dur:%d Msg:%@",pContent,phone.time_len,pDpMessage];
+                }
+                
+            }
+            buffer=[buffer stringByAppendingString:@"\n"];
+        }
+    }
+    return buffer;
+    
+}
+
+/**
+ * Get the format details from sentences in english
+ *
+ * @param sentences sentences in english
+ * @return the format details
+ */
++ (NSString*)formatDetailsForLanguageEN:(NSArray*) sentences {
+    //    NSString* buffer =[[NSString alloc] init];
+    if (!sentences) {
+        return nil;
+    }
+    NSMutableArray * result = [[NSMutableArray alloc] init];
+    for (ISEResultSentence* sentence in sentences ) {
+        NSString* sContent=[ISEResultTools translateContentInfo:sentence.content];
+        if ([KCIFlyResultNoise isEqualToString:sContent] || [KCIFlyResultMute isEqualToString:sContent]){
+            continue;
+        }
+        
+        if (nil == sentence.words) {
+            continue;
+        }
+        for (ISEResultWord* word in sentence.words) {
+            NSString* wContent=[ISEResultTools translateContentInfo:word.content];
+            //            NSString* wDpMessage=[ISEResultTools translateDpMessageInfo:word.dp_message];
+            if ([KCIFlyResultNoise isEqualToString:wContent] || [KCIFlyResultMute isEqualToString:wContent]){
+                continue;
+            }
+            NSMutableDictionary *dictionary = [[NSMutableDictionary alloc] init];
+            [dictionary setValue:wContent forKey:@"content"];
+            [dictionary setValue:@(word.total_score) forKey:@"score"];
+            [result addObject: dictionary];
+            //            buffer=[buffer stringByAppendingFormat:@"\nWord[%@] Msg:%@  Score:%f",wContent,wDpMessage,word.total_score];
+            
+            //            if (!word.sylls) {
+            //                buffer=[buffer stringByAppendingString:@"\n"];
+            //                continue;
+            //            }
+            //
+            //            for (ISEResultSyll* syll in word.sylls) {
+            //                NSString* syContent=[ISEResultTools translateContentInfo:[syll getStdSymbol]];
+            //                buffer=[buffer stringByAppendingFormat:@"\n└Syllable[%@] ",syContent];
+            //                if (!syll.phones) {
+            //                    continue;
+            //                }
+            //
+            //                for (ISEResultPhone* phone in syll.phones) {
+            //                    NSString* pContent=[ISEResultTools translateContentInfo:[phone getStdSymbol]];
+            //                    NSString* pDpMessage=[ISEResultTools translateDpMessageInfo:phone.dp_message];
+            //                    buffer=[buffer stringByAppendingFormat:@"\n\t└Phoneme[%@] Msg:%@",pContent,pDpMessage];
+            //                }
+            //
+            //            }
+            //            buffer=[buffer stringByAppendingString:@"\n"];
+        }
+    }
+    NSData *data = [NSJSONSerialization dataWithJSONObject:result options:NSJSONWritingPrettyPrinted error:nil];
+    NSString *jsonStr = [[NSString alloc]initWithData:data encoding:NSUTF8StringEncoding];
+    return jsonStr;
+}
+
+@end

+ 66 - 0
ios/Classes/Results/ISEResultWord.h

@@ -0,0 +1,66 @@
+//
+//  ISEResultWord.h
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import <Foundation/Foundation.h>
+
+/**
+ *  The label of Word in xml results
+ */
+@interface ISEResultWord : NSObject
+
+/**
+ * Beginning of frame,10ms per frame
+ */
+@property(nonatomic, assign)int beg_pos;
+
+/**
+ * End of frame
+ */
+@property(nonatomic, assign)int end_pos;
+
+/**
+ * Content of Word
+ */
+@property(nonatomic, strong)NSString* content;
+
+/**
+ * Read message:0(Right),16(Skip),32(Duplicate),64(Readback),128(Replace)
+ */
+@property(nonatomic, assign)int dp_message;
+
+/**
+ * The index of Word in chapter(en)
+ */
+@property(nonatomic, assign)int global_index;
+
+/**
+ * The index of Word in sentence(en)
+ */
+@property(nonatomic, assign)int index;
+
+/**
+ * Pin Yin(cn),number represents tone,5 represents light tone,for example, fen1
+ */
+@property(nonatomic, strong)NSString* symbol;
+
+/**
+ * Duration(cn)
+ */
+@property(nonatomic, assign)int time_len;
+
+/**
+ * Total score(en)
+ */
+@property(nonatomic, assign)float total_score;
+
+/**
+ * Syll array in Word
+ */
+@property(nonatomic, strong)NSMutableArray* sylls;
+
+@end

+ 13 - 0
ios/Classes/Results/ISEResultWord.m

@@ -0,0 +1,13 @@
+//
+//  ISEResultWord.m
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import "ISEResultWord.h"
+
+@implementation ISEResultWord
+
+@end

+ 29 - 0
ios/Classes/Results/ISEResultXmlParser.h

@@ -0,0 +1,29 @@
+//
+//  ISEResultXmlParser.h
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import <Foundation/Foundation.h>
+
+@class ISEResult;
+
+@protocol ISEResultXmlParserDelegate <NSObject>
+
+-(void)onISEResultXmlParser:(NSXMLParser *)parser Error:(NSError*)error;
+-(void)onISEResultXmlParserResult:(ISEResult*)result;
+
+@end
+
+@interface ISEResultXmlParser : NSObject <NSXMLParserDelegate>
+
+@property (nonatomic, weak) id <ISEResultXmlParserDelegate> delegate;
+
+/*!
+ *  parse xml results for ISE
+ */
+- (void)parserXml:(NSString*) xml;
+
+@end

+ 278 - 0
ios/Classes/Results/ISEResultXmlParser.m

@@ -0,0 +1,278 @@
+//
+//  ISEResultXmlParser.m
+//  IFlyMSCDemo
+//
+//  Created by 张剑 on 15/3/6.
+//
+//
+
+#import "ISEResultXmlParser.h"
+#import "ISEResult.h"
+#import "ISEResultPhone.h"
+#import "ISEResultSyll.h"
+#import "ISEResultWord.h"
+#import "ISEResultSentence.h"
+#import "ISEResultFinal.h"
+#import "ISEResultReadSyllable.h"
+#import "ISEResultReadWord.h"
+#import "ISEResultReadSentence.h"
+
+@interface ISEResultXmlParser ()
+
+@property(nonatomic,retain)ISEResult* xmlResult;
+
+@property(nonatomic,assign)BOOL isPlainResult;
+@property(nonatomic,assign)BOOL isRecPaperPassed;
+@property(nonatomic,retain)ISEResultPhone* phone;
+@property(nonatomic,retain)ISEResultSyll*  syll;
+@property(nonatomic,retain)ISEResultWord*  word;
+@property(nonatomic,retain)ISEResultSentence* sentence;
+
+@end
+
+@implementation ISEResultXmlParser
+
+void readTotalResult(ISEResult* result, NSDictionary* attrDic);
+ISEResultPhone* createPhone(NSDictionary* attrDic);
+ISEResultSyll* createSyll(NSDictionary* attrDic);
+ISEResultWord* createWord(NSDictionary* attrDic);
+ISEResultSentence* createSentence(NSDictionary* attrDic);
+
+- (void)clearAllProperty{
+    self.isPlainResult=NO;
+    self.isRecPaperPassed=NO;
+    self.phone=nil;
+    self.syll=nil;
+    self.word=nil;
+    self.sentence=nil;
+    
+}
+
+- (void)parserXml:(NSString*) xml{
+    
+    [self clearAllProperty];
+    if(xml){
+        
+        self.xmlResult = nil;
+        
+        NSData* xmlData=[xml dataUsingEncoding:NSUTF8StringEncoding];
+        NSXMLParser *parser = [[NSXMLParser alloc] initWithData:xmlData];
+        [parser setShouldProcessNamespaces:NO];
+        [parser setShouldReportNamespacePrefixes:NO];
+        [parser setShouldResolveExternalEntities:NO];
+        
+        [parser setDelegate:self];
+        [parser parse];
+    }
+    else{
+        if (self.delegate && [self.delegate respondsToSelector:@selector(onISEResultXmlParserResult:)]) {
+            [self.delegate onISEResultXmlParserResult:self.xmlResult];
+        }
+    }
+}
+
+
+#pragma mark - tools
+
+
+void readTotalResult(ISEResult* result, NSDictionary* attrDic) {
+    result.beg_pos = [[attrDic objectForKey:@"beg_pos"] intValue];
+    result.end_pos = [[attrDic objectForKey:@"end_pos"] intValue];
+    result.content = [attrDic objectForKey:@"content"];
+    result.total_score = [[attrDic objectForKey:@"total_score"] floatValue];
+    result.time_len = [[attrDic objectForKey:@"time_len"] intValue];
+    result.except_info = [attrDic objectForKey:@"except_info"];
+    result.is_rejected = [[attrDic objectForKey:@"is_rejected"] boolValue];
+}
+
+ISEResultPhone* createPhone(NSDictionary* attrDic) {
+    ISEResultPhone* phone=[[ISEResultPhone alloc] init];
+    phone.beg_pos = [[attrDic objectForKey:@"beg_pos"] intValue];
+    phone.end_pos = [[attrDic objectForKey:@"end_pos"] intValue];
+    phone.content = [attrDic objectForKey:@"content"];
+    phone.dp_message = [[attrDic objectForKey:@"dp_message"] intValue];
+    phone.time_len = [[attrDic objectForKey:@"time_len"] intValue];
+    return phone;
+}
+
+ISEResultSyll* createSyll(NSDictionary* attrDic) {
+    ISEResultSyll* syll=[[ISEResultSyll alloc] init];
+    syll.beg_pos = [[attrDic objectForKey:@"beg_pos"] intValue];
+    syll.end_pos = [[attrDic objectForKey:@"end_pos"] intValue];
+    syll.content = [attrDic objectForKey:@"content"];
+    syll.symbol = [attrDic objectForKey:@"symbol"];
+    syll.dp_message = [[attrDic objectForKey:@"dp_message"] intValue];
+    syll.time_len = [[attrDic objectForKey:@"time_len"] intValue];
+    return syll;
+}
+
+ISEResultWord* createWord(NSDictionary* attrDic) {
+    ISEResultWord* word=[[ISEResultWord alloc] init];
+    word.beg_pos = [[attrDic objectForKey:@"beg_pos"] intValue];
+    word.end_pos = [[attrDic objectForKey:@"end_pos"] intValue];
+    word.content = [attrDic objectForKey:@"content"];
+    word.symbol =  [attrDic objectForKey:@"symbol"];
+    word.dp_message = [[attrDic objectForKey:@"dp_message"] intValue];
+    word.time_len = [[attrDic objectForKey:@"time_len"] intValue];
+    word.total_score = [[attrDic objectForKey:@"total_score"] floatValue];
+    word.global_index = [[attrDic objectForKey:@"global_index"] intValue];
+    word.index = [[attrDic objectForKey:@"index"] intValue];
+    return word;
+}
+
+ISEResultSentence* createSentence(NSDictionary* attrDic) {
+    ISEResultSentence* sentence=[[ISEResultSentence alloc] init];;
+    sentence.beg_pos = [[attrDic objectForKey:@"beg_pos"] intValue];
+    sentence.end_pos = [[attrDic objectForKey:@"end_pos"] intValue];
+    sentence.content = [attrDic objectForKey:@"content"];
+    sentence.time_len = [[attrDic objectForKey:@"time_len"] intValue];
+    sentence.index = [[attrDic objectForKey:@"index"] intValue];
+    sentence.word_count = [[attrDic objectForKey:@"word_count"] intValue];
+    return sentence;
+}
+
+#pragma mark - NSXMLParser delegate
+- (void) parserDidStartDocument:(NSXMLParser *)parser{
+}
+
+- (void) parserDidEndDocument:(NSXMLParser *)parser{
+}
+
+- (void) parser:(NSXMLParser *)parser
+didStartElement:(NSString *)elementName
+   namespaceURI:(NSString *)namespaceURI
+  qualifiedName:(NSString *)qualifiedName
+     attributes:(NSDictionary *)attributeDict{
+    
+    
+    //complete
+    if([@"rec_paper" isEqualToString:elementName]){
+        _isRecPaperPassed=YES;
+    }
+    else if([@"read_syllable" isEqualToString:elementName]){
+        if(!_isRecPaperPassed){
+             _xmlResult=[[ISEResultReadSyllable alloc] init];
+        }
+        else{
+            readTotalResult(self.xmlResult, attributeDict);
+        }
+        
+    }
+    else if([@"read_word" isEqualToString:elementName]){
+        if(!_isRecPaperPassed){
+            _xmlResult=[[ISEResultReadWord alloc] init];
+            NSString* lan=[attributeDict objectForKey:@"lan"];
+            _xmlResult.language=lan?lan:@"cn";
+        }
+        else{
+            readTotalResult(self.xmlResult, attributeDict);
+        }
+        
+    }
+    else if([@"read_sentence" isEqualToString:elementName]||[@"read_chapter" isEqualToString:elementName]){
+        if(!_isRecPaperPassed){
+            _xmlResult=[[ISEResultReadSentence alloc] init];
+            NSString* lan=[attributeDict objectForKey:@"lan"];
+            _xmlResult.language=lan?lan:@"cn";
+        }
+        else{
+            readTotalResult(self.xmlResult, attributeDict);
+        }
+        
+    }
+    else if([@"sentence" isEqualToString:elementName]){
+        if(_xmlResult&&!_xmlResult.sentences){
+            _xmlResult.sentences=[[NSMutableArray alloc] init];
+        }
+        _sentence=createSentence(attributeDict);
+    }
+    else if([@"word" isEqualToString:elementName]){
+        if(_sentence && !_sentence.words){
+            _sentence.words=[[NSMutableArray alloc] init];
+        }
+        _word=createWord(attributeDict);
+    }
+    else if([@"syll" isEqualToString:elementName]){
+        if(_word && !_word.sylls){
+            _word.sylls=[[NSMutableArray alloc] init];
+        }
+        _syll=createSyll(attributeDict);
+    }
+    else if([@"phone" isEqualToString:elementName]){
+        if(_syll && !_syll.phones){
+            _syll.phones=[[NSMutableArray alloc] init];
+        }
+        _phone=createPhone(attributeDict);
+    }
+    
+    //plain
+    if([@"FinalResult" isEqualToString:elementName]){
+        self.isPlainResult=YES;
+        _xmlResult = [[ISEResultFinal alloc] init];
+    }
+    else if([@"ret" isEqualToString:elementName]){
+        [(ISEResultFinal *)_xmlResult setRet:[[attributeDict objectForKey:@"value"] intValue]];
+    }
+    else if([@"total_score" isEqualToString:elementName]){
+        [(ISEResultFinal *)_xmlResult setTotal_score:[[attributeDict objectForKey:@"value"] floatValue]];
+    }
+    else if([@"xml_result" isEqualToString:elementName]){
+        self.isPlainResult=NO;
+    }
+    
+}
+
+- (void) parser:(NSXMLParser *)parser foundCharacters:(NSString *)string {
+    
+}
+
+- (void) parser:(NSXMLParser *)parser foundIgnorableWhitespace:(NSString *)whitespaceString{
+    
+}
+
+
+
+- (void) parser:(NSXMLParser *) parser
+  didEndElement:(NSString *) elementName
+   namespaceURI:(NSString *) namespaceURI
+  qualifiedName:(NSString *) qualifiedName{
+
+    
+    if([@"phone" isEqualToString:elementName]){
+        [_syll.phones addObject:_phone];
+        _phone=nil;
+    }
+    else if([@"syll" isEqualToString:elementName]){
+        [_word.sylls addObject:_syll];
+        _syll=nil;
+    }
+    else if([@"word" isEqualToString:elementName]){
+        [_sentence.words addObject:_word];
+        _word=nil;
+    }
+    else if([@"sentence" isEqualToString:elementName]){
+        [_xmlResult.sentences addObject:_sentence];
+        _sentence=nil;
+    }
+    else if([@"read_syllable" isEqualToString:elementName] ||
+            [@"read_word" isEqualToString:elementName] ||
+            [@"read_sentence" isEqualToString:elementName] ||
+            [@"read_chapter" isEqualToString:elementName] ||
+            [@"FinalResult" isEqualToString:elementName] ){
+        
+        [parser abortParsing];
+        if (self.delegate && [self.delegate respondsToSelector:@selector(onISEResultXmlParserResult:)]) {
+            [self.delegate onISEResultXmlParserResult:self.xmlResult];
+        }
+    }
+    
+}
+
+- (void) parser:(NSXMLParser *)parser parseErrorOccurred:(NSError *)parseError{
+    if (self.delegate && [self.delegate respondsToSelector:@selector(onISEResultXmlParser:Error:)]) {
+        [self.delegate onISEResultXmlParser:parser Error:parseError];
+    }
+}
+
+@end
+

+ 4 - 4
ios/Classes/SpeechPlugin.h

@@ -1,4 +1,4 @@
-#import <Flutter/Flutter.h>
-
-@interface SpeechPlugin : NSObject<FlutterPlugin>
-@end
+#import <Flutter/Flutter.h>
+
+@interface SpeechPlugin : NSObject<FlutterPlugin>
+@end

+ 155 - 8
ios/Classes/SpeechPlugin.m

@@ -1,8 +1,155 @@
-#import "SpeechPlugin.h"
-#import <speech_plugin/speech_plugin-Swift.h>
-
-@implementation SpeechPlugin
-+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
-  [SwiftSpeechPlugin registerWithRegistrar:registrar];
-}
-@end
+#import "SpeechPlugin.h"
+#import <iflyMSC/iflyMSC.h>
+#import "M4aToPcmHelper.h"
+#import "Mp4ToPcmHelper.h"
+#import "Results/ISEResult.h"
+#import "Results/ISEResultXmlParser.h"
+#import "Results/ISEResultTools.h"
+
+@interface SpeechPlugin () <IFlySpeechEvaluatorDelegate, ISEResultXmlParserDelegate>
+@property (nonatomic, strong) FlutterResult result;
+@property (nonatomic, strong) IFlySpeechEvaluator *iFlySpeechEvaluator;
+@property (nonatomic, strong) NSNumber *index;
+@end
+
+@implementation SpeechPlugin
++ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
+    FlutterMethodChannel* channel = [FlutterMethodChannel
+                                     methodChannelWithName:@"speech_plugin"
+                                     binaryMessenger:[registrar messenger]];
+    SpeechPlugin* instance = [[SpeechPlugin alloc] init];
+    [registrar addMethodCallDelegate:instance channel:channel];
+}
+
+- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
+    self.result = result;
+    if ([@"getPlatformVersion" isEqualToString:call.method]) {
+        result([@"iOS " stringByAppendingString:[[UIDevice currentDevice] systemVersion]]);
+    } else if ([@"initSpeechSdk" isEqualToString:call.method]) {
+        [self iflyInit];
+    }  else if ([@"evaluatorByAudio" isEqualToString:call.method]) {
+        NSNumber* index = call.arguments[@"index"];
+        NSString* recordPath = call.arguments[@"recordPath"];
+        NSString* text = call.arguments[@"en"];
+        [self evaluateVoice:index andPath:recordPath andText:text andIsVideo:false];
+    }  else if ([@"evaluatorByMp4" isEqualToString:call.method]) {
+        NSNumber* index = call.arguments[@"index"];
+        NSString* recordPath = call.arguments[@"recordPath"];
+        NSString* text = call.arguments[@"en"];
+        [self evaluateVoice:index andPath:recordPath andText:text andIsVideo:true];
+    } else {
+        result(FlutterMethodNotImplemented);
+    }
+}
+
+#pragma mark - Bridge Actions
+
+- (void)iflyInit {
+    [IFlySpeechUtility createUtility:@"appid=5db7af6b"];
+    self.iFlySpeechEvaluator = [IFlySpeechEvaluator sharedInstance];
+    self.iFlySpeechEvaluator.delegate = self;
+    [self configEvaluator];
+}
+
+- (void) configEvaluator {
+    [self.iFlySpeechEvaluator setParameter:@"" forKey:[IFlySpeechConstant PARAMS]];
+    [self.iFlySpeechEvaluator setParameter:@"read_sentence" forKey:[IFlySpeechConstant ISE_CATEGORY]];
+    [self.iFlySpeechEvaluator setParameter:@"en_us" forKey:[IFlySpeechConstant LANGUAGE]];
+    [self.iFlySpeechEvaluator setParameter:@"5000" forKey:[IFlySpeechConstant VAD_BOS]];
+    [self.iFlySpeechEvaluator setParameter:@"1800" forKey:[IFlySpeechConstant VAD_EOS]];
+    [self.iFlySpeechEvaluator setParameter:@"-1" forKey:[IFlySpeechConstant SPEECH_TIMEOUT]];
+    [self.iFlySpeechEvaluator setParameter:@"complete" forKey:[IFlySpeechConstant ISE_RESULT_LEVEL]];
+    [self.iFlySpeechEvaluator setParameter:@"16000" forKey:[IFlySpeechConstant SAMPLE_RATE]];
+    [self.iFlySpeechEvaluator setParameter:@"xml" forKey:[IFlySpeechConstant ISE_RESULT_TYPE]];
+    [self.iFlySpeechEvaluator setParameter:@"-1" forKey:@"audio_source"];
+}
+
+- (void) evaluateVoice: (NSNumber*)index andPath:(NSString*)path andText:(NSString*)text andIsVideo:(BOOL) isVideo
+{
+    self.index = index;
+    if(isVideo) {
+        [Mp4ToPcmHelper Mp4ToPcmWithUrl:[[NSURL alloc] initFileURLWithPath:path] completion:^(NSData *data) {
+            if(data == nil) {
+                self.result([NSDictionary dictionaryWithObjectsAndKeys: self.index, @"index", [NSNull null], @"score", nil]);
+                return;
+            }
+            NSStringEncoding encoding = CFStringConvertEncodingToNSStringEncoding(kCFStringEncodingGB_18030_2000);
+            NSMutableData *buffer= [NSMutableData dataWithData:[text dataUsingEncoding:encoding]];
+            [self.iFlySpeechEvaluator startListening:buffer params:nil];
+            [self.iFlySpeechEvaluator writeAudio:data];
+            [self.iFlySpeechEvaluator stopListening];
+        }];
+    } else {
+        NSData *voiceData = [M4aToPcmHelper M4aToPcmWithUrl:[[NSURL alloc] initFileURLWithPath:path]];
+        if(voiceData == nil) {
+            self.result([NSDictionary dictionaryWithObjectsAndKeys: self.index, @"index", [NSNull null], @"score", nil]);
+            return;
+        }
+        NSStringEncoding encoding = CFStringConvertEncodingToNSStringEncoding(kCFStringEncodingGB_18030_2000);
+        NSMutableData *buffer= [NSMutableData dataWithData:[text dataUsingEncoding:encoding]];
+        [self.iFlySpeechEvaluator startListening:buffer params:nil];
+        [self.iFlySpeechEvaluator writeAudio:voiceData];
+        [self.iFlySpeechEvaluator stopListening];
+    }
+}
+
+#pragma mark - iFly delegate
+
+// 评测结果回调
+- (void)onResults:(NSData *)results isLast:(BOOL)isLast {
+    if (isLast) {
+        if(results) {
+            const char* chResult = [results bytes];
+            NSString* strResults = nil;
+            NSStringEncoding encoding = CFStringConvertEncodingToNSStringEncoding(kCFStringEncodingGB_18030_2000);
+            strResults = [[NSString alloc] initWithBytes:chResult length:[results length] encoding:encoding];
+            if(strResults != nil) {
+                ISEResultXmlParser *parser = [ISEResultXmlParser alloc];
+                [parser setDelegate:self];
+                [parser parserXml:strResults];
+            } else {
+                self.result([NSDictionary dictionaryWithObjectsAndKeys: self.index, @"index", [NSNull null], @"score", nil]);
+            }
+        } else {
+            self.result([NSDictionary dictionaryWithObjectsAndKeys: self.index, @"index", [NSNull null], @"score", nil]);
+        }
+    }
+}
+
+- (void)onCompleted:(IFlySpeechError *)errorCode {
+    if (errorCode.errorCode != 0) {
+        self.result([NSDictionary dictionaryWithObjectsAndKeys: self.index, @"index", [NSNull null], @"score", nil]);
+    }
+}
+
+- (void)onCancel {}
+
+- (void)onBeginOfSpeech {}
+
+- (void)onEndOfSpeech {}
+
+- (void)onVolumeChanged:(int)volume buffer:(NSData *)buffer {}
+
+
+#pragma mark - ISEResultXmlParser delegate
+
+-(void)onISEResultXmlParser:(NSXMLParser *)parser Error:(NSError*)error {
+    if (error.code != 0) {
+        self.result([NSDictionary dictionaryWithObjectsAndKeys: self.index, @"index", [NSNull null], @"score", nil]);
+    }
+}
+
+-(void)onISEResultXmlParserResult:(ISEResult*)result {
+    if (result.is_rejected) {
+        self.result([NSDictionary dictionaryWithObjectsAndKeys: self.index, @"index", [NSNull null], @"score", nil]);
+    } else {
+        NSMutableDictionary *dic = [NSMutableDictionary dictionary];
+        [dic setValue:self.index forKey:@"index"];
+        [dic setValue:@(result.total_score) forKey:@"score"];
+        NSString* words = [ISEResultTools formatDetailsForLanguageEN: result.sentences];
+        [dic setValue:words forKey:@"words"];
+        self.result(dic);
+    }
+}
+
+@end

+ 0 - 14
ios/Classes/SwiftSpeechPlugin.swift

@@ -1,14 +0,0 @@
-import Flutter
-import UIKit
-
-public class SwiftSpeechPlugin: NSObject, FlutterPlugin {
-  public static func register(with registrar: FlutterPluginRegistrar) {
-    let channel = FlutterMethodChannel(name: "speech_plugin", binaryMessenger: registrar.messenger())
-    let instance = SwiftSpeechPlugin()
-    registrar.addMethodCallDelegate(instance, channel: channel)
-  }
-
-  public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
-    result("iOS " + UIDevice.current.systemVersion)
-  }
-}

+ 35 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyAudioSession.h

@@ -0,0 +1,35 @@
+//
+//  IFlyAudioSession.h
+//  MSCDemo
+//
+//  Created by AlexHHC on 1/9/14.
+//
+//
+
+#import <Foundation/Foundation.h>
+
+/**
+ *  音频环境初始化,设置AVAudioSession的Category属性。
+ */
+@interface IFlyAudioSession : NSObject
+
+/**
+ *  初始化播音环境,主要用于合成播放器。
+ *
+ *  此接口主要根据原来的音频环境,重新优化设置AVAudioSession的Category属性值。<br>
+ *  若原来的Category属性值为AVAudioSessionCategoryPlayAndRecord,则添加AVAudioSessionCategoryOptionDefaultToSpeaker|AVAudioSessionCategoryOptionAllowBluetooth选项;若为其他Category属性值且isMPCenter为NO,则设置Category属性值为AVAudioSessionCategoryPlayback,选项为AVAudioSessionCategoryOptionMixWithOthers;若为其他Category属性值且isMPCenter为YES,则保持原来的设置,不做任何更改。
+ *
+ *  @param isMPCenter 是否初始化MPPlayerCenter:0不初始化,1初始化。此参数只在AVAudioSession的Category属性值不为AVAudioSessionCategoryPlayAndRecord时设置有效。
+ */
++(void) initPlayingAudioSession:(BOOL)isMPCenter;
+
+/**
+ *  初始化录音环境,主要用于识别录音器。
+ *
+ *  设置AVAudioSession的Category属性值为AVAudioSessionCategoryPlayAndRecord,选项为AVAudioSessionCategoryOptionDefaultToSpeaker|AVAudioSessionCategoryOptionAllowBluetooth。
+ *
+ *  @return 成功返回YES,失败返回NO
+ */
++(BOOL) initRecordingAudioSession;
+
+@end

+ 54 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyDataUploader.h

@@ -0,0 +1,54 @@
+//
+//  IFlyDataUploader.h
+//  MSC
+//
+//  Created by ypzhao on 13-4-8.
+//  Copyright (c) 2013年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+
+@class IFlySpeechError;
+
+/*!
+ *  数据上传类,主要用于上传语法文件或上传联系人、词表等个性化数据。
+ */
+@interface IFlyDataUploader : NSObject
+
+/*!
+ *  数据名称
+ */
+@property(nonatomic,copy) NSString *dataName;
+/*!
+ *  数据
+ */
+@property(nonatomic,copy) NSString *data;
+
+/*!
+ *  上传完成回调
+ *
+ *  @param result 结果
+ *  @param error  错误码
+ */
+typedef void(^IFlyUploadDataCompletionHandler)(NSString* result,IFlySpeechError * error);
+
+/*!
+ *  上传数据
+ *  此函数用于上传数据,上传的过程是**异步**的。
+ *
+ *  @param completionHandler -[in] 上传完成回调
+ *  @param name              -[in] 上传的内容名称,名称最好和你要上传的数据内容相关,不可以为nil
+ *  @param data              -[in] 上传的数据,以utf8编码,不可以为nil
+ */
+- (void) uploadDataWithCompletionHandler:(IFlyUploadDataCompletionHandler)completionHandler name:(NSString *)name data:(NSString *)data;
+
+/*!
+ *  设置上传数据参数
+ *
+ *  @param parameter 参数值
+ *  @param key       参数名
+ */
+-(void) setParameter:(NSString*) parameter forKey:(NSString*) key;
+
+@end

+ 37 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyDebugLog.h

@@ -0,0 +1,37 @@
+//
+//  IFlyDebugLog.h
+//  MSC
+
+//  description: 程序中的log处理类
+
+//  Created by ypzhao on 12-11-22.
+//  Copyright (c) 2012年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+/*!
+ *  调试信息
+ */
+@interface IFlyDebugLog : NSObject
+
+/*!
+ *  打印调试信息
+ *
+ *  @param format -[in] 要打印的内容格式
+ *  @param ...    -[in] 要打印的内容
+ */
++ (void) showLog:(NSString *)format, ...;
+
+/*!
+ *  将log写入文件中
+ */
++ (void) writeLog;
+
+/*!
+ *  设置是否显示log
+ *
+ *  @param showLog YES:显示;NO:不显示
+ */
++ (void) setShowLog:(BOOL) showLog;
+@end

+ 48 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyISVDelegate.h

@@ -0,0 +1,48 @@
+//
+//  IFlyISVDelegate.h
+//  msc_UI
+//
+//  Created by admin on 14-9-15.
+//  Copyright (c) 2014年 iflytek. All rights reserved.
+//
+
+
+#import <Foundation/Foundation.h>
+
+@class IFlySpeechError;
+
+/*!
+ *  声纹回调协议
+ */
+@protocol IFlyISVDelegate
+
+/*!
+ *  声纹结果回调
+ *
+ *  @param dic 结果
+ */
+-(void) onResult:(NSDictionary *)dic;
+
+/*!
+ *  错误码回调
+ *
+ *  @param errorCode 错误码
+ */
+-(void) onCompleted:(IFlySpeechError *) errorCode;
+
+@optional
+
+/*!
+ *  等待结果
+ */
+-(void) onRecognition;
+
+/*!
+ *  音量改变回调
+ *
+ *  @param volume 音量值
+ */
+-(void) onVolumeChanged: (int)volume;
+
+@end
+

+ 132 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyISVRecognizer.h

@@ -0,0 +1,132 @@
+//
+//  IFlyISVRecognizer.h
+//  ISV
+//
+//  Created by wangdan on 14-9-6.
+//  Copyright (c) 2014年 IFlyTEK. All rights reserved.
+//
+
+
+#import <Foundation/Foundation.h>
+#import "IFlyISVDelegate.h"
+
+/**
+ *  声纹接口类
+ */
+@interface IFlyISVRecognizer : NSObject 
+{
+
+}
+
+/*!
+ *  The delegate of IFlyISVRecognizer, conforming to IFlyISVDelegate.
+ */
+@property (assign) id<IFlyISVDelegate> delegate;
+
+
+/*!
+ *  IFlyISVRecognizer is a singleton class. The function can be used as below:<br>
+ *  IFlyISVRecognizer *recognizer = [IFlyISVRecognizer sharedInstance];
+ */
++(instancetype) sharedInstance;
+
+
+/*!
+ *  Generate a serial number password<br>
+ *  Principle:<br>
+ *  1. The number serial contains no digit 1;<br>
+ *  2. The number serial has no repeated digit ("98765432" is right while "99876543" is wrong)
+ *
+ *  @param length   the serial number's length,length of "98765432" is 8,generally length is 8 and other value is forbidden
+ */
+-(NSString*) generatePassword:(int)length;
+
+
+
+/*!
+ *  Used to get password from server
+ *
+ *  @param pwdt   when pwdt is 1, the function will return Chinese text; while pwdt is 2, the function will return a number serial
+ */
+-(NSArray*) getPasswordList:(int)pwdt;
+
+
+/*!
+ *  Used to judge whether the engine is listening
+ *
+ *  @return YES: the engine is listening;<br>NO: the engine is not listening
+ */
+-(BOOL) isListening;
+
+
+
+/*!
+ *  Used to query or delete the voiceprint model in server
+ *
+ *  @param cmd  "del": delete model;<br>"que": query model;
+ *  @param authid: user id ,can be @"tianxia" or other;
+ *  @param pwdt voiceprint type<br>
+ *  1: fixed txt voiceprint code ,like @"我的地盘我做主";<br>
+ *  2: free voiceprint code, the user can speak anything, but across the 5 training passes the speech shall be the same;<br>
+ *  3: number serial voiceprint code ,like @"98765432" and so on.
+ *  @param ptxt voiceprint txt,only fixed voiceprint and number serial have this,in free voiceprint model this param shall be set nil.
+ *  @param vid  another voiceprint type model,user can use this to query or delete model in server can be @"jakillasdfasdjjjlajlsdfhdfdsadff",totally 32 bits;<br>
+ *  NOTES:<br>
+ *  when vid is not nil,then the server will judge the vid first; while the vid is nil, server can still query or delete the voiceprint model by other params.
+ */
+-(BOOL) sendRequest:(NSString*)cmd authid:(NSString *)auth_id  pwdt:(int)pwdt ptxt:(NSString *)ptxt vid:(NSString *)vid err:(int *)err;
+
+
+/*!
+ *  Set the voiceprint params
+ * 
+ *  | key             | value                                             |
+ *  |:---------------:|:-------------------------------------------------:|
+ *  | sst             | @"train" or @"verify"                             |
+ *  | auth_id         | @"tianxia" or other                               |
+ *  | sub             | @"ivp"                                            |
+ *  | ptxt            |                                                   |
+ *  | rgn             | @"5"                                              |
+ *  | pwdt            | @"1",or @"2", or @"3"                             |
+ *  | auf             | @"audio/L16;rate=16000" or @"audio/L16;rate=8000" |
+ *  | vad_enable      | @"1" or @"0"                                      |
+ *  | vad_timeout     | @"3000"                                           |
+ *  | vad_speech_tail | @"100"                                            |
+ *
+ *  @param value 参数值
+ *  @param key   参数类型
+ *
+ *  @return 设置成功返回YES,失败返回NO
+ */
+-(BOOL) setParameter:(NSString *)value forKey:(NSString *)key;
+
+
+
+/*!
+ *  Get the voiceprint params used the same as function of setParameter
+ */
+-(NSString*) getParameter:(NSString *)key;
+
+
+/*!
+ *  Start recording
+ */
+-(void) startListening;
+
+
+/*!
+ *  Stop recording
+ */
+-(void) stopListening;
+
+
+/*!
+ *  Cancel recording,like function stopListening
+ */
+-(void) cancel;                                                         /* cancel recognization */
+
+
+
+
+@end
+

+ 38 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyMSC.h

@@ -0,0 +1,38 @@
+//
+//  IFlyMSC.h
+//  msc
+//
+//  Created by 张剑 on 15/1/14.
+//  Copyright (c) 2015年 iflytek. All rights reserved.
+//
+
+#ifndef MSC_IFlyMSC_h
+#define MSC_IFlyMSC_h
+
+#import "IFlyAudioSession.h"
+#import "IFlyDataUploader.h"
+#import "IFlyDebugLog.h"
+#import "IFlyISVDelegate.h"
+#import "IFlyISVRecognizer.h"
+#import "IFlyRecognizerView.h"
+#import "IFlyRecognizerViewDelegate.h"
+#import "IFlyResourceUtil.h"
+#import "IFlySetting.h"
+#import "IFlySpeechConstant.h"
+#import "IFlySpeechError.h"
+#import "IFlySpeechEvaluator.h"
+#import "IFlySpeechEvaluatorDelegate.h"
+#import "IFlySpeechEvent.h"
+#import "IFlySpeechRecognizer.h"
+#import "IFlySpeechRecognizerDelegate.h"
+#import "IFlySpeechSynthesizer.h"
+#import "IFlySpeechSynthesizerDelegate.h"
+#import "IFlySpeechUtility.h"
+#import "IFlyUserWords.h"
+#import "IFlyPcmRecorder.h"
+#import "IFlyVoiceWakeuper.h"
+#import "IFlyVoiceWakeuperDelegate.h"
+
+
+
+#endif

+ 116 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyPcmRecorder.h

@@ -0,0 +1,116 @@
+//
+//  IFlyPcmRecorder.h
+//  MSC
+
+//  description:
+
+//  Created by ypzhao on 12-11-15.
+//  Copyright (c) 2012年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+#import <AudioToolbox/AudioQueue.h>
+#import <AudioToolbox/AudioFile.h>
+#import <AudioToolbox/AudioServices.h>
+#import <AudioToolbox/AudioConverter.h>
+#import <AVFoundation/AVFoundation.h>
+
+
+@class IFlyPcmRecorder;
+
+/*!
+ *  录音协议
+ */
+@protocol IFlyPcmRecorderDelegate<NSObject>
+
+/*!
+ *  回调音频数据
+ *
+ *  @param buffer 音频数据
+ *  @param size   表示音频的长度
+ */
+- (void) onIFlyRecorderBuffer: (const void *)buffer bufferSize:(int)size;
+
+/*!
+ *  回调音频的错误码
+ *
+ *  @param recoder 录音器
+ *  @param error   错误码
+ */
+- (void) onIFlyRecorderError:(IFlyPcmRecorder*)recoder theError:(int) error;
+
+@optional
+
+/*!
+ *  回调录音音量
+ *
+ *  @param power 音量值
+ */
+- (void) onIFlyRecorderVolumeChanged:(int) power;
+
+@end
+
+
+/*!
+ *  录音器控件
+ */
+@interface IFlyPcmRecorder : NSObject<AVAudioSessionDelegate>
+
+/*!
+ *  录音委托对象
+ */
+@property (nonatomic,assign) id<IFlyPcmRecorderDelegate> delegate;
+
+/*!
+ *  用于设置是否在录音结束后发送Deactive通知,默认是YES:发送
+ */
+@property (nonatomic,assign) BOOL isNeedDeActive;
+
+/*!
+ *  单例模式
+ *
+ *  @return 返回录音对象单例
+ */
++ (instancetype) sharedInstance;
+
+/*!
+ *  开始录音
+ *
+ *  @return  开启录音成功返回YES,否则返回NO
+ */
+- (BOOL) start;
+
+/*!
+ *  停止录音
+ */
+- (void) stop;
+
+/*!
+ *  设置音频采样率
+ *
+ *  @param rate -[in] 采样率,8k/16k
+ */
+- (void) setSample:(NSString *) rate;
+
+/*!
+ * 设置录音音量回调时间间隔参数
+ */
+- (void) setPowerCycle:(float) cycle;
+
+/*!
+ *  保存录音
+ *
+ *  @param savePath 音频保存路径
+ */
+-(void) setSaveAudioPath:(NSString *)savePath;
+
+/*!
+ *  录音器是否完成
+ *
+ *  @return  录音器完全结束返回YES,否则返回NO
+ */
+-(BOOL) isCompleted;
+
+@end
+

+ 134 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyRecognizerView.h

@@ -0,0 +1,134 @@
+//
+//  IFlyRecognizerView.h
+//  MSC
+//
+//  Created by admin on 13-4-16.
+//  Copyright (c) 2013年 iflytek. All rights reserved.
+//
+
+#import <UIKit/UIKit.h>
+
+
+@protocol IFlyRecognizerViewDelegate ;
+
+/*!
+ *  语音识别控件<br>
+ *  录音时触摸控件结束录音,开始识别(相当于旧版的停止);触摸其他位置,取消录音,结束会话(取消)<br>
+ *  出错时触摸控件,重新开启会话(相当于旧版的再说一次);触摸其他位置,取消录音,结束会话(取消)
+ *
+ */
+@interface IFlyRecognizerView : UIView<NSObject>
+
+/*!
+ *  设置委托对象
+ */
+@property(nonatomic,assign)id<IFlyRecognizerViewDelegate> delegate;
+
+/*!
+ *  初始化控件
+ *
+ *  @param origin 控件左上角的坐标
+ *
+ *  @return IFlyRecognizerView 对象
+ */
+- (id)initWithOrigin:(CGPoint)origin;
+
+/*!
+ *  初始化控件
+ *
+ *  @param center 控件中心的坐标
+ *
+ *  @return IFlyRecognizerView 对象
+ */
+- (id) initWithCenter:(CGPoint)center;
+
+/*!
+ *  设置横竖屏自适应
+ *
+ *  @param autoRotate 默认值YES,横竖屏自适应
+ */
+- (void) setAutoRotate:(BOOL)autoRotate;
+
+/*
+ *  | ------------- |-----------------------------------------------------------
+ *  | 参数           | 描述
+ *  | ------------- |-----------------------------------------------------------
+ *  | domain        |应用的领域: 取值为:iat、search、video、poi、music、asr;
+ *  |               |           iat:普通文本听写;
+ *  |               |        search:热词搜索;
+ *  |               |         video:视频音乐搜索;
+ *  |               |           asr:关键词识别;
+ *  | ------------- |-----------------------------------------------------------
+ *  | vad_bos       |前端点检测: 静音超时时间,即用户多长时间不说话则当做超时处理; 单位:ms;
+ *  |               |          engine指定iat识别默认值为5000;
+ *  |               |          其他情况默认值为 4000,范围 0-10000。
+ *  | ------------- |-----------------------------------------------------------
+ *  | vad_eos       |后断点检测: 后端点静音检测时间,即用户停止说话多长时间内即认为不再输入,
+ *  |               |          自动停止录音;单位:ms;
+ *  |               |          sms 识别默认值为 1800;
+ *  |               |          其他默认值为 700,范围 0-10000。
+ *  | ------------- |-----------------------------------------------------------
+ *  | sample_rate   |采样率:目前支持的采样率设置有 16000 和 8000。
+ *  | ------------- |-----------------------------------------------------------
+ *  | asr_ptt       |标点符号设置: 默认为 1,当设置为 0 时,将返回无标点符号文本。
+ *  | ------------- |-----------------------------------------------------------
+ *  | result_type   |返回结果的数据格式: 可设置为json,xml,plain,默认为json。
+ *  | ------------- |-----------------------------------------------------------
+ *  | grammarID     |识别的语法id: 只针对 domain 设置为”asr”的应用。
+ *  | ------------- |-----------------------------------------------------------
+ *  | asr_audio_path|音频文件名: 设置此参数后,将会自动保存识别的录音文件。
+ *  |               |          路径为Documents/(指定值)。
+ *  |               |          不设置或者设置为nil,则不保存音频。
+ *  | ------------- |-----------------------------------------------------------
+ *  | params        |扩展参数: 对于一些特殊的参数可在此设置,一般用于设置语义。
+ *  | ------------- |-----------------------------------------------------------
+ *
+ */
+
+/*!
+ *  设置识别引擎的参数
+ *
+ *  识别的引擎参数(key)取值如下:<br>
+ *
+ *  | 参数             | 描述                                                  |
+ *  |-----------------|-------------------------------------------------------|
+ *  | domain          | 应用的领域: 取值为:iat、search、video、poi、music、asr;<br>iat:普通文本听写;<br>search:热词搜索;<br>video:视频音乐搜索;<br>asr:关键词识别;|
+ *  | vad_bos         | 前端点检测: 静音超时时间,即用户多长时间不说话则当做超时处理; 单位:ms;<br>engine指定iat识别默认值为5000;<br>其他情况默认值为 4000,范围 0-10000。|
+ *  | vad_eos         | 后断点检测: 后端点静音检测时间,即用户停止说话多长时间内即认为不再输入,自动停止录音;单位:ms;<br>sms 识别默认值为 1800;<br>其他默认值为 700,范围 0-10000。|
+ *  | sample_rate     | 采样率:目前支持的采样率设置有 16000 和 8000。|
+ *  | asr_ptt         | 标点符号设置: 默认为 1,当设置为 0 时,将返回无标点符号文本。|
+ *  | result_type     | 返回结果的数据格式: 可设置为json,xml,plain,默认为json。|
+ *  | grammarID       | 识别的语法id: 只针对 domain 设置为”asr”的应用。|
+ *  | asr_audio_path  | 音频文件名: 设置此参数后,将会自动保存识别的录音文件。<br>路径为Documents/(指定值)。<br>不设置或者设置为nil,则不保存音频。|
+ *  | params          | 扩展参数: 对于一些特殊的参数可在此设置,一般用于设置语义。|
+ *
+ *  @param value 参数对应的取值
+ *  @param key   识别引擎参数
+ *
+ *  @return 成功返回YES;失败返回NO
+ */
+-(BOOL) setParameter:(NSString *) value forKey:(NSString*)key;
+
+/*!
+ *  获取识别引擎参数
+ *
+ *  @param key 参数key
+ *
+ *  @return 参数值
+ */
+-(NSString*) parameterForKey:(NSString *)key;
+
+/*!
+ *  开始识别
+ *
+ *  @return 成功返回YES;失败返回NO
+ */
+- (BOOL)start;
+
+/*!
+ *  取消本次识别
+ */
+- (void)cancel;
+
+
+@end

+ 36 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyRecognizerViewDelegate.h

@@ -0,0 +1,36 @@
+//
+//  IFlyRecognizerDelegate.h
+//  MSC
+//
+//  Created by admin on 13-4-16.
+//  Copyright (c) 2013年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+@class IFlyRecognizerView;
+@class IFlySpeechError;
+
+/*!
+ *  识别回调委托
+ */
+@protocol IFlyRecognizerViewDelegate <NSObject>
+
+/*!
+ *  回调返回识别结果
+ *
+ *  @param resultArray 识别结果,NSArray的第一个元素为NSDictionary,NSDictionary的key为识别结果,sc为识别结果的置信度
+ *  @param isLast      -[out] 是否最后一个结果
+ */
+- (void)onResult:(NSArray *)resultArray isLast:(BOOL) isLast;
+
+/*!
+ *  识别结束回调
+ *
+ *  @param error 识别结束错误码
+ */
+- (void)onCompleted: (IFlySpeechError *) error;
+
+@optional
+
+@end

+ 90 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyResourceUtil.h

@@ -0,0 +1,90 @@
+//
+//  IFlyResourceUtil.h
+//  MSCDemo
+//
+//  Created by admin on 14-6-20.
+//  Copyright (c) 2014年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+/*!
+ *  资源工具类
+ */
+@interface IFlyResourceUtil : NSObject
+
+/*!
+ *  获取通过MSPSetParam,启动引擎的标识
+ *
+ *  @return 通过MSPSetParam,启动引擎的标识
+ */
++(NSString*) ENGINE_START;
+
+/*!
+ *  获取通过MSPSetParam,销毁引擎的标识
+ *
+ *  @return 通过MSPSetParam,销毁引擎的标识
+ */
++(NSString*) ENGINE_DESTROY;
+
+/*!
+ *  获取识别引擎的资源目录标识
+ *
+ *  @return 识别引擎的资源目录标识
+ */
++(NSString*) ASR_RES_PATH;
+
+/*!
+ *  得到语法构建目录
+ *
+ *  @return 语法构建目录
+ */
++(NSString*) GRM_BUILD_PATH;
+
+/*!
+ *  获取合成引擎的资源目录标识,同时需要先传入voice_name方可生效
+ *
+ *  @return 合成引擎的资源目录标识,同时需要先传入voice_name方可生效
+ */
++(NSString*) TTS_RES_PATH;
+
+/*!
+ *  获取唤醒资源的资源目录标识
+ *
+ *  @return 唤醒资源的资源目录标识
+ */
++(NSString*) IVW_RES_PATH;
+
+/*!
+ *  语法类型
+ *
+ *  @return 语法类型
+ */
++(NSString*) GRAMMARTYPE;
+
+/*!
+ *  语记SDK专用参数,用于设置本地默认资源路径
+ *
+ *  @return 本地默认资源路径key字符串
+ */
++(NSString*) PLUS_LOCAL_DEFAULT_RES_PATH;
+
+#pragma mark -
+/*!
+ *  资源存放路径
+ *
+ *  @param path 设置的路径
+ *
+ *  @return 资源目录
+ */
++(NSString*) generateResourcePath:(NSString *)path;
+
+/**
+ *  获得离线发音人对应的id
+ *
+ *  @param voiceName 发音人名称
+ *
+ *  @return 有,发音人对应的id;无,返回nil
+ */
++(NSString*) identifierForVoiceName:(NSString*)voiceName;
+@end

+ 88 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlySetting.h

@@ -0,0 +1,88 @@
+//
+//  IFlySetting.h
+//  MSC
+//
+//  Created by iflytek on 13-4-12.
+//  Copyright (c) 2013年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+/*!
+ *  日志打印等级
+ */
+typedef NS_OPTIONS(NSInteger, LOG_LEVEL){
+    /*!
+     *  全部打印
+     */
+    LVL_ALL                 = -1,
+    /*!
+     *  高,异常分析需要的级别
+     */
+    LVL_DETAIL              = 31,
+    /*!
+     *  中,打印基本日志信息
+     */
+    LVL_NORMAL              = 15,
+    /*!
+     *  低,只打印主要日志信息
+     */
+    LVL_LOW                 = 7,
+    /*!
+     *  不打印
+     */
+    LVL_NONE                = 0
+};
+
+/*!
+ *   此接口为iflyMSC sdk 配置接口。<br>
+ *   可以获取版本号,设置日志打印等级等
+ */
+@interface IFlySetting : NSObject
+
+/*!
+ *  获取版本号
+ *
+ *  @return  版本号
+ */
++ (NSString *) getVersion;
+
+/*!
+ *  获取日志等级
+ *
+ *  @return 返回日志等级
+ */
++ (LOG_LEVEL) logLvl;
+
+/*!
+ *  是否打印控制台log<br>
+ *  在软件发布时,建议关闭此log。
+ *
+ *  @param showLog -[in] YES,打印log;NO,不打印
+ */
++ (void) showLogcat:(BOOL) showLog;
+
+/*!
+ *  设置日志msc.log生成路径以及日志等级
+ *
+ *  | 日志打印等级             | 描述                               |
+ *  |------------------------|-----------------------------------|
+ *  | LVL_ALL                | 全部打印                           |
+ *  | LVL_DETAIL             | 高,异常分析需要的级别                |
+ *  | LVL_NORMAL             | 中,打印基本日志信息                  |
+ *  | LVL_LOW                | 低,只打印主要日志信息                |
+ *  | LVL_NONE               | 不打印                             |
+ *
+ *  @param level  -[in] 日志打印等级
+ */
++ (void) setLogFile:(LOG_LEVEL) level;
+
+/*!
+ *  设置日志文件的路径<br>
+ *  日志文件默认存放在Documents目录。
+ *
+ *  @param path  -[in] 日志文件的全路径
+ */
++ (void) setLogFilePath:(NSString*) path;
+
+@end

+ 903 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechConstant.h

@@ -0,0 +1,903 @@
+//
+//  IFlySpeechConstant.h
+//  MSCDemo
+//
+//  Created by iflytek on 5/9/14.
+//  Copyright (c) 2014 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+/*!
+ *  公共常量类<br>
+ *  主要定义参数的key value值
+ */
+@interface IFlySpeechConstant : NSObject
+
+
+#pragma mark - 通用参数key
+
+/*!
+ *  语音应用ID<br>
+ *  通过开发者网站申请
+ *
+ *  @return 语音应用IDkey
+ */
++(NSString*)APPID;
+
+
+/*!
+ *  语言区域。
+ *
+ *  @return 语言区域key。
+ */
++(NSString*)ACCENT;
+
+/*!
+ *  语言区域。
+ *
+ *  @return 普通话value。
+ */
++(NSString*)ACCENT_MANDARIN;
+
+/*!
+ *  语言区域。
+ *
+ *  @return 河南话value。
+ */
++(NSString*)ACCENT_HENANESE;
+
+/*!
+ *  语言区域。
+ *
+ *  @return 四川话value。
+ */
++(NSString*)ACCENT_SICHUANESE;
+
+/*!
+ *  语言区域。
+ *
+ *  @return 粤语value。
+ */
++(NSString*)ACCENT_CANTONESE;
+
+/*!
+ *  语言<br>
+ *  支持:zh_cn,zh_tw,en_us<br>
+ *
+ *  @return 语言key
+ */
++(NSString*)LANGUAGE;
+
+/*!
+ *  语言
+ *
+ *  @return 中文value
+ */
++(NSString*)LANGUAGE_CHINESE;
+
+
+/*!
+ *  语言
+ *
+ *  @return 中文台湾value
+ */
++(NSString*)LANGUAGE_CHINESE_TW;
+
+/*!
+ *  语言
+ *
+ *  @return 英文value
+ */
++(NSString*)LANGUAGE_ENGLISH;
+
+/*!
+ *  返回结果的数据格式,可设置为json,xml,plain,默认为json。
+ *
+ *  @return 返回结果的数据格式key
+ */
++(NSString*)RESULT_TYPE;
+
+/*!
+ *  应用领域。
+ *
+ *  @return 应用领域key
+ */
++(NSString*)IFLY_DOMAIN;
+
+/*!
+ *  个性化数据上传类型
+ *
+ *  @return 个性化数据上传类型key
+ */
++(NSString*)DATA_TYPE;
+
+/*!
+ *  语音输入超时时间<br>
+ *  单位:ms,默认30000
+ *
+ *  @return 语音输入超时时间key
+ */
++(NSString*)SPEECH_TIMEOUT;
+
+/*!
+ *  网络连接超时时间<br>
+ *  单位:ms,默认20000
+ *
+ *  @return 网络连接超时时间key
+ */
++(NSString*)NET_TIMEOUT;
+
+/*!
+ *  业务类型。
+ *
+ *  @return 业务类型key。
+ */
++(NSString*)SUBJECT;
+
+/*!
+ *  扩展参数。
+ *
+ *  @return 扩展参数key。
+ */
++(NSString*)PARAMS;
+
+/*!
+ *  加密参数
+ *
+ *  支持类型:ssl 加密  tcp 非加密 默认:tcp<br>
+ *  建议对安全性要求较高时使用ssl。
+ *
+ *  @return 加密参数key
+ */
++(NSString*)PROT_TYPE;
+
+/*!
+ *  ssl证书内容
+ *
+ *  @return ssl证书内容key
+ */
++(NSString*)SSL_CERT;
+
+/*!
+ *  录音音量返回时间间隔。
+ *
+ *  @return 间隔key。
+ */
++(NSString*)POWER_CYCLE;
+
+/*!
+ *  合成、识别、唤醒、评测、声纹等业务采样率。
+ *
+ *  @return 合成及识别采样率key。
+ */
++(NSString*)SAMPLE_RATE;
+
+/*!
+ *  合成、识别、唤醒、声纹等业务采样率。
+ *
+ *  @return 合成及识别采样率8K Value。
+ */
++(NSString*)SAMPLE_RATE_8K;
+
+/*!
+ *  合成、识别、唤醒、评测、声纹等业务采样率。
+ *
+ *  @return 合成及识别采样率16K Value。
+ */
++(NSString*)SAMPLE_RATE_16K;
+
+/*!
+ *  引擎类型。<br>
+ *  可选:local,cloud,auto<br>
+ *  默认:auto
+ *
+ *  @return 引擎类型key。
+ */
++(NSString*)ENGINE_TYPE;
+
+/*!
+ *  本地识别引擎。
+ *
+ *  @return 本地识别引擎value。
+ */
++(NSString*)TYPE_LOCAL;
+
+/*!
+ *  云端识别引擎。
+ *
+ *  @return 云端识别引擎value。
+ */
++(NSString*)TYPE_CLOUD;
+
+/*!
+ *  混合识别引擎。
+ *
+ *  @return 混合识别引擎value。
+ */
++(NSString*)TYPE_MIX;
+
+/*!
+ *  引擎根据当前配置进行选择。
+ *
+ *  @return 引擎根据当前配置进行选择value。
+ */
++(NSString*)TYPE_AUTO;
+
+/*!
+ *  输入文本编码格式。
+ *
+ *  @return 编码格式key。
+ */
++(NSString*)TEXT_ENCODING;
+
+/*!
+ *  结果编码格式。
+ *
+ *  @return 结果编码格式key。
+ */
++(NSString*)RESULT_ENCODING;
+
+/*!
+ *  是否初始化播放器<br>
+ *  SDK内部播放器采用音频队列实现,有部分外部需求需要自定义音频队列,可以通过此开关控制<br>
+ *  0:不初始化,非0或者参数为空:初始化,默认初始化
+ *
+ *  @return 是否初始化播放器参数key
+ */
++(NSString*)PLAYER_INIT;
+
+/*!
+ *  是否播放器结束后发送deactive系统通知<br>
+ *  SDK内部播放器结束后可通过此开关发送deactive系统通知,使其他被中断的音频应用解除中断<br>
+ *  0:不发送,非0或者参数为空:发送,默认发送
+ *
+ *  @return 是否播放器结束后发送deactive系统通知参数key
+ */
++(NSString*)PLAYER_DEACTIVE;
+
+/**
+ *  是否初始化录音器<br>
+ *  SDK内部录音器采用音频队列实现,有部分外部需求需要自定义音频队列,可以通过此开关控制<br>
+ *  0:不初始化,非0或者参数为空:初始化,默认初始化
+ *
+ *  @return 是否初始化录音器参数key
+ */
++(NSString*)RECORDER_INIT;
+
+/**
+ *  是否录音器结束后发送deactive系统通知<br>
+ *  SDK内部录音器结束后可通过此开关发送deactive系统通知,使其他被中断的音频应用解除中断<br>
+ *  0:不发送,非0或者参数为空:发送,默认发送
+ *
+ *  @return 是否录音器结束后发送deactive系统通知参数key
+ */
++(NSString*)RECORDER_DEACTIVE;
+
+
+#pragma mark -  合成相关设置key
+/*!
+ *  语速<br>
+ *  范围 (0~100) 默认值:50
+ *
+ *  @return 语速key
+ */
++(NSString*)SPEED;
+
+/*!
+ *  音调<br>
+ *  范围(0~100)默认值:50
+ *
+ *  @return 音调key
+ */
++(NSString*)PITCH;
+
+/*!
+ *  合成录音保存路径
+ *
+ *  注意:只需要设置文件名则可,会自动拼接到[IFlySetting setLogFilePath]接口设置的目录后
+ *
+ *  @return 合成录音保存路径key
+ */
++(NSString*)TTS_AUDIO_PATH;
+
+/**
+ *  启用VAD功能
+ *
+ *  @return 启用VAD功能key
+ */
++(NSString*)VAD_ENABLE;
+
+/*!
+ *  VAD前端点超时<br>
+ *  范围:0-10000(单位ms)
+ *
+ *  @return VAD前端点超时key
+ */
++(NSString*)VAD_BOS;
+
+/*!
+ *  VAD后端点超时。<br>
+ *  可选范围:0-10000(单位ms)
+ *
+ *  @return VAD后端点超时key
+ */
++(NSString*)VAD_EOS;
+
+
+/*
+ *  云端支持如下发音人:
+ *  对于网络TTS的发音人角色,不同引擎类型支持的发音人不同,使用中请注意选择。
+ *
+ *  |--------|----------------|
+ *  |  发音人 |  参数          |
+ *  |--------|----------------|
+ *  |  小燕   |   xiaoyan     |
+ *  |--------|----------------|
+ *  |  小宇   |   xiaoyu      |
+ *  |--------|----------------|
+ *  |  凯瑟琳 |   catherine   |
+ *  |--------|----------------|
+ *  |  亨利   |   henry       |
+ *  |--------|----------------|
+ *  |  玛丽   |   vimary      |
+ *  |--------|----------------|
+ *  |  小研   |   vixy        |
+ *  |--------|----------------|
+ *  |  小琪   |   vixq        |
+ *  |--------|----------------|
+ *  |  小峰   |   vixf        |
+ *  |--------|----------------|
+ *  |  小梅   |   vixl        |
+ *  |--------|----------------|
+ *  |  小莉   |   vixq        |
+ *  |--------|----------------|
+ *  |  小蓉   |   vixr        |
+ *  |--------|----------------|
+ *  |  小芸   |   vixyun      |
+ *  |--------|----------------|
+ *  |  小坤   |   vixk        |
+ *  |--------|----------------|
+ *  |  小强   |   vixqa       |
+ *  |--------|----------------|
+ *  |  小莹   |   vixyin      |
+ *  |--------|----------------|
+ *  |  小新   |   vixx        |
+ *  |--------|----------------|
+ *  |  楠楠   |   vinn        |
+ *  |--------|----------------|
+ *  |  老孙   |   vils        |
+ *  |--------|----------------|
+ */
+
+/*!
+ *  发音人
+ * 
+ *  云端支持如下发音人:<br>
+ *  对于网络TTS的发音人角色,不同引擎类型支持的发音人不同,使用中请注意选择。<br>
+ *
+ *  |  发音人   |  参数             |
+ *  |:--------:|:----------------:|
+ *  |  小燕     |   xiaoyan        |
+ *  |  小宇     |   xiaoyu         |
+ *  |  凯瑟琳   |   catherine      |
+ *  |  亨利     |   henry          |
+ *  |  玛丽     |   vimary         |
+ *  |  小研     |   vixy           |
+ *  |  小琪     |   vixq           |
+ *  |  小峰     |   vixf           |
+ *  |  小梅     |   vixl           |
+ *  |  小莉     |   vixq           |
+ *  |  小蓉     |   vixr           |
+ *  |  小芸     |   vixyun         |
+ *  |  小坤     |   vixk           |
+ *  |  小强     |   vixqa          |
+ *  |  小莹     |   vixyin         |
+ *  |  小新     |   vixx           |
+ *  |  楠楠     |   vinn           |
+ *  |  老孙     |   vils           |
+ *
+ *  @return 发音人key
+ */
++(NSString*)VOICE_NAME;
+
+/*!
+ * 发音人ID key。
+ *
+ * @return 发音人ID key
+ */
++(NSString*)VOICE_ID;
+
+/*!
+ * 发音人语种 key。
+ *
+ * 参数值:0:Auto 1:中文 2英文 ,默认 0.
+ *
+ * @return 发音人ID key
+ */
++(NSString*)VOICE_LANG;
+
+/*!
+ *  音量<br>
+ *  范围(0~100) 默认值:50
+ *
+ *  @return 音量key
+ */
++(NSString*)VOLUME ;
+
+/*!
+ *  合成音频播放缓冲时间<br>
+ *  即缓冲多少秒音频后开始播放,如tts_buffer_time=1000;<br>
+ *  默认缓冲1000ms毫秒后播放。
+ *
+ *  @return 合成音频播放缓冲时间缓冲时间key
+ */
++(NSString*)TTS_BUFFER_TIME ;
+
+
+/*!
+ *  合成数据是否即时返回
+ *
+ *  是否需要数据回调,为1时,当合成一段音频会通过onEvent回调返回,直接合成结束;<br>
+ *  设置为1为即时返回;0为非即时返回;默认值为0;
+ *
+ *  @return 合成数据即时返回key
+ */
++(NSString*)TTS_DATA_NOTIFY;
+
+/*!
+ *  预合成文本
+ *
+ *  @return 预合成文本参数key
+ */
++(NSString*)NEXT_TEXT;
+
+/*!
+ *  是否需要打开MPPlayingInfocenter<br>
+ *  是否需要初始化MPPlayerCenter的属性;0:需要初始化,1:不初始化
+ *
+ *  @return 是否需要打开MPPlayingInfocenter 参数key
+ */
++(NSString*)MPPLAYINGINFOCENTER;
+
+#pragma mark - 识别、听写、语义相关设置key
+
+/*!
+ *  录音源<br>
+ *  录音时的录音方式,默认为麦克风,设置为1;<br>
+ *  如果需要外部送入音频,设置为-1,通过WriteAudio接口送入音频。
+ *
+ *  @return 录音源key
+ */
++(NSString*)AUDIO_SOURCE;
+
+/*!
+ *  识别录音保存路径
+ *
+ *  @return 识别录音保存路径key
+ */
++(NSString*) ASR_AUDIO_PATH;
+
+/*!
+ *  设置是否开启语义
+ *
+ *  @return 设置是否开启语义key
+ */
++(NSString*)ASR_SCH;
+
+/*!
+ *  设置是否有标点符号
+ *
+ *  @return 设置是否有标点符号key
+ */
++(NSString*)ASR_PTT;
+
+/*!
+ *  ASR_PTT 参数值:设置带标点符号
+ *
+ *  @return 设置是有标点符号Value
+ */
++(NSString*)ASR_PTT_HAVEDOT;
+
+/*!
+ *  ASR_PTT 参数值:设置不带标点符号
+ *
+ *  @return 设置是无标点符号Value
+ */
++(NSString*)ASR_PTT_NODOT;
+
+/*!
+ *  本地语法名称。<br>
+ *  本地语法名称,对应云端的有CLOUD_GRAMMAR
+ *
+ *  @return 本地语法名称key。
+ */
++(NSString*)LOCAL_GRAMMAR;
+
+/*!
+ *  云端语法ID。<br>
+ *  云端编译语法返回的表示,早期版本使用GRAMMAR_ID,仍然兼容,但建议使用新的。
+ *
+ *  @return 云端语法ID key。
+ */
++(NSString*)CLOUD_GRAMMAR;
+
+/*!
+ *  语法类型
+ *
+ *  @return 语法类型key
+ */
++(NSString*)GRAMMAR_TYPE;
+
+/*!
+ *  语法内容。
+ *
+ *  @return 语法内容key。
+ */
++(NSString*)GRAMMAR_CONTENT;
+
+/*!
+ *  字典内容。
+ *
+ *  @return 字典内容key。
+ */
++(NSString*)LEXICON_CONTENT;
+
+/*!
+ *  字典名字。
+ *
+ *  @return 字典名字key。
+ */
++(NSString*)LEXICON_NAME;
+
+/*!
+ *  语法名称列表。
+ *
+ *  @return 语法名称列表key。
+ */
++(NSString*)GRAMMAR_LIST;
+
+/*!
+ *  开放语义协议版本号。<br>
+ *  如需使用请在http://osp.voicecloud.cn/上进行业务配置
+ *
+ *  @return 开放语义协议版本号key。
+ */
++(NSString*)NLP_VERSION;
+
+#pragma mark -  唤醒相关设置key
+/*!
+ *  唤醒门限值。
+ *
+ *  @return 唤醒门限值key。
+ */
++(NSString*)IVW_THRESHOLD;
+
+/*!
+ *  唤醒服务类型。
+ *
+ *  @return 唤醒服务类型key。
+ */
++(NSString*)IVW_SST;
+
+/*!
+ *  唤醒+识别。
+ *
+ *  @return 唤醒+识别key。
+ */
++(NSString*)IVW_ONESHOT;
+
+/*!
+ *  唤醒工作方式<br>
+ *  1:表示唤醒成功后继续录音,0:表示唤醒成功后停止录音。
+ *
+ *  @return 唤醒工作方式key
+ */
++(NSString*)KEEP_ALIVE;
+
+/*!
+ *  唤醒录音保存路径
+ *
+ *  @return 唤醒录音保存路径key
+ */
++(NSString*) IVW_AUDIO_PATH;
+
+#pragma mark -  评测相关设置key
+/*!
+ *  评测类型<br>
+ *  可选值:read_syllable(英文评测不支持):单字;read_word:词语;read_sentence:句子;read_chapter(待开放):篇章。
+ *
+ *  @return 评测类型 key
+ */
++(NSString*)ISE_CATEGORY;
+
+/*!
+ *  评测结果等级<br>
+ *  可选值:complete:完整 ;plain:简单
+ *
+ *  @return 评测结果等级 key
+ */
++(NSString*)ISE_RESULT_LEVEL;
+
+/*!
+ *  评测结果格式<br>
+ *  可选值:xml;plain
+ *
+ *  @return 评测结果格式 key
+ */
++(NSString*)ISE_RESULT_TYPE;
+
+/*!
+ *  评测录音保存路径
+ *
+ *  @return 评测录音保存路径key
+ */
++(NSString*) ISE_AUDIO_PATH;
+
+
+/*!
+ *  朗读跟踪,只对句子和篇章有效<br>
+ *  可选值:enable:开启;disable:关闭。
+ *
+ *  @return 朗读跟踪 key
+ */
++(NSString*)ISE_AUTO_TRACKING;
+
+/*!
+ *  跟踪模式<br>
+ *  可选值:easy:简单;hard:复杂。
+ *
+ *  @return 跟踪模式 key
+ */
++(NSString*)ISE_TRACK_TYPE;
+
+#pragma mark -  语记SDK业务key
+/*!
+ *  本地所有资源
+ *
+ *  @return 本地所有资源key
+ */
++ (NSString *)PLUS_LOCAL_ALL;
+
+/*!
+ *  本地合成资源
+ *
+ *  @return 本地合成资源key
+ */
++ (NSString *)PLUS_LOCAL_TTS;
+
+/*!
+ *  本地识别资源
+ *
+ *  @return 本地识别资源key
+ */
++ (NSString *)PLUS_LOCAL_ASR;
+
+/*!
+ *  本地唤醒资源
+ *
+ *  @return 本地唤醒资源key
+ */
++ (NSString *)PLUS_LOCAL_IVW;
+
+#pragma mark -  身份验证业务key
+
+/*!
+ *  auth_id<br>
+ *  用于用户注册和登录、查询、删除等业务时标识用户身份
+ *
+ *  @return 用户标识
+ */
++ (NSString*)MFV_AUTH_ID;
+
+/*!
+ *  请求业务类型,可选值:mfv(默认,融合验证),ivp(声纹),ifr(人脸)
+ *
+ *  @return 请求业务类型key
+ */
++ (NSString*)MFV_SUB;
+
+/*!
+ *  会话类型,不同sub有不同的sst取值。<br>
+ *  ifr:enroll,verify,identify,reenroll,query,delete<br>
+ *  ivp:enroll(train),verify,reenroll,query,delete,download
+ *
+ *  @return 会话类型key
+ */
++ (NSString*)MFV_SST;
+
+/*!
+ *  融合验证模式,仅在融合验证场景下使用。可选值:sin(单一生物特征数据验证),mix(混合生物特征数据验证),agi(灵活生物特征数据验证)
+ *
+ *  @return 融合验证模式key
+ */
++ (NSString*)MFV_VCM;
+
+/*!
+ *  特征场景,用来说明本次验证将涉及的业务。可选值:ivp,ifr,ivp|ifr
+ *
+ *  @return 特征场景 key
+ */
++ (NSString*)MFV_SCENES;
+
+/*!
+ *  确认周期(affirmance cycle,单位:s),用户设置的确认超时时间(生命周期),仅在灵活融合验证场景下使用
+ *
+ *  @return 确认周期key
+ */
++ (NSString*)MFV_AFC;
+
+/*!
+ *  数据保存路径
+ *
+ *  @return 数据保存路径key
+ */
++ (NSString*)MFV_DATA_PATH;
+
+/*!
+ *  训练次数:取值2~9.无默认值,必须明确指定。
+ *
+ *  @return 训练次数key
+ */
++ (NSString*)MFV_RGN;
+
+/*!
+ *  声纹确认门限值,验证得分>=tsd验证通过,否则验证失败(该参数目前不支持,作为保留参数。)取值范围:0~100.
+ *
+ *  @return 声纹确认门限值key
+ */
++ (NSString*)MFV_TSD;
+
+/*!
+ *  密码文本。从服务端下载,比如数字密码所需要的数字串。
+ *
+ *  @return 密码文本key
+ */
++ (NSString*)MFV_PTXT;
+
+/*!
+ *  密码类型。取值:1(文本密码),2(自由说),3(数字密码).
+ *
+ *  @return 密码类型key
+ */
++ (NSString*)MFV_PWDT;
+
+/*!
+ *  取消注册。取值:0(不取消,即不生效),1(取消本次注册).
+ *
+ *  @return 取消注册key
+ */
++ (NSString*)MFV_FIN;
+
+/*!
+ *  等待超时时间:描述客户端等待结果的超时时间
+ *
+ *  @return 等待超时时间:key
+ */
++ (NSString*)MFV_WTT;
+
+/*!
+ *  数据格式<br>
+ *  声纹为音频采样率支持:16000和8000;人脸为图片格式,支持jpg和gif
+ *
+ *  @return 数据格式key
+ */
++ (NSString*)MFV_DATA_FORMAT;
+
+/*!
+ *  数据压缩编码<br>
+ *  声纹为;人脸支持raw,不对图片压缩
+ *
+ *  @return 数据压缩编码key
+ */
++ (NSString*)MFV_DATA_ENCODING;
+
+#pragma mark - 人脸业务key
+
+//1. sub	  取值: wfr	                      用途: 用于区分业务类型,web访问方式中,nginx配置不用使用,但是在结构化日志和染色日志记录中使用。
+//2. sst	  取值: reg、verify、detect、align  用途: 指定本路会话是属于何种性质
+// + 人脸图像注册(reg):上传图像,验证图像的有效性,然后存储起来,作为数据源。
+// + 人脸图像验证(verify):通过与指定源图像比较,验证人脸相似性。
+// + 人脸图像检测(detect):能够检测出不同姿态方位的人脸在图中的位置。
+// + 人脸图像聚焦(align):在给定人脸框下自动标定出两眼、鼻尖、嘴角的坐标。
+//3. aue	取值: raw	                      用途: 图像压缩格式,现在引擎不支持图像压缩,aue只能取值raw
+//4. pset   取值: 整数	                      用途: 人脸识别验证阈值,取值可以是负数也可以是整数。
+//5. skip   取值: true/false	                  用途: 后台图片处理是否进行过滤。true表示不过滤,false表示过滤
+//6. gid	取值: ***********	              用途: 图像模型id,如:4a6c124ed6b78436ee5aac4563f13eb5
+//7. appid  取值:用户申请的appid                 用途: 验证用户
+
+
+/*! 
+ * sub 默认值:wfr<br>
+ * 用于区分业务类型,web访问方式中,nginx配置不用使用,但是在结构化日志和染色日志记录中使用。
+ */
++ (NSString*) FACE_SUB;
+
+/*!
+ * WFR<br>
+ * sub参数的默认值
+ */
++ (NSString*) FACE_WFR;
+
+/*! 
+ * sst<br>
+ * 指定本路会话是属于何种性质
+ */
++ (NSString*) FACE_SST;
+
+/*! 
+ * REG<br>
+ * 人脸图像注册(reg):上传图像,验证图像的有效性,然后存储起来,作为数据源。
+ */
++ (NSString*) FACE_REG;
+
+/*! 
+ * VERIFY<br>
+ * 人脸图像验证(verify):通过与指定源图像比较,验证人脸相似性。
+ */
++ (NSString*) FACE_VERIFY;
+
+/*! 
+ * DETECT<br>
+ * 人脸图像检测(detect):能够检测出不同姿态方位的人脸在图中的位置。
+ */
++ (NSString*) FACE_DETECT;
+
+/*! 
+ * ALIGN<br>
+ * 人脸图像聚焦(align):在给定人脸框下自动标定出两眼、鼻尖、嘴角的坐标。
+ */
++ (NSString*) FACE_ALIGN;
+
+/*! 
+ * ATTR<br>
+ * 面部属性识别(attr):对面部属性进行识别:例如秃顶、刘海、大嘴、模糊、眼镜等。
+ */
++ (NSString*) FACE_ATTR;
+
+
+/*!
+ * AUE<br>
+ * 图像压缩格式,现在引擎不支持图像压缩,aue只能取值raw
+ */
++ (NSString*) FACE_AUE;
+
+/*! 
+ * RAW<br>
+ * AUE参数的值
+ */
++ (NSString*) FACE_RAW;
+
+/*! 
+ * PSET<br>
+ * 人脸识别验证阈值,取值可以是负数也可以是整数。
+ */
++ (NSString*) FACE_PSET;
+
+/*! 
+ * SKIP<br>
+ * 后台图片处理是否进行过滤。true表示不过滤,false表示过滤,传入字符串@“true”或@“false”
+ */
++ (NSString*) FACE_SKIP;
+
+/*! 
+ * GID<br>
+ * 图像模型id,如:4a6c124ed6b78436ee5aac4563f13eb5
+ */
++ (NSString*) FACE_GID;
+
+/*!
+ *  auth_id<br>
+ *  用于用户注册和登录、查询、删除等业务时标识用户身份
+ *
+ *  @return 用户标识
+ */
++ (NSString*)FACE_AUTH_ID;
+
+/*!
+ * DVC<br>
+ * 用户设备编号,用于验证用户
+ */
++ (NSString*) FACE_DVC;
+
+@end

+ 58 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechError.h

@@ -0,0 +1,58 @@
+//
+//  IFlySpeechError.h
+//  MSC
+//
+//  Created by iflytek on 13-3-19.
+//  Copyright (c) 2013年 iflytek. All rights reserved.
+//
+
+#ifndef __IFlySpeechError__
+#define __IFlySpeechError__
+
+#import <Foundation/Foundation.h>
+
+/*!
+ *  错误描述类
+ */
+@interface IFlySpeechError : NSObject
+
+/*!
+ *  错误码
+ */
+@property(nonatomic,assign) int errorCode;
+
+/*!
+ *  错误码类型
+ */
+@property(nonatomic,assign) int errorType;
+
+/*!
+ *  错误描述
+ */
+@property(nonatomic,retain) NSString* errorDesc;
+
+/*!
+ *  初始化
+ *
+ *  @param errorCode -[in] 错误码
+ *
+ *  @return IFlySpeechError对象
+ */
++ (instancetype) initWithError:(int) errorCode;
+
+/*!
+ *  获取错误码
+ *
+ *  @return 错误码
+ */
+-(int) errorCode;
+
+/*!
+ *  获取错误描述
+ *
+ *  @return 错误描述
+ */
+- (NSString *) errorDesc;
+
+@end
+#endif

+ 101 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechEvaluator.h

@@ -0,0 +1,101 @@
+//
+//  IFlySpeechEvaluator.h
+//  msc
+//
+//  Created by jianzhang on 14-1-13
+//  Copyright (c) 2013年 iflytek. All rights reserved.
+//
+
+
+#import <Foundation/Foundation.h>
+#import "IFlySpeechEvaluatorDelegate.h"
+
+#define IFLY_AUDIO_SOURCE_MIC    @"1"
+#define IFLY_AUDIO_SOURCE_STREAM @"-1"
+
+/*!
+ *  语音评测类
+ */
+@interface IFlySpeechEvaluator : NSObject <IFlySpeechEvaluatorDelegate>
+
+/*!
+ *  设置委托对象
+ */
+@property (assign) id <IFlySpeechEvaluatorDelegate> delegate;
+
+/*!
+ *  返回评测对象的单例
+ *
+ *  @return 评测对象的单例
+ */
++ (instancetype)sharedInstance;
+
+/*!
+ *  销毁评测对象。
+ *
+ *  @return 成功返回YES,失败返回NO。
+ */
+- (BOOL)destroy;
+
+/*!
+ *  设置评测引擎的参数
+ *
+ *  @param value 评测引擎参数值
+ *  @param key   评测引擎参数
+ *
+ *  @return 设置的参数和取值正确返回YES,失败返回NO
+ */
+- (BOOL)setParameter:(NSString *)value forKey:(NSString *)key;
+
+
+/*!
+ *  获得评测引擎的参数
+ *
+ *  @param key   评测引擎参数
+ *
+ *  @return key对应的参数值
+ */
+- (NSString*)parameterForKey:(NSString *)key;
+
+/*!
+ *  开始评测<br>
+ *  同时只能进行一路会话,这次会话没有结束不能进行下一路会话,否则会报错
+ *
+ *  @param data   评测的试题
+ *  @param params 评测的参数
+ *  @return 成功返回YES,失败返回NO
+ */
+- (BOOL)startListening:(NSData *)data params:(NSString *)params;
+
+/*!
+ *  停止录音<br>
+ *  调用此函数会停止录音,并开始进行语音识别
+ */
+- (void)stopListening;
+
+/*!
+ *  取消本次会话
+ */
+- (void)cancel;
+
+@end
+
+/*!
+ *  音频流评测<br>
+ *  音频流评测可以将文件分段写入
+ */
+@interface IFlySpeechEvaluator(IFlyStreamISERecognizer)
+
+/*!
+ *  写入音频流
+ *
+ *  @param audioData 音频数据
+ *
+ *  @return 写入成功返回YES,写入失败返回NO
+ */
+- (BOOL) writeAudio:(NSData *) audioData;
+
+@end
+
+
+

+ 64 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechEvaluatorDelegate.h

@@ -0,0 +1,64 @@
+//
+//  IFlySpeechEvaluatorDelegate.h
+//  msc
+//
+//  Created by admin on 13-6-19.
+//  Copyright (c) 2013年 iflytek. All rights reserved.
+//
+
+
+#import <Foundation/Foundation.h>
+
+@class IFlySpeechError;
+
+/*!
+ *  评测协议
+ */
+@protocol IFlySpeechEvaluatorDelegate <NSObject>
+
+/*!
+ *  音量和数据回调
+ *
+ *  @param volume 音量
+ *  @param buffer 音频数据
+ */
+- (void)onVolumeChanged:(int)volume buffer:(NSData *)buffer;
+
+/*!
+ *  开始录音回调<br>
+ *  当调用了`startListening`函数之后,如果没有发生错误则会回调此函数。如果发生错误则回调onCompleted:函数
+ */
+- (void)onBeginOfSpeech;
+
+/*!
+ *  停止录音回调<br>
+ *  当调用了`stopListening`函数或者引擎内部自动检测到断点,如果没有发生错误则回调此函数。<br>
+ *  如果发生错误则回调onCompleted:函数
+ */
+- (void)onEndOfSpeech;
+
+/*!
+ *  正在取消
+ */
+- (void)onCancel;
+
+/*!
+ *  评测错误回调
+ *
+ *  在进行语音评测过程中的任何时刻都有可能回调此函数,你可以根据errorCode进行相应的处理.当errorCode没有错误时,表示此次会话正常结束,否则,表示此次会话有错误发生。特别的当调用`cancel`函数时,引擎不会自动结束,需要等到回调此函数,才表示此次会话结束。在没有回调此函数之前如果重新调用了`startListenging`函数则会报错误。
+ *
+ *  @param errorCode 错误描述类
+ */
+- (void)onCompleted:(IFlySpeechError *)errorCode;
+
+/*!
+ *  评测结果回调<br>
+ *  在评测过程中可能会多次回调此函数,你最好不要在此回调函数中进行界面的更改等操作,只需要将回调的结果保存起来。
+ *
+ *  @param results -[out] 评测结果。
+ *  @param isLast  -[out] 是否最后一条结果
+ */
+- (void)onResults:(NSData *)results isLast:(BOOL)isLast;
+
+@end
+

+ 151 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechEvent.h

@@ -0,0 +1,151 @@
+//
+//  IFlySpeechEvent.h
+//  MSCDemo
+//
+//  Created by admin on 14-8-12.
+//  Copyright (c) 2014年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+/*!
+ *  事件类型
+ */
+typedef NS_ENUM(NSUInteger,IFlySpeechEventType){
+    /*!
+     *  网络状态消息<br>
+     *  在消息到达时,可通过onEvent的第2个参数arg1,获取当前网络连接状态值
+     */
+    IFlySpeechEventTypeNetPref = 10001,
+    /*!
+     * 转写音频文件消息<br>
+     * 在录音模式下,成功创建音频文件时返回。可通过onEvent第4个参数data,指定Key为[IFlySpeechConstant IST_AUDIO_PATH],获取音频文件绝对路径.或通过[IFlySpeechTranscripter getParameter:[IFlySpeechConstant IST_AUDIO_PATH]],获取音频文件绝对路径.
+     */
+    IFlySpeechEventTypeISTAudioFile = 10004,
+    /*!
+     * 转写已上传字节消息<br>
+     * 在消息到达时,通过onEvent的第二个参数arg1,获取已确认上传到服务器的字节数.若当前音频源为非写音频模式,还可通过onEvent
+     * 的第三个参数arg2,获取当前所有音频的字节大小.录音模式时,由于所有音频字节大小会变。当停止音频输入后(等待录音时间超时[IFlySpeechConstant SPEECH_TIMEOUT],或调用[IFlySpeechTranscripter stopTranscripting]),且服务器收到所有音频时,第四个参数data,将包含完成标记的布尔值(true),可通过data调用指定KEY为KCIFlySpeechEventKeyISTUploadComplete获取。此消息可能多次返回.
+     */
+    IFlySpeechEventTypeISTUploadBytes = 10006,
+    
+    /*!
+     * 转写缓存剩余<br>
+     * 此消息仅在音频源为-1时需要关注,在调用[IFlySpeechTranscripter writeAudio]写音频时,应该关注此事件。<br>
+     * 此事件在调用写音频接口、及音频最后被写入底层库时分别回调一次。当事件回调时,通过onEvent的第二个参数arg1,获取当前剩余的缓存大小,当缓存小于要写入的音频时,应该先暂停写音频数据,直到下次缓存大小大于要写入的音频时.最大缓存为128KByte。
+     */
+    IFlySpeechEventTypeISTCacheLeft = 10007,
+    
+    /*!
+     * 转写结果等待时间消息<br>
+     * 在消息到达时,通过 onEvent的第二个参数arg1,获取当前结果需要的时间.<br>
+     * 此消息可能多次返回,返回时间不定,且不一定会返回.
+     */
+    IFlySpeechEventTypeISTResultTime= 10008,
+    
+    /*!
+     * 转写转写音频同步ID消息<br>
+     * 在消息到达时,通过 onEvent的第二个参数arg1,获取当前写音频同步ID.<br>
+     * 此消息可能多次返回.
+     */
+    IFlySpeechEventTypeISTSyncID= 10009,
+    
+    /*!
+     * 会话开始消息<br>
+     * 在会话开始成功后返回
+     */
+    IFlySpeechEventTypeSessionBegin = 10010,
+    
+    /*!
+     * 会话结束消息<br>
+     * 在会话结束前返回
+     */
+    IFlySpeechEventTypeSessionEnd = 10011,
+    
+    /*!
+     * 音量消息,在得到音量时抛出,暂时只有身份验证的声纹业务用到
+     */
+    IFlySpeechEventTypeVolume = 10012,
+    
+    /*!
+     * VAD后端点消息,在检测到VAD后端点时抛出,暂时只有身份验证的声纹业务用到
+     */
+    IFlySpeechEventTypeVadEOS = 10013,
+    
+    /*!
+     *  服务端会话id<br>
+     *  在消息到达时,可通过onEvent的第4个参数data(字典类型),指定key KCIFlySpeechEventKeySessionID,获取服务端会话id.
+     */
+    IFlySpeechEventTypeSessionID = 20001,
+    
+    /*!
+     *  TTS合成数据消息<br>
+     *  -(void)onEvent:(int)eventType arg0:(int)arg0 arg1:(int)arg1 data:(NSData *)eventData<br>
+     *  其中eventData中包含数据
+     *
+     */
+    IFlySpeechEventTypeTTSBuffer = 21001,
+    
+    /*!
+     *  通知cancel方法被调用的回调
+     *
+     */
+    IFlySpeechEventTypeTTSCancel = 21002,
+    
+    /*!
+     *  IVW oneshot 听写 or 识别结果<br>
+     *  在消息到达时,第2个参数arg1包含是否为最后一个结果:1为是,0为否;<br>
+     *  第4个参数data中包含数据,通过指定KEY为KCIFlySpeechEventKeyIVWResult获取.
+     */
+    IFlySpeechEventTypeIVWResult = 22001,
+    
+    /*!
+     * 开始处理录音数据
+     * 
+     */
+    IFlySpeechEventTypeSpeechStart= 22002,
+    
+    /*!
+     * 录音停止
+     * 
+     */
+    IFlySpeechEventTypeRecordStop= 22003,
+    
+    /*!
+     *  服务端音频url<br>
+     *  在消息到达时,第4个参数data,包含数据,通过指定KEY为KCIFlySpeechEventKeyAudioUrl获取.
+     */
+    IFlySpeechEventTypeAudioUrl = 23001,
+    
+    /*!
+     *  变声数据结果返回<br>
+     *  设置voice_change参数获取结果.
+     */
+    IFlySpeechEventTypeVoiceChangeResult = 24001
+
+};
+
+#pragma mark - keys for event data
+
+/**
+ *  转写是否已上传完标记key
+ */
+extern NSString* const KCIFlySpeechEventKeyISTUploadComplete;
+
+/**
+ *  服务端会话key
+ */
+extern NSString* const KCIFlySpeechEventKeySessionID;
+/**
+ *  TTS取音频数据key
+ */
+extern NSString* const KCIFlySpeechEventKeyTTSBuffer;
+/**
+ *  IVW oneshot 听写 or 识别结果 key
+ */
+extern NSString* const KCIFlySpeechEventKeyIVWResult;
+/**
+ *  服务端音频url key
+ */
+extern NSString* const KCIFlySpeechEventKeyAudioUrl;
+

+ 175 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechRecognizer.h

@@ -0,0 +1,175 @@
+//
+//  IFlySpeechRecognizer.h
+//  MSC
+//
+//  Created by iflytek on 13-3-19.
+//  Copyright (c) 2013年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+#import "IFlySpeechRecognizerDelegate.h"
+
+#define IFLY_AUDIO_SOURCE_MIC    @"1"
+#define IFLY_AUDIO_SOURCE_STREAM @"-1"
+
+/*!
+ *  语音识别类<br>
+ *  此类现在设计为单例,你在使用中只需要创建此对象,不能调用release/dealloc函数去释放此对象。所有关于语音识别的操作都在此类中。
+ */
+@interface IFlySpeechRecognizer : NSObject<IFlySpeechRecognizerDelegate>
+
+/*!
+ *  设置委托对象 
+ */
+@property(nonatomic,assign) id<IFlySpeechRecognizerDelegate> delegate ;
+
+/*!
+ *  返回识别对象的单例
+ *
+ *  @return 识别对象的单例
+ */
++ (instancetype) sharedInstance;
+
+/*!
+ *  销毁识别对象。
+ *
+ *  @return 成功返回YES,失败返回NO
+ */
+- (BOOL) destroy;
+
+/*
+ *  | ------------- |-----------------------------------------------------------
+ *  | 参数           | 描述
+ *  | ------------- |-----------------------------------------------------------
+ *  | domain        |应用的领域: 取值为:iat、search、video、poi、music、asr;
+ *  |               |           iat:普通文本听写;
+ *  |               |        search:热词搜索;
+ *  |               |         video:视频音乐搜索;
+ *  |               |           asr:关键词识别;
+ *  | ------------- |-----------------------------------------------------------
+ *  | vad_bos       |前端点检测: 静音超时时间,即用户多长时间不说话则当做超时处理; 单位:ms;
+ *  |               |          engine指定iat识别默认值为5000;
+ *  |               |          其他情况默认值为 4000,范围 0-10000。
+ *  | ------------- |-----------------------------------------------------------
+ *  | vad_eos       |后断点检测: 后端点静音检测时间,即用户停止说话多长时间内即认为不再输入,
+ *  |               |          自动停止录音;单位:ms;
+ *  |               |          sms 识别默认值为 1800;
+ *  |               |          其他默认值为 700,范围 0-10000。
+ *  | ------------- |-----------------------------------------------------------
+ *  | sample_rate   |采样率:目前支持的采样率设置有 16000 和 8000。
+ *  | ------------- |-----------------------------------------------------------
+ *  | asr_ptt       |标点符号设置: 默认为 1,当设置为 0 时,将返回无标点符号文本。
+ *  | ------------- |-----------------------------------------------------------
+ *  | result_type   |返回结果的数据格式: 可设置为json,xml,plain,默认为json。
+ *  | ------------- |-----------------------------------------------------------
+ *  | grammarID     |识别的语法id: 只针对 domain 设置为”asr”的应用。
+ *  | ------------- |-----------------------------------------------------------
+ *  | asr_audio_path|音频文件名: 设置此参数后,将会自动保存识别的录音文件。
+ *  |               |          路径为Documents/(指定值)。
+ *  |               |          不设置或者设置为nil,则不保存音频。
+ *  | ------------- |-----------------------------------------------------------
+ *  | params        |扩展参数: 对于一些特殊的参数可在此设置,一般用于设置语义。
+ *  | ------------- |-----------------------------------------------------------
+ *
+ */
+
+/*!
+ *  设置识别引擎的参数
+ *
+ *  识别的引擎参数(key)取值如下:
+ *
+ *  | 参数             | 描述                                                  |
+ *  |-----------------|-------------------------------------------------------|
+ *  | domain          | 应用的领域: 取值为:iat、search、video、poi、music、asr;<br>iat:普通文本听写;<br>search:热词搜索;<br>video:视频音乐搜索;<br>asr:关键词识别;|
+ *  | vad_bos         | 前端点检测: 静音超时时间,即用户多长时间不说话则当做超时处理; 单位:ms;<br>engine指定iat识别默认值为5000;<br>其他情况默认值为 4000,范围 0-10000。|
+ *  | vad_eos         | 后断点检测: 后端点静音检测时间,即用户停止说话多长时间内即认为不再输入,自动停止录音;单位:ms;<br>sms 识别默认值为 1800;<br>其他默认值为 700,范围 0-10000。|
+ *  | sample_rate     | 采样率:目前支持的采样率设置有 16000 和 8000。|
+ *  | asr_ptt         | 标点符号设置: 默认为 1,当设置为 0 时,将返回无标点符号文本。|
+ *  | result_type     | 返回结果的数据格式: 可设置为json,xml,plain,默认为json。|
+ *  | grammarID       | 识别的语法id: 只针对 domain 设置为”asr”的应用。|
+ *  | asr_audio_path  | 音频文件名: 设置此参数后,将会自动保存识别的录音文件。<br>路径为Documents/(指定值)。<br>不设置或者设置为nil,则不保存音频。|
+ *  | params          | 扩展参数: 对于一些特殊的参数可在此设置,一般用于设置语义。|
+ *
+ *  @param value 参数对应的取值
+ *  @param key   识别引擎参数
+ *
+ *  @return 成功返回YES;失败返回NO
+ */
+-(BOOL) setParameter:(NSString *) value forKey:(NSString*)key;
+
+/*!
+ *  获取识别引擎参数
+ *
+ *  @param key 参数key
+ *
+ *  @return 参数值
+ */
+-(NSString*) parameterForKey:(NSString *)key;
+
+/*!
+ *  开始识别
+ *
+ *  同时只能进行一路会话,这次会话没有结束不能进行下一路会话,否则会报错。若有需要多次回话,请在onCompleted回调返回后请求下一路回话。
+ *
+ *  @return 成功返回YES;失败返回NO
+ */
+- (BOOL) startListening;
+
+/*!
+ *  停止录音<br>
+ *  调用此函数会停止录音,并开始进行语音识别
+ */
+- (void) stopListening;
+
+/*!
+ *  取消本次会话
+ */
+- (void) cancel;
+
+/*!
+ *  上传语法
+ *
+ *  @param completionHandler 上传语法完成回调
+ *  @param grammarType       语法类型
+ *  @param grammarContent    语法内容
+ *
+ *  @return 错误码
+ */
+- (int) buildGrammarCompletionHandler:(IFlyOnBuildFinishCompletionHandler)completionHandler
+                          grammarType:(NSString *)grammarType
+                       grammarContent:(NSString *)grammarContent;
+
+/*! 
+ *  是否正在识别
+ */
+@property (nonatomic, readonly) BOOL isListening;
+
+@end
+
+/*!
+ *  音频流识别<br>
+ *  音频流识别可以将文件分段写入
+ */
+@interface IFlySpeechRecognizer(IFlyStreamRecognizer)
+
+/*!
+*  写入音频流
+*
+*  此方法的使用示例如下:
+* <pre><code>[_iFlySpeechRecognizer setParameter:@"-1" value:@"audio_source"];
+* [_iFlySpeechRecognizer startListening];
+* [_iFlySpeechRecognizer writeAudio:audioData1];
+* [_iFlySpeechRecognizer writeAudio:audioData2];
+* ...
+* [_iFlySpeechRecognizer stopListening];
+* </code></pre>
+*
+*  @param audioData 音频数据
+*
+*  @return 写入成功返回YES,写入失败返回NO
+*/
+- (BOOL) writeAudio:(NSData *) audioData;
+
+@end
+

+ 111 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechRecognizerDelegate.h

@@ -0,0 +1,111 @@
+//
+//  IFlySpeechRecognizerDelegate.h
+//  MSC
+//
+//  Created by ypzhao on 13-3-27.
+//  Copyright (c) 2013年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+@class IFlySpeechError;
+
+/*!
+ *  构建语法结束回调
+ *
+ *  @param grammarId 语法id
+ *  @param error     错误描述
+ */
+typedef void(^IFlyOnBuildFinishCompletionHandler)(NSString* grammarId,IFlySpeechError * error);
+
+
+/*!
+ *  语音识别协议<br>
+ *  在使用语音识别时,需要实现这个协议中的方法.
+ */
+@protocol IFlySpeechRecognizerDelegate <NSObject>
+
+@required
+
+/*!
+ *  识别错误回调
+ *
+ *  在进行语音识别过程中的任何时刻都有可能回调此函数,你可以根据errorCode进行相应的处理,当errorCode没有错误时,表示此次会话正常结束;否则,表示此次会话有错误发生。特别的当调用`cancel`函数时,引擎不会自动结束,需要等到回调此函数,才表示此次会话结束。在没有回调此函数之前如果重新调用了`startListenging`函数则会报错误。
+ *
+ *  @param errorCode 错误描述
+ */
+- (void) onCompleted:(IFlySpeechError *) errorCode;
+
+/*!
+ *  识别结果回调
+ *
+ *  在识别过程中可能会多次回调此函数,你最好不要在此回调函数中进行界面的更改等操作,只需要将回调的结果保存起来。<br>
+ *  使用results的示例如下:
+ *  <pre><code>
+ *  - (void) onResults:(NSArray *) results{
+ *     NSMutableString *result = [[NSMutableString alloc] init];
+ *     NSDictionary *dic = [results objectAtIndex:0];
+ *     for (NSString *key in dic){
+ *        [result appendFormat:@"%@",key];//合并结果
+ *     }
+ *   }
+ *  </code></pre>
+ *
+ *  @param results  -[out] 识别结果,NSArray的第一个元素为NSDictionary,NSDictionary的key为识别结果,sc为识别结果的置信度。
+ *  @param isLast   -[out] 是否最后一个结果
+ */
+- (void) onResults:(NSArray *) results isLast:(BOOL)isLast;
+
+@optional
+
+/*!
+ *  音量变化回调<br>
+ *  在录音过程中,回调音频的音量。
+ *
+ *  @param volume -[out] 音量,范围从0-30
+ */
+- (void) onVolumeChanged: (int)volume;
+
+/*!
+ *  开始录音回调<br>
+ *  当调用了`startListening`函数之后,如果没有发生错误则会回调此函数。<br>
+ *  如果发生错误则回调onCompleted:函数
+ */
+- (void) onBeginOfSpeech;
+
+/*!
+ *  停止录音回调<br>
+ *  当调用了`stopListening`函数或者引擎内部自动检测到断点,如果没有发生错误则回调此函数。<br>
+ *  如果发生错误则回调onCompleted:函数
+ */
+- (void) onEndOfSpeech;
+
+/*!
+ *  取消识别回调<br>
+ *  当调用了`cancel`函数之后,会回调此函数,在调用了cancel函数和回调onCompleted之前会有一个<br>
+ *  短暂时间,您可以在此函数中实现对这段时间的界面显示。
+ */
+- (void) onCancel;
+
+#ifdef _EDUCATION_
+/*!
+ *  返回音频Key
+ *
+ *  @param key 音频Key
+ */
+- (void) getAudioKey:(NSString *)key;
+
+#endif
+
+/*!
+ *  扩展事件回调<br>
+ *  根据事件类型返回额外的数据
+ *
+ *  @param eventType 事件类型,具体参见IFlySpeechEventType的IFlySpeechEventTypeVoiceChangeResult枚举。
+ *  @param arg0      arg0
+ *  @param arg1      arg1
+ *  @param eventData 事件数据
+ */
+- (void) onEvent:(int)eventType arg0:(int)arg0 arg1:(int)arg1 data:(NSData *)eventData;
+
+@end

+ 123 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechSynthesizer.h

@@ -0,0 +1,123 @@
+//
+//  IFlySpeechSynthesizer.h
+//  MSC
+//
+//  Created by 侯效林 on 16-4-22.
+//  Copyright (c) 2016年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import "IFlySpeechSynthesizerDelegate.h"
+
+
+/*!
+ *  语音合成
+ */
+@interface IFlySpeechSynthesizer : NSObject 
+
+/*!
+ *  设置识别的委托对象
+ */
+@property(nonatomic,assign) id<IFlySpeechSynthesizerDelegate> delegate;
+
+/*!
+ *  返回合成对象的单例
+ *
+ *  @return 合成对象
+ */
++ (instancetype) sharedInstance;
+
+/*!
+ *  销毁合成对象。
+ *
+ *  @return 成功返回YES,失败返回NO.
+ */
++ (BOOL) destroy;
+
+/*
+ *  | ------------- |-----------------------------------------------------------
+ *  | 参数           | 描述
+ *  | ------------- |-----------------------------------------------------------
+ *  | speed         |合成语速,取值范围 0~100
+ *  | ------------- |-----------------------------------------------------------
+ *  | volume        |合成的音量,取值范围 0~100
+ *  | ------------- |-----------------------------------------------------------
+ *  | voice_name    |默认为”xiaoyan”;可以设置的参数列表可参考个性化发音人列表
+ *  | ------------- |-----------------------------------------------------------
+ *  | sample_rate   |采样率:目前支持的采样率设置有 16000 和 8000。
+ *  | ------------- |-----------------------------------------------------------
+ *  | tts_audio_path|音频文件名 设置此参数后,将会自动保存合成的音频文件。
+ *  |               |路径为Documents/(指定值)。不设置或者设置为nil,则不保存音频。
+ *  | ------------- |-----------------------------------------------------------
+ *  | params        |扩展参数: 对于一些特殊的参数可在此设置。
+ *  | ------------- |-----------------------------------------------------------
+ *
+ */
+
+/*!
+ *  设置合成参数
+ *
+ *  | 参数             | 描述                                               |
+ *  |-----------------|----------------------------------------------------|
+ *  | speed           | 合成语速,取值范围 0~100                               |
+ *  | volume          | 合成的音量,取值范围 0~100                             |
+ *  | voice_name      | 默认为”xiaoyan”;可以设置的参数列表可参考个性化发音人列表   |
+ *  | sample_rate     | 采样率:目前支持的采样率设置有 16000 和 8000。            |
+ *  | tts_audio_path  | 音频文件名 设置此参数后,将会自动保存合成的音频文件。<br>路径为Documents/(指定值)。不设置或者设置为nil,则不保存音频。|
+ *  | params          | 扩展参数: 对于一些特殊的参数可在此设置。                  |
+ *
+ *  @param value 参数取值
+ *  @param key   合成参数
+ *
+ *  @return 设置成功返回YES,失败返回NO
+ */
+-(BOOL) setParameter:(NSString *) value forKey:(NSString*)key;
+
+/*!
+ *  获取合成参数
+ *
+ *  @param key 参数key
+ *
+ *  @return 参数值
+ */
+-(NSString*) parameterForKey:(NSString *)key;
+
+/*!
+ *  开始合成(播放)<br>
+ *  调用此函数进行合成,如果发生错误会回调错误`onCompleted`
+ *
+ *  @param text 合成的文本,最大的字节数为1k
+ */
+- (void) startSpeaking:(NSString *)text;
+
+/*!
+ *  开始合成(不播放)<br>
+ *  调用此函数进行合成,如果发生错误会回调错误`onCompleted`
+ *
+ *  @param text 合成的文本,最大的字节数为1k
+ *  @param uri  合成后,保存在本地的音频路径
+ */
+-(void)synthesize:(NSString *)text toUri:(NSString*)uri;
+
+/*!
+ *  暂停播放<br>
+ *  暂停播放之后,合成不会暂停,仍会继续,如果发生错误则会回调错误`onCompleted`
+ */
+- (void) pauseSpeaking;
+
+/*!
+ *  恢复播放
+ */
+- (void) resumeSpeaking;
+
+/*!
+ *  停止播放并停止合成
+ */
+- (void) stopSpeaking;
+
+/*!
+ *  是否正在播放
+ */
+@property (nonatomic, readonly) BOOL isSpeaking;
+
+@end

+ 81 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechSynthesizerDelegate.h

@@ -0,0 +1,81 @@
+//
+//  IFlySpeechSynthesizerDelegate.h
+//  MSC
+//
+//  Created by ypzhao on 13-3-20.
+//  Copyright (c) 2013年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import "IFlySpeechEvent.h"
+
+@class IFlySpeechError;
+
+/*!
+ *  语音合成回调
+ */
+@protocol IFlySpeechSynthesizerDelegate <NSObject>
+
+@required
+
+/*!
+ *  结束回调<br>
+ *  当整个合成结束之后会回调此函数
+ *
+ *  @param error 错误码
+ */
+- (void) onCompleted:(IFlySpeechError*) error;
+
+@optional
+
+/*!
+ *  开始合成回调
+ */
+- (void) onSpeakBegin;
+
+/*!
+ *  缓冲进度回调
+ *
+ *  @param progress 缓冲进度,0-100
+ *  @param msg      附件信息,此版本为nil
+ */
+- (void) onBufferProgress:(int) progress message:(NSString *)msg;
+
+/*!
+ *  播放进度回调
+ *
+ *  @param progress 当前播放进度,0-100
+ *  @param beginPos 当前播放文本的起始位置(按照字节计算),对于汉字(2字节)需/2处理
+ *  @param endPos 当前播放文本的结束位置(按照字节计算),对于汉字(2字节)需/2处理
+ */
+- (void) onSpeakProgress:(int) progress beginPos:(int)beginPos endPos:(int)endPos;
+
+/*!
+ *  暂停播放回调
+ */
+- (void) onSpeakPaused;
+
+/*!
+ *  恢复播放回调<br>
+ *  注意:此回调方法SDK内部不执行,播放恢复全部在onSpeakBegin中执行
+ */
+- (void) onSpeakResumed;
+
+/*!
+ *  正在取消回调<br>
+ *  注意:此回调方法SDK内部不执行
+ */
+- (void) onSpeakCancel;
+
+/*!
+ *  扩展事件回调<br>
+ *  根据事件类型返回额外的数据
+ *
+ *  @param eventType 事件类型,具体参见IFlySpeechEventType枚举。目前只支持EVENT_TTS_BUFFER也就是实时返回合成音频。
+ *  @param arg0      arg0
+ *  @param arg1      arg1
+ *  @param eventData 事件数据
+ */
+- (void) onEvent:(int)eventType arg0:(int)arg0 arg1:(int)arg1 data:(NSData *)eventData;
+
+@end

+ 184 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlySpeechUtility.h

@@ -0,0 +1,184 @@
+//
+//  IFlySpeechUtility.h
+//  MSCDemo
+//
+//  Created by admin on 14-5-7.
+//  Copyright (c) 2014年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+#define iOS_EXCLUSIVE       //iOS平台独占API
+
+@class IFlySpeechError;
+
+/*!
+ *  引擎模式
+ */
+typedef NS_ENUM(NSUInteger,IFlyEngineMode){
+    /*!
+     *  云端使用MSC,本地优先使用语记
+     */
+    IFlyEngineModeAuto = 0,
+    /*!
+     *  只使用MSC
+     */
+    IFlyEngineModeMsc,
+    /*!
+     *  本地只使用语记(受平台限制,云端无法使用语记)
+     */
+    IFlyEngineModePlus,
+};
+
+
+/*!
+ *  服务类型
+ */
+typedef NS_ENUM(NSUInteger,IFlySpeechPlusServiceType){
+    /*!
+     *  打开语记主界面
+     */
+    IFlySpeechPlusServiceTypeNone=0,
+    /*!
+     *  获取合成资源
+     */
+    IFlySpeechPlusServiceTypeTTS,
+    /*!
+     *  获取识别资源(未开放)
+     */
+    IFlySpeechPlusServiceTypeISR,
+    /*!
+     *  获取唤醒资源(未开放)
+     */
+    IFlySpeechPlusServiceTypeIVW,
+} ;
+
+/*! 语记返回回调
+ */
+@protocol IFlySpeechplusDelegate <NSObject>
+
+/*!
+ *  发生错误
+ *
+ *  @param errorCode 错误码
+ */
+- (void)onCompleted:(int)errorCode;
+
+/*!
+ *  服务正常结束
+ */
+- (void)onCompleted;
+
+@end
+
+/*!
+ * 用户配置
+ */
+@interface IFlySpeechUtility : NSObject
+
+/*!
+ *  创建用户语音配置<br>
+ *  注册应用请前往语音云开发者网站。<br>
+ *  网站:http://www.xfyun.cn
+ *
+ *  @param params 启动参数,必须保证appid参数传入,示例:appid=123456
+ *
+ *  @return 语音配置对象
+ */
++ (IFlySpeechUtility*) createUtility:(NSString *) params;
+
+/*!
+ *  销毁用户配置对象
+ *
+ *  @return 成功返回YES,失败返回NO
+ */
++(BOOL) destroy;
+
+/*!
+ *  获取用户配置对象
+ *
+ *  @return 用户配置对象
+ */
++(IFlySpeechUtility *) getUtility;
+
+/*!
+ *  设置MSC引擎的状态参数
+ *
+ *  @param value 参数值
+ *  @param key   参数名称
+ *
+ *  @return 成功返回YES,失败返回NO
+ */
+-(BOOL) setParameter:(NSString *) value forKey:(NSString*)key;
+
+/*!
+ *  获取MSC引擎状态参数
+ *
+ *  @param key 参数名
+ *
+ *  @return 参数值
+ */
+- (NSString *)parameterForKey:(NSString *)key;
+
+/*!
+ *  引擎类型
+ */
+@property (nonatomic, readonly) IFlyEngineMode engineMode;
+
+/*!
+ *  语记协议委托
+ */
+@property (nonatomic, assign) id<IFlySpeechplusDelegate> delegate;
+
+@end
+
+/*!
+ *  讯飞语记类别
+ */
+@interface IFlySpeechUtility (SpeechPlus)
+
+/*!
+ *  检查讯飞语记是否安装
+ *
+ *  @return 已安装返回YES,否则返回NO
+ */
++ (BOOL)checkServiceInstalled;
+
+/*!
+ *  获取讯飞语记下载地址进行下载,安装完成后即可使用服务。<br>
+ *  下载地址需要通过[[UIApplication sharedApplication] openUrl:]打开
+ *
+ *  @return 讯飞语记在App Store下载地址
+ */
++ (NSString *)componentUrl;
+
+
+/*!
+ *  注意:此接口废弃,不再需要使用<br>
+ *  处理语记使用URL启动第三方应用程序时传递的数据<br>
+ *  需要在 application:openURL:sourceApplication:annotation:或者application:handleOpenURL中调用。
+ *
+ *  @param url 语记启动第三方应用程序时传递过来的URL
+ *
+ *  @return 成功返回YES,失败返回NO。
+ */
+- (BOOL)handleOpenURL:(NSURL *)url iOS_EXCLUSIVE;
+
+/*!
+ *  打开讯飞语记获取相应类型服务,0表示打开主界面
+ *
+ *  @param serviceType 服务类型
+ *
+ *  @return 成功打开返回YES,否则返回NO
+ */
+- (BOOL)openSpeechPlus:(IFlySpeechPlusServiceType)serviceType iOS_EXCLUSIVE;
+
+@end
+
+
+
+
+
+
+
+

+ 74 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyUserWords.h

@@ -0,0 +1,74 @@
+//
+//  IFlyUserWords.h
+//  MSC
+//
+//  Created by ypzhao on 13-2-26.
+//  Copyright (c) 2013年 iflytek. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+/*!
+ *  用户词表类
+ *    获取用户词表是为了更好的语音识别(iat),用户词表也属于个性化的一部分.
+ */
+@interface IFlyUserWords  : NSObject
+
+/*!
+ *  初始化对象
+ *
+ *  在进行初始化时,需要传入的格式如下:
+ *  <pre><code>{\"userword\":[{\"name\":\"iflytek\",\"words\":[\"科大讯飞\",
+ *  \"云平台\",\"用户词条\",\"开始上传词条\"]}]}</code></pre>
+ *
+ *  @param json 初始化时传入的数据
+ *
+ *  @return IFlyUserWords对象
+ */
+- (id) initWithJson:(NSString *)json;
+
+/*!
+ *  将数据转化为上传的数据格式
+ *
+ *  @return 没有数据或者格式不对时返回nil
+ */
+- (NSString *) toString;
+
+/*!
+ *  返回key对应的数据
+ *
+ *  @param key  在putword:value中设置的key
+ *
+ *  @return key对应的数组
+ */
+- (NSArray *) getWords: (NSString *) key;
+
+/*!
+ *  添加一条用户词数据
+ *
+ *  @param key   用户词对应的key
+ *  @param value 上传的用户词数据
+ *
+ *  @return 成功返回YES,失败返回NO
+ */
+- (BOOL) putWord: (NSString *) key value:(NSString *)value;
+
+/*!
+ *  添加一组数据
+ *
+ *  @param key   用户词对应的key
+ *  @param words 上传的用户词数据
+ *
+ *  @return 成功返回YES,失败返回NO
+ */
+- (BOOL) putwords: (NSString *) key words:(NSArray *)words;
+
+/*!
+ *  是否包含key对应的用户词数据
+ *
+ *  @param key 用户词对应的key
+ *
+ *  @return 成功返回YES,失败返回NO
+ */
+- (BOOL) containsKey: (NSString *) key;
+@end

+ 83 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyVoiceWakeuper.h

@@ -0,0 +1,83 @@
+//
+//  IFlyVoiceWakeuper.h
+//  wakeup
+//
+//  Created by admin on 14-3-18.
+//  Copyright (c) 2014年 iflytek. All rights reserved.
+//
+
+
+#import <Foundation/Foundation.h>
+#import "IFlyVoiceWakeuperDelegate.h"
+
+#define IFLY_AUDIO_SOURCE_MIC    @"1"
+#define IFLY_AUDIO_SOURCE_STREAM @"-1"
+
+/*!
+ *  语音唤醒
+ */
+@interface IFlyVoiceWakeuper : NSObject
+
+/*!
+ * 代理
+ */
+@property (nonatomic, assign) id<IFlyVoiceWakeuperDelegate> delegate;
+
+/*!
+ * 是否正在唤醒
+ */
+@property (nonatomic, readonly) BOOL isListening;
+
+/*!
+ * 创建唤醒实例,采用单例模式
+ */
++ (instancetype) sharedInstance;
+
+
+/*!
+ * 启动唤醒
+ * 返回值:YES 成功,NO:失败
+ */
+-(BOOL) startListening;
+
+/*!
+ * 停止录音
+ */
+-(BOOL) stopListening;
+
+/*!
+ * 取消唤醒会话
+ */
+-(BOOL) cancel;
+
+/*!
+ * 获取工作参数
+ */
+-(NSString*) getParameter:(NSString *)key;
+
+/*!
+ * 设置工作参数<br>
+ * 注意服务正在运行中,不能设置参数
+ */
+-(BOOL) setParameter:(NSString *) value forKey:(NSString*)key;
+
+@end
+
+/*!
+ *  音频流唤醒<br>
+ *  音频流唤醒可以将文件分段写入
+ */
+@interface IFlyVoiceWakeuper(IFlyStreamVoiceWakeuper)
+
+/*!
+ *  写入音频流
+ *
+ *  @param audioData 音频数据
+ *
+ *  @return 写入成功返回YES,写入失败返回NO
+ */
+- (BOOL) writeAudio:(NSData *) audioData;
+
+@end
+
+

+ 60 - 0
ios/Frameworks/iflyMSC.framework/Headers/IFlyVoiceWakeuperDelegate.h

@@ -0,0 +1,60 @@
+
+//
+//  IFlyVoiceWakeuperDelegate.h
+//  wakeup
+//
+//  Created by admin on 14-3-18.
+//  Copyright (c) 2014年 iflytek. All rights reserved.
+//
+
+
+
+#import <Foundation/Foundation.h>
+
+@class IFlySpeechError;
+
+@protocol IFlyVoiceWakeuperDelegate <NSObject>
+
+@optional
+
+/*!
+ * 录音开始
+ */
+-(void) onBeginOfSpeech;
+
+/*!
+ * 录音结束
+ */
+-(void) onEndOfSpeech;
+
+/*!
+ * 会话错误
+ *
+ * @param error 错误描述类
+ */
+- (void) onCompleted:(IFlySpeechError *) error;
+
+/*!
+ * 唤醒结果
+ *
+ * @param resultDic 唤醒结果字典
+ */
+-(void) onResult:(NSMutableDictionary *)resultDic;
+
+/*!
+ * 音量反馈,返回频率与录音数据返回回调频率一致
+ *
+ * @param volume 音量值
+ */
+- (void) onVolumeChanged: (int)volume;
+
+/*!
+ * 扩展事件回调<br>
+ * 根据事件类型返回额外的数据
+ *
+ @param eventType 事件类型,具体参见IFlySpeechEvent枚举。
+ */
+- (void) onEvent:(int)eventType isLast:(BOOL)isLast arg1:(int)arg1 data:(NSMutableDictionary *)eventData;
+
+@end
+

BIN
ios/Frameworks/iflyMSC.framework/iflyMSC


+ 26 - 20
ios/speech_plugin.podspec

@@ -1,21 +1,27 @@
-#
-# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
-#
-Pod::Spec.new do |s|
-  s.name             = 'speech_plugin'
-  s.version          = '0.0.1'
-  s.summary          = 'A new Flutter plugin for speech lib.'
-  s.description      = <<-DESC
-A new Flutter plugin for speech lib.
-                       DESC
-  s.homepage         = 'http://example.com'
-  s.license          = { :file => '../LICENSE' }
-  s.author           = { 'Your Company' => 'email@example.com' }
-  s.source           = { :path => '.' }
-  s.source_files = 'Classes/**/*'
-  s.public_header_files = 'Classes/**/*.h'
-  s.dependency 'Flutter'
-
-  s.ios.deployment_target = '8.0'
-end
+#
+# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.
+# Run `pod lib lint speech_plugin.podspec' to validate before publishing.
+#
+Pod::Spec.new do |s|
+  s.name             = 'speech_plugin'
+  s.version          = '0.0.1'
+  s.summary          = 'A new Flutter plugin.'
+  s.description      = <<-DESC
+A new Flutter plugin.
+                       DESC
+  s.homepage         = 'http://example.com'
+  s.license          = { :file => '../LICENSE' }
+  s.author           = { 'Your Company' => 'email@example.com' }
+  s.source           = { :path => '.' }
+  s.source_files = 'Classes/**/*'
+  s.public_header_files = 'Classes/**/*.h'
+  s.dependency 'Flutter'
+  s.platform = :ios, '8.0'
 
+  s.frameworks = 'AVFoundation', 'SystemConfiguration', 'Foundation', 'CoreTelephony', 'AudioToolbox', 'UIKit', 'CoreLocation', 'QuartzCore', 'CoreGraphics'
+  s.libraries = 'z', 'c++'
+  s.vendored_frameworks = 'Frameworks/iflyMSC.framework'
+
+  # Flutter.framework does not contain a i386 slice. Only x86_64 simulators are supported.
+  s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' }
+end

+ 67 - 18
pubspec.lock

@@ -1,34 +1,62 @@
 # Generated by pub
 # See https://dart.dev/tools/pub/glossary#lockfile
 packages:
+  archive:
+    dependency: transitive
+    description:
+      name: archive
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "2.0.10"
+  args:
+    dependency: transitive
+    description:
+      name: args
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "1.5.2"
   async:
     dependency: transitive
     description:
       name: async
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "2.3.0"
   boolean_selector:
     dependency: transitive
     description:
       name: boolean_selector
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.0.5"
   charcode:
     dependency: transitive
     description:
       name: charcode
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.1.2"
   collection:
     dependency: transitive
     description:
       name: collection
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.14.11"
+  convert:
+    dependency: transitive
+    description:
+      name: convert
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "2.1.1"
+  crypto:
+    dependency: transitive
+    description:
+      name: crypto
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "2.1.3"
   flutter:
     dependency: "direct main"
     description: flutter
@@ -39,39 +67,53 @@ packages:
     description: flutter
     source: sdk
     version: "0.0.0"
+  image:
+    dependency: transitive
+    description:
+      name: image
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "2.1.4"
   matcher:
     dependency: transitive
     description:
       name: matcher
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "0.12.5"
   meta:
     dependency: transitive
     description:
       name: meta
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.1.7"
   path:
     dependency: transitive
     description:
       name: path
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.6.4"
   pedantic:
     dependency: transitive
     description:
       name: pedantic
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.8.0+1"
+  petitparser:
+    dependency: transitive
+    description:
+      name: petitparser
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "2.4.0"
   quiver:
     dependency: transitive
     description:
       name: quiver
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "2.0.5"
   sky_engine:
@@ -83,57 +125,64 @@ packages:
     dependency: transitive
     description:
       name: source_span
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.5.5"
   stack_trace:
     dependency: transitive
     description:
       name: stack_trace
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.9.3"
   stream_channel:
     dependency: transitive
     description:
       name: stream_channel
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "2.0.0"
   string_scanner:
     dependency: transitive
     description:
       name: string_scanner
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.0.5"
   term_glyph:
     dependency: transitive
     description:
       name: term_glyph
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.1.0"
   test_api:
     dependency: transitive
     description:
       name: test_api
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "0.2.5"
   typed_data:
     dependency: transitive
     description:
       name: typed_data
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.1.6"
   vector_math:
     dependency: transitive
     description:
       name: vector_math
-      url: "https://pub.dartlang.org"
+      url: "https://pub.flutter-io.cn"
     source: hosted
     version: "2.0.8"
+  xml:
+    dependency: transitive
+    description:
+      name: xml
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "3.5.0"
 sdks:
-  dart: ">=2.2.2 <3.0.0"
+  dart: ">=2.4.0 <3.0.0"

+ 1 - 0
speech_plugin.iml

@@ -11,6 +11,7 @@
       <excludeFolder url="file://$MODULE_DIR$/example/.dart_tool" />
       <excludeFolder url="file://$MODULE_DIR$/example/.pub" />
       <excludeFolder url="file://$MODULE_DIR$/example/build" />
+      <excludeFolder url="file://$MODULE_DIR$/example/ios/Flutter/App.framework/flutter_assets/packages" />
     </content>
     <orderEntry type="sourceFolder" forTests="false" />
     <orderEntry type="library" name="Dart SDK" level="project" />