Implementing an Audio Recording Plugin for Android with Flutter

The native side provides the functionality; the Dart module invokes it asynchronously through a method channel.

The Android side

Manual registration

Flutter's official approach is to register plugins automatically, which is convenient.

Here the plugin is registered manually instead, which is where this article differs.

The plugin class is AudioRecorderPlugin:

import android.os.Bundle
import io.flutter.embedding.android.FlutterActivity

class MainActivity : FlutterActivity() {
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // The FlutterEngine is created in super.onCreate(), so it can be used
        // here to register the plugin manually.
        flutterEngine!!.plugins.add(AudioRecorderPlugin())
    }
}
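
Another common place to do this manual registration is FlutterActivity's configureFlutterEngine hook, which runs once the engine is ready. A small sketch of that alternative (not taken from the original article):

import io.flutter.embedding.android.FlutterActivity
import io.flutter.embedding.engine.FlutterEngine

class MainActivity : FlutterActivity() {
    override fun configureFlutterEngine(flutterEngine: FlutterEngine) {
        super.configureFlutterEngine(flutterEngine)
        // Manual registration: add the plugin to this engine's plugin registry.
        flutterEngine.plugins.add(AudioRecorderPlugin())
    }
}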

Communication between Android and Dart

It mainly consists of handling method-call messages and replying to them.

The handler below covers, in order:

  • start recording
  • stop recording
  • whether recording is in progress
  • whether recording permissions have been granted

Note that "recording permissions" here means two permissions: the microphone permission and the storage permission.

@Override
public void onMethodCall(@NonNull MethodCall call, @NonNull Result result) {
  switch (call.method) {
    case "start":
      Log.d(LOG_TAG, "Start");
      Log.d(LOG_TAG, "11111____");
      String path = call.argument("path");
      mExtension = call.argument("extension");
      startTime = Calendar.getInstance().getTime();
      if (path != null) {
        mFilePath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/" + path;
      } else {
        Log.d(LOG_TAG, "11111____222");
        String fileName = String.valueOf(startTime.getTime());
        mFilePath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/" + fileName + mExtension;
      }
      Log.d(LOG_TAG, mFilePath);
      startRecording();
      isRecording = true;
      result.success(null);
      break;
    case "stop":
      Log.d(LOG_TAG, "Stop");
      stopRecording();
      long duration = Calendar.getInstance().getTime().getTime() - startTime.getTime();
      Log.d(LOG_TAG, "Duration : " + String.valueOf(duration));
      isRecording = false;
      HashMap<String, Object> recordingResult = new HashMap<>();
      recordingResult.put("duration", duration);
      recordingResult.put("path", mFilePath);
      recordingResult.put("audioOutputFormat", mExtension);
      result.success(recordingResult);
      break;
    case "isRecording":
      Log.d(LOG_TAG, "Get isRecording");
      result.success(isRecording);
      break;
    case "hasPermissions":
      Log.d(LOG_TAG, "Get hasPermissions");
      Context context = _flutterBinding.getApplicationContext();
      PackageManager pm = context.getPackageManager();
      int hasStoragePerm = pm.checkPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE, context.getPackageName());
      int hasRecordPerm = pm.checkPermission(Manifest.permission.RECORD_AUDIO, context.getPackageName());
      boolean hasPermissions = hasStoragePerm == PackageManager.PERMISSION_GRANTED && hasRecordPerm == PackageManager.PERMISSION_GRANTED;
      result.success(hasPermissions);
      break;
    default:
      result.notImplemented();
      break;
  }
}

Recording audio on Android

For the wav container format, use AudioRecord; for other container formats, use MediaRecorder.

Both recorders provide start and stop recording; pause and resume are implemented by starting and stopping multiple times and then concatenating the resulting files (a sketch of the MediaRecorder path is shown below).
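
As an illustration, here is a minimal sketch (not the article's actual implementation; the class name and fields are made up) of recording a non-wav format such as .m4a with MediaRecorder. The wav path would instead use AudioRecord and write the PCM data and wav header itself.

import android.media.MediaRecorder

// Hypothetical helper, not part of the plugin above: records AAC audio into an
// MPEG-4 container (for example a .m4a file) using MediaRecorder.
class SimpleM4aRecorder(private val outputPath: String) {

    private var recorder: MediaRecorder? = null

    fun start() {
        recorder = MediaRecorder().apply {
            setAudioSource(MediaRecorder.AudioSource.MIC)       // capture from the microphone
            setOutputFormat(MediaRecorder.OutputFormat.MPEG_4)  // container format
            setAudioEncoder(MediaRecorder.AudioEncoder.AAC)     // audio codec
            setOutputFile(outputPath)
            prepare()
            start()
        }
    }

    fun stop() {
        recorder?.apply {
            stop()
            release()
        }
        recorder = null
    }
}

Pause and resume, as described above, would then mean stopping one such recording, starting a fresh one, and merging the output files afterwards.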

The Dart module

Create a MethodChannel and call the native functionality above asynchronously:

import 'package:file/file.dart';
import 'package:file/local.dart';
import 'package:flutter/services.dart';
import 'package:path/path.dart' as p;

class AudioRecorder {
  static const MethodChannel _channel = MethodChannel('audio_recorder');
  static LocalFileSystem fs = LocalFileSystem();

  static Future<void> start(String? path, AudioOutputFormat? audioOutputFormat) async {
    String extension;
    if (path != null) {
      if (audioOutputFormat != null) {
        if (_convertStringInAudioOutputFormat(p.extension(path)) !=
            audioOutputFormat) {
          extension = _convertAudioOutputFormatInString(audioOutputFormat);
          path += extension;
        } else {
          extension = p.extension(path);
        }
      } else {
        if (_isAudioOutputFormat(p.extension(path))) {
          extension = p.extension(path);
        } else {
          extension = ".m4a"; // default value
          path += extension;
        }
      }
      File file = fs.file(path);
      if (await file.exists()) {
        throw new Exception("A file already exists at the path :" + path);
      } else if (!await file.parent.exists()) {
        throw new Exception("The specified parent directory does not exist");
      }
    } else {
      extension = ".m4a"; // default value
    }
    return _channel
        .invokeMethod('start', {"path": path, "extension": extension});
  }
  static Future<Recording?> stop() async {
    // Put the information returned from the native side into a map.
    var result = await _channel.invokeMethod('stop');
    if (result == null) {
      return null;
    }
    Map<String, dynamic> response = Map.from(result);
    int duration = response['duration'];
    String? fmt = response['audioOutputFormat'];
    if (fmt == null) {
      return null;
    }
    AudioOutputFormat? outputFmt = _convertStringInAudioOutputFormat(fmt);
    if (outputFmt == null) {
      return null;
    }
    return Recording(
        Duration(milliseconds: duration), response['path'], outputFmt, fmt);
  }

  // _convertStringInAudioOutputFormat, _convertAudioOutputFormatInString,
  // _isAudioOutputFormat, and the Recording / AudioOutputFormat types are
  // omitted here.
}

The iOS side

Registering the plugin manually

The plugin class here is SwiftAudioRecorderPlugin:

import AVFoundation
import Flutter

public class SwiftAudioRecorderPlugin: NSObject, AVAudioRecorderDelegate, FlutterPlugin {
    public static func register(with registrar: FlutterPluginRegistrar) {
        let channel = FlutterMethodChannel(name: "audio_recorder", binaryMessenger: registrar.messenger())
        let instance = SwiftAudioRecorderPlugin()
        registrar.addMethodCallDelegate(instance, channel: channel)
    }

    // The recorder state and the handle(_:result:) implementation are shown
    // in full in the next section.
}

The iOS plugin

The logic is similar to the Android plugin.

Because iOS's AVAudioRecorder supports pause and resume well, pause and resume recording are added here.

The iOS side also needs one permission fewer than Android: only the microphone (record) permission is required.

import AVFoundation
import Flutter

// AVAudioRecorderDelegate conformance is needed because the recorder's
// delegate is set to this instance below.
public class SwiftAudioRecorderPlugin: NSObject, AVAudioRecorderDelegate, FlutterPlugin {
    var isRecording = false
    var hasPermissions = false
    var mExtension = ""
    var mPath = ""
    var startTime: Date!
    var audioRecorder: AVAudioRecorder?
  public static func register(with registrar: FlutterPluginRegistrar) {
    let channel = FlutterMethodChannel(name: "audio_recorder", binaryMessenger: registrar.messenger())
    let instance = SwiftAudioRecorderPlugin()
    registrar.addMethodCallDelegate(instance, channel: channel)
  }
  public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
    switch call.method {
        case "start":
            print("start")
            let dic = call.arguments as! [String : Any]
            mExtension = dic["extension"] as? String ?? ""
            mPath = dic["path"] as? String ?? ""
            startTime = Date()
            let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
            if mPath == "" {
                mPath = documentsPath + "/" + String(Int(startTime.timeIntervalSince1970)) + ".m4a"
            }
            else{
                mPath = documentsPath + "/" + mPath
            }
            print("path: " + mPath)
            let settings = [
                AVFormatIDKey: getOutputFormatFromString(mExtension),
                AVSampleRateKey: 12000,
                AVNumberOfChannelsKey: 1,
                AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
            ]
            do {
                try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, options: AVAudioSession.CategoryOptions.defaultToSpeaker)
                try AVAudioSession.sharedInstance().setActive(true)
                let recorder = try AVAudioRecorder(url: URL(fileURLWithPath: mPath), settings: settings)
                recorder.delegate = self
                recorder.record()
                audioRecorder = recorder
            } catch {
                print("fail")
                result(FlutterError(code: "", message: "Failed to record", details: nil))
                // Do not fall through: result must only be called once per method call.
                return
            }
            isRecording = true
            result(nil)
        case "pause":
            audioRecorder?.pause()
            result(nil)
        case "resume":
            audioRecorder?.record()
            result(nil)
        case "stop":
            print("stop")
            audioRecorder?.stop()
            audioRecorder = nil
            let duration = Int(Date().timeIntervalSince(startTime as Date) * 1000)
            isRecording = false
            var recordingResult = [String : Any]()
            recordingResult["duration"] = duration
            recordingResult["path"] = mPath
            recordingResult["audioOutputFormat"] = mExtension
            result(recordingResult)
        case "isRecording":
            print("isRecording")
            result(isRecording)
        case "hasPermissions":
            print("hasPermissions")
        switch AVAudioSession.sharedInstance().recordPermission{
            case AVAudioSession.RecordPermission.granted:
                print("granted")
                hasPermissions = true
            case AVAudioSession.RecordPermission.denied:
                print("denied")
                hasPermissions = false
            case AVAudioSession.RecordPermission.undetermined:
                print("undetermined")
                AVAudioSession.sharedInstance().requestRecordPermission() { [unowned self] allowed in
                    DispatchQueue.main.async {
                        if allowed {
                            self.hasPermissions = true
                        } else {
                            self.hasPermissions = false
                        }
                    }
                }
            default:()
            }
            result(hasPermissions)
        default:
            result(FlutterMethodNotImplemented)
        }
      }
    }

The Dart calling code

By checking the platform with Platform.isIOS, the more complete set of controls (including pause and resume) is exposed on iOS devices:

@override
Widget build(BuildContext context) {
  final VoidCallback tapFirst;
  if (Platform.isAndroid && name == kEnd) {
    tapFirst = _audioEnd;
  } else {
    tapFirst = _audioGoOn;
  }
  List<Widget> views = [
    ElevatedButton(
      child: Text(
        name,
        style: Theme.of(context).textTheme.headline4,
      ),
      onPressed: tapFirst,
    )
  ];
  if (Platform.isIOS && name != kStarted) {
    views.add(SizedBox(height: 80));
    views.add(ElevatedButton(
      child: Text(
        kEnd,
        style: Theme.of(context).textTheme.headline4,
      ),
      onPressed: _audioEnd,
    ));
  }
  return Scaffold(
    appBar: AppBar(
      // Here we take the value from the MyHomePage object that was created by
      // the App.build method, and use it to set our appbar title.
      title: Text(widget.title),
    ),
    body: Center(
      // Center is a layout widget. It takes a single child and positions it
      // in the middle of the parent.
      child: Column(
        mainAxisAlignment: MainAxisAlignment.center,
        children: views,
      ),
    ), // This trailing comma makes auto-formatting nicer for build methods.
  );
}

GitHub repo
