该插件基于flutter包 flutter_screen_recording 和 github库 SystemAudioCaptureAndroid,实现了在安卓手机上录制系统播放声音的功能,也就是说,只要一个安卓应用没有设置不允许其它应用录制声音,该插件可以录制该应用播放的声音。
Github 地址:flutterSystemAudioRecorder
创建工程
创建插件工程
flutter create -t plugin --platform android system_audio_recorder
创建好的插件文件夹如下图所示
 
创建好的插件工程主要需要修改代码的是以下三个目录
- android:Android的原生代码
- example:一个Flutter的实例项目,用来展示、测试你开发的plugin的
- lib:Plugin的Dart代码
原生代码
用android studio 打开 system_audio_recorder/android,开始修改配置。打开 system_audio_recorder/android/gradle/wrapper/gradle-wrapper.properties,将 distributionUrl 修改为 file:///D://work//app//gradle-7.5-all.zip(需要根据实际情况修改,如果网络条件好可以不改)。打开 system_audio_recorder/android/build.gradle 在文件末尾添加flutter和androidx.core的配置
// Load the local.properties configuration file.
def localProperties = new Properties()
def localPropertiesFile = rootProject.file('local.properties')
if (localPropertiesFile.exists()) {
    localPropertiesFile.withReader('UTF-8') { reader ->
        localProperties.load(reader)
    }
}

// Resolve the Flutter SDK path declared in local.properties.
def flutterRoot = localProperties.getProperty('flutter.sdk')
if (flutterRoot == null) {
    throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
}

dependencies {
    implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
    // The Flutter engine jar is compile-only: it is provided at runtime by the host app.
    compileOnly files("$flutterRoot/bin/cache/artifacts/engine/android-arm/flutter.jar")
    compileOnly 'androidx.annotation:annotation:1.1.0'
    implementation 'androidx.core:core:1.6.0'
}
点击 sync Now,同步gradle包。
kotlin/com/example/system_audio_recorder/ 已经有 SystemAudioRecorderPlugin.kt ,这个文件用来实现插件的各种方法,但是由于需要录音系统声音,需要使用前台服务,所以需要额外添加一个 ForegroundService.kt 文件用于配置前台服务,ForegroundService.kt 的内容如下
ForegroundService 的package不能和SystemAudioRecorderPlugin一致,否则在启动服务时会报错!!!
package com.foregroundservice

import android.Manifest
import android.app.NotificationChannel
import android.app.NotificationManager
import android.app.PendingIntent
import android.app.Service
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager
import android.content.pm.ServiceInfo
import android.os.Build
import android.os.IBinder
import android.util.Log
import androidx.core.app.NotificationCompat
import androidx.core.content.ContextCompat

/**
 * Foreground service that must be running before system-audio capture starts
 * (Android requires a foreground service of type "mediaProjection" for
 * AudioPlaybackCapture).
 *
 * NOTE: this class intentionally lives in a different package from
 * SystemAudioRecorderPlugin; using the same package breaks service start-up.
 */
class ForegroundService : Service() {

    private val CHANNEL_ID = "ForegroundService Kotlin"

    companion object {
        private const val TAG = "ForegroundService"

        /** Starts this service in the foreground, showing [title]/[message] in its notification. */
        fun startService(context: Context, title: String, message: String) {
            try {
                val startIntent = Intent(context, ForegroundService::class.java)
                startIntent.putExtra("titleExtra", title)
                startIntent.putExtra("messageExtra", message)
                ContextCompat.startForegroundService(context, startIntent)
            } catch (err: Exception) {
                Log.e(TAG, "startService error: $err")
            }
        }

        /** Stops the service (which also removes its notification). */
        fun stopService(context: Context) {
            context.stopService(Intent(context, ForegroundService::class.java))
        }
    }

    override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int {
        try {
            Log.i(TAG, "onStartCommand")
            // On Android 14+ FOREGROUND_SERVICE_MEDIA_PROJECTION must be declared in
            // the manifest. It is a normal (install-time) permission, so it cannot be
            // requested at runtime — and certainly not from a Service (the original
            // code cast `this` to Activity, which always throws ClassCastException).
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE &&
                ContextCompat.checkSelfPermission(
                    this, Manifest.permission.FOREGROUND_SERVICE_MEDIA_PROJECTION
                ) != PackageManager.PERMISSION_GRANTED
            ) {
                Log.e(TAG, "FOREGROUND_SERVICE_MEDIA_PROJECTION not granted; declare it in AndroidManifest.xml")
                stopSelf()
                return START_NOT_STICKY
            }
            startForegroundServiceWithNotification(intent)
        } catch (err: Exception) {
            Log.e(TAG, "onStartCommand error: $err")
        }
        // Do not restart automatically: a restarted service would have no
        // MediaProjection token and could do nothing useful.
        return START_NOT_STICKY
    }

    /** Builds the mandatory notification and promotes the service to the foreground. */
    private fun startForegroundServiceWithNotification(intent: Intent?) {
        createNotificationChannel()
        // Tapping the notification re-opens the app's launcher activity (the
        // original code targeted the plugin class, which is not an Activity).
        val launchIntent = packageManager.getLaunchIntentForPackage(packageName)
        val pendingIntent = launchIntent?.let {
            PendingIntent.getActivity(this, 0, it, PendingIntent.FLAG_IMMUTABLE)
        }
        val notification = NotificationCompat.Builder(this, CHANNEL_ID)
            .setSmallIcon(applicationInfo.icon) // a small icon is mandatory on Android
            .setContentTitle(intent?.getStringExtra("titleExtra") ?: "")
            .setContentText(intent?.getStringExtra("messageExtra") ?: "")
            .setContentIntent(pendingIntent)
            .build()
        // API 29+ requires the service type to be stated explicitly for media projection.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            startForeground(1, notification, ServiceInfo.FOREGROUND_SERVICE_TYPE_MEDIA_PROJECTION)
        } else {
            startForeground(1, notification)
        }
        Log.i(TAG, "startForegroundServiceWithNotification")
    }

    /** This service is start-only; binding is not supported. */
    override fun onBind(intent: Intent): IBinder? = null

    /** Creates the notification channel required on Android 8.0+ (no-op below O). */
    private fun createNotificationChannel() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            val serviceChannel = NotificationChannel(
                CHANNEL_ID, "Foreground Service Channel", NotificationManager.IMPORTANCE_DEFAULT
            )
            getSystemService(NotificationManager::class.java)!!.createNotificationChannel(serviceChannel)
        }
    }
}
SystemAudioRecorderPlugin 的内容如下
package com.example.system_audio_recorder

import android.annotation.SuppressLint
import android.app.Activity
import android.content.Context
import android.content.Intent
import android.media.AudioAttributes
import android.media.AudioFormat
import android.media.AudioPlaybackCaptureConfiguration
import android.media.AudioRecord
import android.media.projection.MediaProjection
import android.media.projection.MediaProjectionManager
import android.os.Build
import android.os.Environment
import android.util.Log
import androidx.annotation.RequiresApi
import androidx.core.app.ActivityCompat
import io.flutter.embedding.engine.plugins.FlutterPlugin
import io.flutter.embedding.engine.plugins.activity.ActivityAware
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.MethodChannel.MethodCallHandler
import io.flutter.plugin.common.MethodChannel.Result
import io.flutter.plugin.common.PluginRegistry
import java.io.DataInputStream
import java.io.DataOutputStream
import java.io.File
import java.io.FileInputStream
import java.io.FileNotFoundException
import java.io.FileOutputStream
import java.io.IOException
import java.text.SimpleDateFormat
import java.util.Date
import com.foregroundservice.ForegroundService

/**
 * SystemAudioRecorderPlugin
 *
 * Captures the audio that other apps are playing (AudioPlaybackCapture,
 * Android 10 / API 29+) via MediaProjection, buffers it as raw 16-bit mono
 * PCM, and converts it to a WAV file in Music/Audio Capture when recording
 * stops.
 */
class SystemAudioRecorderPlugin : MethodCallHandler, PluginRegistry.ActivityResultListener,
    FlutterPlugin, ActivityAware {

    private lateinit var channel: MethodChannel
    private var mProjectionManager: MediaProjectionManager? = null
    private var mMediaProjection: MediaProjection? = null
    private var mFileName: String? = ""                  // name of the finished WAV file
    private val RECORD_REQUEST_CODE = 333                // request code of the consent dialog
    var TAG: String = "system_audio_recorder"
    private lateinit var _result: Result                 // pending Flutter result for startRecord
    private var pluginBinding: FlutterPlugin.FlutterPluginBinding? = null
    private var activityBinding: ActivityPluginBinding? = null
    var recordingThread: Thread? = null
    private val bufferElements2Record = 1024             // shorts read per AudioRecord.read call
    private val bytesPerElement = 2                      // 16-bit PCM: 2 bytes per sample
    private var mAudioRecord: AudioRecord? = null
    private var isRecording: Boolean = false
    private var RECORDER_SAMPLERATE: Int = 44100
    val RECORDER_CHANNELS: Int = AudioFormat.CHANNEL_IN_MONO
    val RECORDER_AUDIO_ENCODING: Int = AudioFormat.ENCODING_PCM_16BIT
    private var root: File? = null                       // public output dir: Music/Audio Capture
    private var cache: File? = null                      // app cache dir holding the raw PCM
    private var rawOutput: File? = null                  // temporary raw PCM file

    override fun onAttachedToEngine(flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) {
        pluginBinding = flutterPluginBinding
    }

    /**
     * Receives the result of the MediaProjection consent dialog launched from
     * onMethodCall("startRecord") and, on success, starts the actual capture.
     * Completes the pending Flutter result with the real outcome (the original
     * code reported success before knowing whether capture started, and could
     * NPE on a null [data]).
     */
    @RequiresApi(Build.VERSION_CODES.Q)
    override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?): Boolean {
        if (requestCode != RECORD_REQUEST_CODE) return false
        if (resultCode == Activity.RESULT_OK && data != null) {
            mMediaProjection = mProjectionManager?.getMediaProjection(resultCode, data)
            val projection = mMediaProjection
            _result.success(projection != null && startRecording(projection))
            return true
        }
        _result.success(false)
        return false
    }

    override fun onMethodCall(call: MethodCall, result: Result) {
        val appContext = pluginBinding!!.applicationContext
        when (call.method) {
            "getPlatformVersion" -> result.success("Android ${Build.VERSION.RELEASE}")
            "startRecord" -> try {
                _result = result
                call.argument<Int?>("sampleRate")?.let { RECORDER_SAMPLERATE = it }
                ForegroundService.startService(appContext, "开始录音", "开始录音")
                mProjectionManager = appContext.getSystemService(
                    Context.MEDIA_PROJECTION_SERVICE
                ) as MediaProjectionManager?
                val permissionIntent = mProjectionManager?.createScreenCaptureIntent()
                Log.i(TAG, "startActivityForResult")
                // Shows the user consent dialog; its answer arrives in onActivityResult.
                ActivityCompat.startActivityForResult(
                    activityBinding!!.activity,
                    permissionIntent!!,
                    RECORD_REQUEST_CODE,
                    null
                )
            } catch (e: Exception) {
                Log.e(TAG, "Error onMethodCall startRecord: ${e.message}")
                result.success(false)
            }
            "stopRecord" -> {
                Log.i(TAG, "stopRecord")
                try {
                    ForegroundService.stopService(appContext)
                    if (mAudioRecord != null) {
                        stop()
                        result.success(mFileName)
                    } else {
                        result.success("")
                    }
                } catch (e: Exception) {
                    result.success("")
                }
            }
            else -> result.notImplemented()
        }
    }

    /**
     * Configures an AudioRecord that captures USAGE_MEDIA / USAGE_GAME playback
     * through [mProjection] and starts the background thread that drains it.
     * Returns false when playback capture is unavailable on this device.
     */
    @RequiresApi(api = Build.VERSION_CODES.Q)
    fun startRecording(mProjection: MediaProjection): Boolean {
        Log.i(TAG, "startRecording")
        if (mAudioRecord == null) {
            val config: AudioPlaybackCaptureConfiguration
            try {
                config = AudioPlaybackCaptureConfiguration.Builder(mProjection)
                    .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
                    .addMatchingUsage(AudioAttributes.USAGE_GAME)
                    .build()
            } catch (e: NoClassDefFoundError) {
                return false
            }
            val format = AudioFormat.Builder()
                .setEncoding(RECORDER_AUDIO_ENCODING)
                .setSampleRate(RECORDER_SAMPLERATE)
                .setChannelMask(RECORDER_CHANNELS)
                .build()
            mAudioRecord = AudioRecord.Builder()
                .setAudioFormat(format)
                .setBufferSizeInBytes(bufferElements2Record)
                .setAudioPlaybackCaptureConfig(config)
                .build()
            isRecording = true
            mAudioRecord!!.startRecording()
            createAudioFile()
            recordingThread = Thread({ writeAudioFile() }, "System Audio Capture")
            recordingThread!!.start()
        }
        return true
    }

    /**
     * Wraps the raw 16-bit mono PCM in [rawFile] in a canonical 44-byte WAV
     * header and writes the result to [waveFile]. AudioRecord delivers
     * little-endian samples — exactly what WAV expects — so the payload is
     * copied unchanged (the original built a byte-swapped copy and then threw
     * it away, re-reading the file instead).
     */
    @Throws(IOException::class)
    private fun rawToWave(rawFile: File, waveFile: File) {
        val rawData = ByteArray(rawFile.length().toInt())
        DataInputStream(FileInputStream(rawFile)).use { input ->
            // readFully (unlike read) guarantees the whole file is consumed.
            input.readFully(rawData)
        }
        var output: DataOutputStream? = null
        try {
            output = DataOutputStream(FileOutputStream(waveFile))
            val channels = 1
            val bitsPerSample = 16
            val blockAlign = channels * bitsPerSample / 8
            // WAVE header
            writeString(output, "RIFF")                         // chunk id
            writeInt(output, 36 + rawData.size)                 // chunk size
            writeString(output, "WAVE")                         // format
            writeString(output, "fmt ")                         // subchunk 1 id
            writeInt(output, 16)                                // subchunk 1 size
            writeShort(output, 1.toShort())                     // audio format (1 = PCM)
            writeShort(output, channels.toShort())              // number of channels
            writeInt(output, RECORDER_SAMPLERATE)               // sample rate
            writeInt(output, RECORDER_SAMPLERATE * blockAlign)  // byte rate (the original wrongly wrote the bare sample rate)
            writeShort(output, blockAlign.toShort())            // block align
            writeShort(output, bitsPerSample.toShort())         // bits per sample
            writeString(output, "data")                         // subchunk 2 id
            writeInt(output, rawData.size)                      // subchunk 2 size
            output.write(rawData)                               // PCM payload
        } finally {
            output?.close()
        }
    }

    /**
     * Reads the whole of [f] into a byte array, looping until every byte is in
     * (a single read() may return short). Throws on premature EOF instead of
     * spinning forever as the original did when read() returned -1.
     */
    @Throws(IOException::class)
    fun fullyReadFileToBytes(f: File): ByteArray {
        val size = f.length().toInt()
        val bytes = ByteArray(size)
        FileInputStream(f).use { fis ->
            var offset = 0
            while (offset < size) {
                val read = fis.read(bytes, offset, size - offset)
                if (read < 0) throw IOException("Unexpected end of file: ${f.absolutePath}")
                offset += read
            }
        }
        return bytes
    }

    /** Creates Music/Audio Capture (final output) and <cache>/RawData/raw.pcm (scratch). */
    private fun createAudioFile() {
        root = File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC), "/Audio Capture")
        cache = File(pluginBinding!!.applicationContext.cacheDir.absolutePath, "/RawData")
        // mkdirs() (not mkdir()) so any missing parent directories are created too.
        if (!root!!.exists()) {
            root!!.mkdirs()
            root!!.setWritable(true)
        }
        if (!cache!!.exists()) {
            cache!!.mkdirs()
            cache!!.setWritable(true)
            cache!!.setReadable(true)
        }
        rawOutput = File(cache, "raw.pcm")
        try {
            rawOutput!!.createNewFile()
        } catch (e: IOException) {
            Log.e(TAG, "createAudioFile: $e")
            e.printStackTrace()
        }
        Log.d(TAG, "path: " + rawOutput!!.absolutePath)
    }

    /** Writes a 32-bit int in little-endian byte order (WAV header convention). */
    @Throws(IOException::class)
    private fun writeInt(output: DataOutputStream, value: Int) {
        output.write(value shr 0)
        output.write(value shr 8)
        output.write(value shr 16)
        output.write(value shr 24)
    }

    /** Writes a 16-bit short in little-endian byte order. */
    @Throws(IOException::class)
    private fun writeShort(output: DataOutputStream, value: Short) {
        output.write(value.toInt() shr 0)
        output.write(value.toInt() shr 8)
    }

    /** Writes an ASCII tag byte-by-byte (e.g. "RIFF", "data"). */
    @Throws(IOException::class)
    private fun writeString(output: DataOutputStream, value: String) {
        for (element in value) {
            output.write(element.code)
        }
    }

    /** Converts samples to little-endian bytes, zeroing the source buffer as it goes. */
    private fun shortToByte(data: ShortArray): ByteArray {
        val bytes = ByteArray(data.size * 2)
        for (i in data.indices) {
            bytes[i * 2] = (data[i].toInt() and 0x00FF).toByte()
            bytes[i * 2 + 1] = (data[i].toInt() shr 8).toByte()
            data[i] = 0
        }
        return bytes
    }

    /** Drains the AudioRecord into the raw PCM scratch file until isRecording flips. */
    private fun writeAudioFile() {
        try {
            FileOutputStream(rawOutput!!.absolutePath).use { outputStream ->
                val data = ShortArray(bufferElements2Record)
                while (isRecording) {
                    val read = mAudioRecord!!.read(data, 0, bufferElements2Record)
                    // Persist only what was actually delivered: read() may return
                    // fewer samples than requested or a negative error code (the
                    // original ignored the count and wrote the full buffer).
                    if (read > 0) {
                        outputStream.write(shortToByte(data), 0, read * bytesPerElement)
                    }
                }
            }
        } catch (e: FileNotFoundException) {
            Log.e(TAG, "File Not Found: $e")
            e.printStackTrace()
        } catch (e: IOException) {
            Log.e(TAG, "IO Exception: $e")
            e.printStackTrace()
        }
    }

    /**
     * Stops and releases the AudioRecord, converts the captured raw PCM into a
     * WAV file in Music/Audio Capture, and deletes the scratch file.
     */
    @SuppressLint("SimpleDateFormat")
    fun startProcessing() {
        isRecording = false
        mAudioRecord!!.stop()
        mAudioRecord!!.release()
        // HH (24-hour) avoids name collisions; the payload is WAV, so use a .wav
        // suffix (the original named it .mp3 although no MP3 encoding happens).
        mFileName = SimpleDateFormat("yyyy-MM-dd HH-mm-ss").format(Date()) + ".wav"
        val output = File(root, mFileName)
        try {
            output.createNewFile()
        } catch (e: IOException) {
            e.printStackTrace()
            Log.e(TAG, "startProcessing: $e")
        }
        try {
            rawOutput?.let { rawToWave(it, output) }
        } catch (e: IOException) {
            e.printStackTrace()
        } finally {
            rawOutput?.delete()
        }
    }

    /** Finalizes the recording and clears the recorder state. */
    private fun stop() {
        if (mAudioRecord != null) {
            startProcessing()
            mAudioRecord = null
            recordingThread = null
        }
    }

    override fun onDetachedFromEngine(binding: FlutterPlugin.FlutterPluginBinding) {
        pluginBinding = null
    }

    override fun onAttachedToActivity(binding: ActivityPluginBinding) {
        activityBinding = binding
        channel = MethodChannel(pluginBinding!!.binaryMessenger, "system_audio_recorder")
        channel.setMethodCallHandler(this)
        activityBinding!!.addActivityResultListener(this)
    }

    override fun onDetachedFromActivityForConfigChanges() {}

    override fun onReattachedToActivityForConfigChanges(binding: ActivityPluginBinding) {
        activityBinding = binding
    }

    override fun onDetachedFromActivity() {
        // Unregister to avoid leaking this plugin instance (the original leaked it).
        activityBinding?.removeActivityResultListener(this)
        activityBinding = null
    }
}
此外需要配置一下 system_audio_recorder/android/src/main/AndroidManifest.xml,添加一些权限。
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.system_audio_recorder">
    <!-- Storage access for writing the finished WAV into the public Music folder -->
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
    <!-- Foreground service; FOREGROUND_SERVICE_MEDIA_PROJECTION is additionally
         required on Android 14+. (Removed: a duplicate of this permission, the
         nonexistent WRITE_INTERNAL_STORAGE, and FOREGROUND_SERVICE_TYPE_MEDIA_PROJECTION,
         which is a service attribute value, not a permission.) -->
    <uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
    <uses-permission android:name="android.permission.FOREGROUND_SERVICE_MEDIA_PROJECTION" />
    <uses-permission android:name="android.permission.WAKE_LOCK" />
    <uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
</manifest>
插件代码
接下来使用 android studio 打开 system_audio_recorder,编写插件代码。
首先配置一下 system_audio_recorder/pubspec.yaml,添加dependencies
dependencies:
  flutter:
    sdk: flutter
  # newly added dependencies (the comment was fused onto the same line as
  # flutter_foreground_task in the original, which would break the file)
  plugin_platform_interface: ^2.0.2
  flutter_foreground_task: ^6.0.0+1
  meta: ^1.5.0
在 system_audio_recorder/lib 中有三个 dart 文件,三个文件的内容为
system_audio_recorder/lib/system_audio_recorder.dart
  
import 'dart:io';

import 'package:flutter/foundation.dart';
import 'package:flutter_foreground_task/flutter_foreground_task.dart';

import 'system_audio_recorder_platform_interface.dart';

/// Public Dart API of the plugin: start/stop recording of system audio.
class SystemAudioRecorder {
  /// Returns the Android version string reported by the platform side.
  Future<String?> getPlatformVersion() {
    return SystemAudioRecorderPlatform.instance.getPlatformVersion();
  }

  /// Starts recording system audio.
  ///
  /// [titleNotification]/[messageNotification] are shown in the foreground
  /// notification; [sampleRate] defaults to 44100 Hz. Returns `true` when the
  /// platform side reports that capture started, `false` on any failure.
  static Future<bool> startRecord(
    String name, {
    String? titleNotification,
    String? messageNotification,
    int? sampleRate,
  }) async {
    try {
      final title = titleNotification ?? "";
      final message = messageNotification ?? "";
      await _maybeStartFGS(title, message);
      return await SystemAudioRecorderPlatform.instance.startRecord(
        name,
        notificationTitle: title,
        notificationMessage: message,
        sampleRate: sampleRate ?? 44100,
      );
    } catch (err) {
      print("startRecord err");
      print(err);
    }
    return false;
  }

  /// Stops recording and returns the path of the finished file
  /// (empty string on failure).
  static Future<String> get stopRecord async {
    try {
      final String path = await SystemAudioRecorderPlatform.instance.stopRecord;
      if (!kIsWeb && Platform.isAndroid) {
        FlutterForegroundTask.stopService();
      }
      return path;
    } catch (err) {
      print("stopRecord err");
      print(err);
    }
    return "";
  }

  /// Configures the flutter_foreground_task notification channel on Android.
  ///
  /// NOTE(review): despite its name this only calls init(), it never starts a
  /// foreground task — the native side runs its own ForegroundService.
  static _maybeStartFGS(String titleNotification, String messageNotification) {
    try {
      if (!kIsWeb && Platform.isAndroid) {
        FlutterForegroundTask.init(
          androidNotificationOptions: AndroidNotificationOptions(
            channelId: 'notification_channel_id',
            channelName: titleNotification,
            channelDescription: messageNotification,
            channelImportance: NotificationChannelImportance.LOW,
            priority: NotificationPriority.LOW,
            iconData: const NotificationIconData(
              resType: ResourceType.mipmap,
              resPrefix: ResourcePrefix.ic,
              name: 'launcher',
            ),
          ),
          iosNotificationOptions: const IOSNotificationOptions(
            showNotification: true,
            playSound: false,
          ),
          foregroundTaskOptions: const ForegroundTaskOptions(
            interval: 5000,
            autoRunOnBoot: true,
            allowWifiLock: true,
          ),
        );
      }
    } catch (err) {
      print("_maybeStartFGS err");
      print(err);
    }
  }
}
system_audio_recorder_method_channel.dart
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';

import 'system_audio_recorder_platform_interface.dart';

/// An implementation of [SystemAudioRecorderPlatform] that uses method channels.
class MethodChannelSystemAudioRecorder extends SystemAudioRecorderPlatform {
  /// The method channel used to interact with the native platform.
  @visibleForTesting
  final methodChannel = const MethodChannel('system_audio_recorder');

  @override
  Future<String?> getPlatformVersion() async {
    return methodChannel.invokeMethod<String>('getPlatformVersion');
  }

  @override
  Future<bool> startRecord(
    String name, {
    String notificationTitle = "",
    String notificationMessage = "",
    int sampleRate = 44100,
  }) async {
    // A null reply from the platform is treated as failure instead of letting
    // the non-nullable assignment throw (as the original code did).
    final start = await methodChannel.invokeMethod<bool>('startRecord', {
      "name": name,
      "title": notificationTitle,
      "message": notificationMessage,
      "sampleRate": sampleRate,
    });
    return start ?? false;
  }

  @override
  Future<String> get stopRecord async {
    // Same null-safety: a null reply becomes the documented "" failure value.
    final path = await methodChannel.invokeMethod<String>('stopRecord');
    return path ?? "";
  }
}
system_audio_recorder_platform_interface.dart
import 'package:plugin_platform_interface/plugin_platform_interface.dart';  import 'system_audio_recorder_method_channel.dart';  abstract class SystemAudioRecorderPlatform extends PlatformInterface {  /// Constructs a SystemAudioRecorderPlatform.  SystemAudioRecorderPlatform() : super(token: _token);  static final Object _token = Object();  static SystemAudioRecorderPlatform _instance = MethodChannelSystemAudioRecorder();  /// The default instance of [SystemAudioRecorderPlatform] to use.  ///  /// Defaults to [MethodChannelSystemAudioRecorder].  static SystemAudioRecorderPlatform get instance => _instance;  /// Platform-specific implementations should set this with their own  /// platform-specific class that extends [SystemAudioRecorderPlatform] when  /// they register themselves.  static set instance(SystemAudioRecorderPlatform instance) {  PlatformInterface.verifyToken(instance, _token);  _instance = instance;  }  Future<String?> getPlatformVersion() {  throw UnimplementedError('platformVersion() has not been implemented.');  }  Future<bool> startRecord(  String name, {  String notificationTitle = "",  String notificationMessage = "",  int sampleRate = 44100  }) {  throw UnimplementedError();  }  Future<String> get stopRecord {  throw UnimplementedError();  }  
}
example 代码
最后用 android studio 打开 system_audio_recorder/example 文件夹,这里需要在system_audio_recorder/example/android/app/src/main/AndroidManifest.xml中添加 service
<application  android:label="system_audio_recorder_example"  android:name="${applicationName}"  android:icon="@mipmap/ic_launcher"> <!--添加service--><service  android:name="com.foregroundservice.ForegroundService"  android:foregroundServiceType="mediaProjection"  android:enabled="true"  android:exported="false">  </service>  <activity  android:name=".MainActivity".....
同时修改 system_audio_recorder\example\android\app\build.gradle 中的 minSdkVersion 为 23

最后在 main.dart 中编写开始录音和停止录音的代码即可,录制完成的声音在系统 Music 文件夹的 Audio Capture 文件夹中。
import 'dart:async';

import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:system_audio_recorder/system_audio_recorder.dart';

void main() {
  runApp(const MyApp());
}

class MyApp extends StatefulWidget {
  const MyApp({super.key});

  @override
  State<MyApp> createState() => _MyAppState();
}

class _MyAppState extends State<MyApp> {
  String _platformVersion = 'Unknown';
  final _systemAudioRecorderPlugin = SystemAudioRecorder();

  /// Requests the runtime permissions the recorder needs (no-op on web).
  /// Each permission is re-requested once if the first request is denied.
  requestPermissions() async {
    if (!kIsWeb) {
      if (await Permission.storage.request().isDenied) {
        await Permission.storage.request();
      }
      if (await Permission.photos.request().isDenied) {
        await Permission.photos.request();
      }
      if (await Permission.microphone.request().isDenied) {
        await Permission.microphone.request();
      }
    }
  }

  @override
  void initState() {
    super.initState();
    requestPermissions();
    initPlatformState();
  }

  /// Platform messages are asynchronous, so initialization happens here.
  Future<void> initPlatformState() async {
    String platformVersion;
    // Platform messages may fail and may return null, so both are handled.
    try {
      platformVersion = await _systemAudioRecorderPlugin.getPlatformVersion() ??
          'Unknown platform version';
    } on PlatformException {
      platformVersion = 'Failed to get platform version.';
    }
    // Discard the reply if the widget left the tree while the call was in flight.
    if (!mounted) return;
    setState(() {
      _platformVersion = platformVersion;
    });
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: Scaffold(
        appBar: AppBar(
          title: const Text('Plugin example app'),
        ),
        body: Column(
          children: [
            Text('Running on: $_platformVersion\n'),
            TextButton(
              onPressed: () async {
                bool start = await SystemAudioRecorder.startRecord(
                  "test",
                  titleNotification: "titleNotification",
                  messageNotification: "messageNotification",
                  sampleRate: 16000,
                );
              },
              child: const Text("开始录制"),
            ),
            TextButton(
              onPressed: () async {
                String path = await SystemAudioRecorder.stopRecord;
                print(path);
              },
              child: const Text("停止录制"),
            ),
          ],
        ),
      ),
    );
  }
}
效果图如下
 
 录音存放的位置(mumu模拟器中)
