Track track;
/// Global key so we can pause/resume the player via the API.
var playerStateKey = GlobalKey<SoundPlayerUIState>();

@override
void initState()
{
    super.initState();
    track = Track.fromAsset('assets/rock.mp3', mediaFormat: Mp3MediaFormat());
}

@override
Widget build(BuildContext context)
{
    var player = SoundPlayerUI.fromTrack(track, key: playerStateKey);
    return Column(children: [
        player,
        RaisedButton(child: Text('Pause'), onPressed: () => playerStateKey.currentState.pause()),
        RaisedButton(child: Text('Resume'), onPressed: () => playerStateKey.currentState.resume()),
    ]);
}

You can also let SoundPlayerUI load the track on demand:

@override
Widget build(BuildContext context)
{
    return SoundPlayerUI.fromLoader((context) => loadTrack());
}

Future<Track> loadTrack() async
{
    var track = Track.fromAsset('assets/rock.mp3', mediaFormat: Mp3MediaFormat());
    track.title = 'Asset playback.';
    track.artist = 'By sounds';
    return track;
}
Set the audio focus:

myPlayer.setAudioFocus(focus: AudioFocus.requestFocusAndDuckOthers);
You can drive your own UI progress bar from the current playback position:

Duration progress = (await myPlayer.getProgress())['progress'];
Duration duration = (await myPlayer.getProgress())['duration'];
setUIProgressBar(progress: Duration(milliseconds: progress.inMilliseconds - 500), duration: duration); // setUIProgressBar() is your own UI callback
Pause the current playback:
await myPlayer.pausePlayer();
Resume the current playback:
await myPlayer.resumePlayer();
Tell the OS what is playing, so the lock screen can display the track:

Track track = Track( codec: Codec.opusOGG, trackPath: fileUri, trackAuthor: '3 Inches of Blood', trackTitle: 'Axes of Evil', albumArtAsset: albumArt );
await myPlayer.nowPlaying(track);
Seek to a position within the current playback:
await myPlayer.seekToPlayer(Duration(milliseconds: milliSecs));
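For example, a small sketch (assuming myPlayer is an open FlutterSoundPlayer; the 10-second offset is an arbitrary choice) that seeks relative to the current position:

Map<String, Duration> progress = await myPlayer.getProgress();
await myPlayer.seekToPlayer(progress['progress'] + Duration(seconds: 10)); // jump 10 seconds forward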
Play live PCM data by pushing buffers into the player's foodSink:
await myPlayer.startPlayerFromStream(codec: Codec.pcm16, numChannels: 1, sampleRate: 48000);
myPlayer.foodSink.add(FoodData(aBuffer));
myPlayer.foodSink.add(FoodData(anotherBuffer));
myPlayer.foodSink.add(FoodData(myOtherBuffer));
myPlayer.foodSink.add(FoodEvent(() { myPlayer.stopPlayer(); }));
Subscribe to playback progress events:

_playerSubscription = myPlayer.onProgress.listen((e)
{
    Duration maxDuration = e.duration;
    Duration position = e.position;
    // ...
});
Check whether the current platform can decode a given codec:
if ( await myPlayer.isDecoderSupported(Codec.opusOGG) ) doSomething;
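A minimal sketch (assuming myPlayer is open; myUri and the aacADTS fallback are hypothetical choices) that picks a playable codec at runtime:

Codec codec = await myPlayer.isDecoderSupported(Codec.opusOGG) ? Codec.opusOGG : Codec.aacADTS;
Duration d = await myPlayer.startPlayer(fromURI: myUri, codec: codec);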
startPlayer() can play from a file path or URI (or from a memory buffer, via its fromDataBuffer: parameter):
Directory tempDir = await getTemporaryDirectory();
File fin = File('${tempDir.path}/flutter_sound-tmp.aac');
Duration d = await myPlayer.startPlayer(fromURI: fin.path, codec: Codec.aacADTS);
_playerSubscription = myPlayer.onProgress.listen((e)
{
// ...
});
Play a remote file directly from its URL:
final fileUri = "https://file-examples.com/wp-content/uploads/2017/11/file_example_MP3_700KB.mp3";
Duration d = await myPlayer.startPlayer
(
fromURI: fileUri,
codec: Codec.mp3,
whenFinished: ()
{
print( 'I hope you enjoyed listening to this song' );
},
);
Get the current playback position and total duration:

Duration progress = (await myPlayer.getProgress())['progress'];
Duration duration = (await myPlayer.getProgress())['duration'];
Create a player instance:
FlutterSoundPlayer myPlayer = FlutterSoundPlayer();
Stop the player and cancel any progress subscription:
await myPlayer.stopPlayer();
if (_playerSubscription != null)
{
_playerSubscription.cancel();
_playerSubscription = null;
}
Play a Track, with metadata for the lock screen:

final fileUri = "https://file-examples.com/wp-content/uploads/2017/11/file_example_MP3_700KB.mp3";
Track track = Track( codec: Codec.opusOGG, trackPath: fileUri, trackAuthor: '3 Inches of Blood', trackTitle: 'Axes of Evil', albumArtAsset: albumArt );
Duration d = await myPlayer.startPlayerFromTrack
(
track,
whenFinished: ()
{
print( 'I hope you enjoyed listening to this song' );
},
);
The foodSink accepts both FoodData buffers and FoodEvent callbacks:
await myPlayer.startPlayerFromStream(codec: Codec.pcm16, numChannels: 1, sampleRate: 48000);
myPlayer.foodSink.add(FoodData(aBuffer));
myPlayer.foodSink.add(FoodData(anotherBuffer));
myPlayer.foodSink.add(FoodData(myOtherBuffer));
myPlayer.foodSink.add(FoodEvent(() async { await myPlayer.stopPlayer(); setState((){}); }));
Set the interval between two progress events:
myPlayer.setSubscriptionDuration(Duration(milliseconds: 100));
Set the playback volume (0.0 to 1.0):
await myPlayer.setVolume(0.1);
Get the duration of an audio file:
Duration t = await flutterSoundHelper.duration(aPathFile);
Set the interval between two recorder progress events:

// Duration.zero is the default
myRecorder.setSubscriptionDuration(Duration(milliseconds: 10));
Resume a paused recording:
await myRecorder.resumeRecorder();
Release the player in your widget's dispose() method:

@override
void dispose()
{
    if (myPlayer != null)
    {
        myPlayer.closeAudioSession();
        myPlayer = null;
    }
    super.dispose();
}

Do not open a new audio session each time you need to play something:

while (aCondition) // *DON'T DO THAT*
{
    flutterSound = FlutterSoundPlayer().openAudioSession(); // A **new** Flutter Sound instance is created and opened
    flutterSound.startPlayer(bipSound);
}

Instead, open one session and reuse it:
myPlayer = await FlutterSoundPlayer().openAudioSession(focus: AudioFocus.requestFocusAndDuckOthers, audioFlags: outputToSpeaker | allowBlueTooth);
...
(do something with myPlayer)
...
await myPlayer.closeAudioSession();
myPlayer = null;
The isPlaying getter is true when the player is in the "Playing" mode.
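For example, a minimal sketch (assuming myPlayer is an open FlutterSoundPlayer) that pauses only when something is actually playing:

if (myPlayer.isPlaying)
{
    await myPlayer.pausePlayer();
}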
Set the audio focus:
myPlayer.setAudioFocus(focus: AudioFocus.requestFocusAndDuckOthers);
Query the player state:

switch(myPlayer.playerState)
{
case PlayerState.isPlaying: doSomething; break;
case PlayerState.isStopped: doSomething; break;
case PlayerState.isPaused: doSomething; break;
}
...
if (myPlayer.isStopped) doSomething;
if (myPlayer.isPlaying) doSomething;
if (myPlayer.isPaused) doSomething;
...
PlayerState theState = await myPlayer.getPlayerState();
...
Feed the player from a stream; each feedFromStream() call waits until the player is ready to accept more data:
await myPlayer.startPlayerFromStream(codec: Codec.pcm16, numChannels: 1, sampleRate: 48000);
await myPlayer.feedFromStream(aBuffer);
await myPlayer.feedFromStream(anotherBuffer);
await myPlayer.feedFromStream(myOtherBuffer);
await myPlayer.stopPlayer();
Alternatively, push buffers into foodSink; unlike feedFromStream(), add() returns immediately:
await myPlayer.startPlayerFromStream(codec: Codec.pcm16, numChannels: 1, sampleRate: 48000);
myPlayer.foodSink.add(FoodData(aBuffer));
myPlayer.foodSink.add(FoodData(anotherBuffer));
myPlayer.foodSink.add(FoodData(myOtherBuffer));
myPlayer.foodSink.add(FoodEvent(() { myPlayer.stopPlayer(); }));
The isRecording getter is true when the recorder is in the "Recording" mode.
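For example, a minimal sketch (assuming myRecorder is an open FlutterSoundRecorder) that stops only when a recording is actually in progress:

if (myRecorder.isRecording)
{
    await myRecorder.stopRecorder();
}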
Subscribe to recorder progress events:

_recorderSubscription = myRecorder.onProgress.listen((e)
{
Duration maxDuration = e.duration;
    double decibels = e.decibels;
    // ...
});
Query the recorder state:
switch(myRecorder.recorderState)
{
case RecorderState.isRecording: doSomething; break;
case RecorderState.isStopped: doSomething; break;
case RecorderState.isPaused: doSomething; break;
}
...
if (myRecorder.isStopped) doSomething;
if (myRecorder.isRecording) doSomething;
if (myRecorder.isPaused) doSomething;
Start a recording (requesting microphone permission first):

// Request Microphone permission if needed
PermissionStatus status = await Permission.microphone.request();
if (status != PermissionStatus.granted)
{
    throw RecordingPermissionException("Microphone permission not granted");
}
Directory tempDir = await getTemporaryDirectory();
File outputFile = File('${tempDir.path}/flutter_sound-tmp.aac');
await myRecorder.startRecorder(toFile: outputFile.path, codec: Codec.aacADTS);
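Putting the calls together, a record-then-play round trip might look like this (a sketch, assuming myRecorder and myPlayer are already open and the microphone permission is granted; the 5-second recording window is arbitrary):

Directory tempDir = await getTemporaryDirectory();
String path = '${tempDir.path}/flutter_sound-tmp.aac';
await myRecorder.startRecorder(toFile: path, codec: Codec.aacADTS);
await Future.delayed(Duration(seconds: 5)); // record for five seconds
await myRecorder.stopRecorder();
await myPlayer.startPlayer(fromURI: path, codec: Codec.aacADTS); // play back the recording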
Pause the recorder:
await myRecorder.pauseRecorder();
Check whether the current platform can encode a given codec:
if ( await myRecorder.isEncoderSupported(Codec.opusOGG) ) doSomething;
Extract the raw PCM data from a WAVE buffer:

Uint8List pcmBuffer = flutterSoundHelper.waveToPCMBuffer(inputBuffer: aWaveBuffer);
Convert a WAVE file to raw PCM:

String inputFile = '$myInputPath/bar.wav';
var tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/$foo.pcm';
await flutterSoundHelper.waveToPCM(inputFile: inputFile, outputFile: outputFile);
Check whether FFmpeg is available before calling FFmpeg-based helpers:
if ( await flutterSoundHelper.isFFmpegAvailable() )
{
    Duration d = await flutterSoundHelper.duration("$myFilePath/bar.wav");
}
Release the recorder in your widget's dispose() method:

@override
void dispose()
{
    if (myRecorder != null)
    {
        myRecorder.closeAudioSession();
        myRecorder = null;
    }
    super.dispose();
}

Do not open a new audio session each time you need to record something:

while (aCondition) // *DON'T DO THAT*
{
    flutterSound = FlutterSoundRecorder().openAudioSession(); // A **new** Flutter Sound instance is created and opened
    ...
}

Instead, open one session and reuse it:
myRecorder = await FlutterSoundRecorder().openAudioSession();
...
(do something with myRecorder)
...
myRecorder.closeAudioSession();
myRecorder = null;
Get the duration of an audio file:

Duration d = await flutterSoundHelper.duration("$myFilePath/bar.wav");
Stop the recorder and cancel the progress subscription:
await myRecorder.stopRecorder();
if (_recorderSubscription != null)
{
_recorderSubscription.cancel();
_recorderSubscription = null;
}
Wrap a raw PCM buffer into a WAVE buffer (adding a WAVE header):
Uint8List myWavBuffer = await flutterSoundHelper.pcmToWaveBuffer(inputBuffer: myPCMBuffer, numChannels: 1, sampleRate: 8000);
Convert a raw PCM file to a WAVE file:
String inputFile = '$myInputPath/bar.pcm';
var tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/$foo.wav';
await flutterSoundHelper.pcmToWave(inputFile: inputFile, outputFile: outputFile, numChannels: 1, sampleRate: 8000);
Print the console output of the last FFmpeg command:
print( await getLastFFmpegCommandOutput() );
Get the return code of the last FFmpeg command:
int result = await getLastFFmpegReturnCode();
Run FFmpeg with arbitrary arguments:
int rc = await flutterSoundHelper.executeFFmpegWithArguments
([
'-loglevel',
'error',
'-y',
'-i',
infile,
'-c:a',
'copy',
outfile,
]); // remux OGG to CAF
Convert an audio file from one codec to another:
String inputFile = '$myInputPath/bar.wav';
var tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/$foo.mp3';
await flutterSoundHelper.convertFile(inputFile, Codec.pcm16WAV, outputFile, Codec.mp3);
Query media information about a file with FFmpeg:
Map<dynamic, dynamic> info = await flutterSoundHelper.FFmpegGetMediaInformation( uri );
The FlutterSoundHelper API, with signatures and examples:

FlutterSoundHelper flutterSoundHelper = FlutterSoundHelper(); // Singleton

Duration t = await flutterSoundHelper.duration(aPathFile);

Future<bool> convertFile(String infile, Codec codecin, String outfile, Codec codecout) async

String inputFile = '$myInputPath/bar.wav';
var tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/$foo.mp3';
await flutterSoundHelper.convertFile(inputFile, Codec.pcm16WAV, outputFile, Codec.mp3);

Future<void> pcmToWave({String inputFile, String outputFile, int numChannels, int sampleRate}) async

String inputFile = '$myInputPath/bar.pcm';
var tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/$foo.wav';
await flutterSoundHelper.pcmToWave(inputFile: inputFile, outputFile: outputFile, numChannels: 1, sampleRate: 8000);

Future<Uint8List> pcmToWaveBuffer({Uint8List inputBuffer, int numChannels, int sampleRate}) async

Uint8List myWavBuffer = await flutterSoundHelper.pcmToWaveBuffer(inputBuffer: myPCMBuffer, numChannels: 1, sampleRate: 8000);

Future<void> waveToPCM({String inputFile, String outputFile}) async

String inputFile = '$myInputPath/bar.wav';
var tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/$foo.pcm';
await flutterSoundHelper.waveToPCM(inputFile: inputFile, outputFile: outputFile);

Uint8List waveToPCMBuffer(Uint8List inputBuffer)

Uint8List pcmBuffer = flutterSoundHelper.waveToPCMBuffer(inputBuffer: aWaveBuffer);

Future<Duration> duration(String uri) async

Duration d = await flutterSoundHelper.duration("$myFilePath/bar.wav");

Future<bool> isFFmpegAvailable() async

if ( await flutterSoundHelper.isFFmpegAvailable() )
{
    Duration d = await flutterSoundHelper.duration("$myFilePath/bar.wav");
}

Future<int> executeFFmpegWithArguments(List<String> arguments)

int rc = await flutterSoundHelper.executeFFmpegWithArguments
([
    '-loglevel',
    'error',
    '-y',
    '-i',
    infile,
    '-c:a',
    'copy',
    outfile,
]); // remux OGG to CAF

Future<int> getLastFFmpegReturnCode() async

int result = await getLastFFmpegReturnCode();

Future<String> getLastFFmpegCommandOutput() async

print( await getLastFFmpegCommandOutput() );

Future<Map<dynamic, dynamic>> FFmpegGetMediaInformation(String uri) async

Map<dynamic, dynamic> info = await flutterSoundHelper.FFmpegGetMediaInformation( uri );
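As a usage sketch (assuming myPlayer is an open FlutterSoundPlayer and myPCMBuffer holds raw 16-bit mono PCM sampled at 8000 Hz), you can wrap a PCM buffer with a WAVE header and play it straight from memory:

Uint8List wavBuffer = await flutterSoundHelper.pcmToWaveBuffer(inputBuffer: myPCMBuffer, numChannels: 1, sampleRate: 8000);
await myPlayer.startPlayer(fromDataBuffer: wavBuffer, codec: Codec.pcm16WAV);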
Close your sessions when you are done:

myPlayer.closeAudioSession();
myRecorder.closeAudioSession();

Getting started: import the package, then create and open a player and a recorder.

import 'package:flutter_sound/flutter_sound.dart';

FlutterSoundPlayer myPlayer = FlutterSoundPlayer();
myPlayer.openAudioSession().then( (){ ...} );

FlutterSoundRecorder myRecorder = FlutterSoundRecorder();
myRecorder.openAudioSession().then( (){ ...} );
/// global key so we can pause/resume the player via the api.
var playerStateKey = GlobalKey<SoundPlayerUIState>();
void initState()
{
track = Track.fromAsset('assets/rock.mp3', mediaFormat: Mp3MediaFormat());
}
Widget build(BuildContext build)
{
var player = SoundPlayerUI.fromTrack(track, key: playerStateKey);
return
Column(child: [
player,
RaisedButton("Pause", onPressed: () => playerState.currentState.pause()),
RaisedButton("Resume", onPressed: () => playerState.currentState.resume())
]);
}Track track;
void initState()
{
track = Track.fromAsset('assets/rock.mp3', mediaFormat: Mp3MediaFormat());
}
Widget build(BuildContext build)
{
return SoundPlayerUI.fromLoader((context) => loadTrack());
}
Future<Track> loadTrack()
{
Track track;
track = Track.fromAsset('assets/rock.mp3', mediaFormat: Mp3MediaFormat());
track.title = "Asset playback.";
track.artist = "By sounds";
} import 'package:flauto/flutter_sound.dart';
...
FlutterSoundPlayer _myPlayer = FlutterSoundPlayer();

@override
void initState() {
super.initState();
// Be careful : openAudioSession return a Future.
// Do not access your FlutterSoundPlayer or FlutterSoundRecorder before the completion of the Future
_myPlayer.openAudioSession().then((value) {
setState(() {
_mPlayerIsInited = true;
});
});
}
@override
void dispose() {
// Be careful : you must `close` the audio session when you have finished with it.
_myPlayer.closeAudioSession();
_myPlayer = null;
super.dispose();
}

void play() async {
await _myPlayer.startPlayer(
fromURI: _exampleAudioFilePathMP3,
codec: Codec.mp3,
whenFinished: (){setState((){});}
);
setState(() {});
}
Future<void> stopPlayer() async {
if (_myPlayer != null) {
await _myPlayer.stopPlayer();
}
}

FlutterSoundRecorder _myRecorder = FlutterSoundRecorder();

@override
void initState() {
super.initState();
// Be careful : openAudioSession return a Future.
// Do not access your FlutterSoundPlayer or FlutterSoundRecorder before the completion of the Future
_myRecorder.openAudioSession().then((value) {
setState(() {
_mRecorderIsInited = true;
});
});
}
@override
void dispose() {
// Be careful : you must `close` the audio session when you have finished with it.
_myRecorder.closeAudioSession();
_myRecorder = null;
super.dispose();
} Future<void> record() async {
await _myRecorder.startRecorder(
toFile: _mPath,
codec: Codec.aacADTS,
);
}
Future<void> stopRecorder() async {
await _myRecorder.stopRecorder();
}

Add flutter_sound to your pubspec.yaml:

dependencies:
  flutter:
    sdk: flutter
  flutter_sound: ^6.0.0

Or, for the LITE flavor:

dependencies:
  flutter:
    sdk: flutter
  flutter_sound_lite: ^6.0.0

To install from source, clone the repository and select the FULL flavor:

cd some/where
git clone https://github.com/canardoux/tau
cd some/where/flutter_sound
bin/flavor FULL

Then reference the local path in your pubspec.yaml:

dependencies:
  flutter:
    sdk: flutter
  flutter_sound:
    path: some/where/flutter_sound

For the LITE flavor from source:

cd some/where
git clone https://github.com/canardoux/tau
cd some/where/flutter_sound
bin/flavor LITE

dependencies:
  flutter:
    sdk: flutter
  flutter_sound_lite:
    path: some/where/flutter_sound

On iOS, add the relevant usage descriptions and background mode to your Info.plist:

<key>NSAppleMusicUsageDescription</key>
<string>MyApp does not need this permission</string>
<key>NSCalendarsUsageDescription</key>
<string>MyApp does not need this permission</string>
<key>NSCameraUsageDescription</key>
<string>MyApp does not need this permission</string>
<key>NSContactsUsageDescription</key>
<string>MyApp does not need this permission</string>
<key>NSLocationWhenInUseUsageDescription</key>
<string>MyApp does not need this permission</string>
<key>NSMotionUsageDescription</key>
<string>MyApp does not need this permission</string>
<key>NSSpeechRecognitionUsageDescription</key>
<string>MyApp does not need this permission</string>
<key>UIBackgroundModes</key>
<array>
<string>audio</string>
</array>
<key>NSMicrophoneUsageDescription</key>
<string>MyApp uses the microphone to record your speech and convert it to text.</string>

On Android, add the following permissions to your AndroidManifest.xml:

<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" /> <script src="assets/packages/flutter_sound_web/js/flutter_sound/flutter_sound.js"></script>
<script src="assets/packages/flutter_sound_web/js/flutter_sound/flutter_sound_player.js"></script>
<script src="assets/packages/flutter_sound_web/js/flutter_sound/flutter_sound_recorder.js"></script>
<script src="assets/packages/flutter_sound_web/js/howler/howler.js"></script> <script src="https://cdn.jsdelivr.net/npm/tau_engine@6/js/flutter_sound/flutter_sound.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/tau_engine@6/js/flutter_sound/flutter_sound_player.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/tau_engine@6/js/flutter_sound/flutter_sound_recorder.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/howler@2/dist/howler.min.js"></script>Cocoapods could not find compatible versions for pod ...cd ios
pod cache clean --all
rm Podfile.lock
rm -rf .symlinks/
cd ..
flutter clean
flutter pub get
cd ios
pod update
pod repo update
pod install --repo-update
pod update
pod install
cd ..

Recording PCM:
Directory tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/myFile.pcm';
await myRecorder.startRecorder
(
codec: Codec.pcm16,
toFile: outputFile,
sampleRate: 16000,
numChannels: 1,
);
...
myRecorder.stopRecorder();
...
await myPlayer.startPlayer
(
fromURI: outputFile,
codec: Codec.pcm16,
numChannels: 1,
sampleRate: 16000, // Used only with codec == Codec.pcm16
whenFinished: (){ /* Do something */},
);

Controls on the lock-screen:

SoundPlayer.withShadeUI(track);

var player = SoundPlayer.withShadeUI(track, canSkipBackward: true, canSkipForward: true);
player.onSkipBackwards = () => player.startPlayer(getPreviousTrack());
player.onSkipForwards = () => player.startPlayer(getNextTrack());

Playing from a live stream:

await myPlayer.startPlayerFromStream
(
    codec: Codec.pcm16,   // Actually this is the only codec possible
    numChannels: 1,       // Actually this is the only value possible. You cannot have several channels.
    sampleRate: 48100,    // This parameter is very important if you want to specify your own sample rate
);

Push a FoodEvent into foodSink to be called back when everything queued before it has been played:

myPlayer.foodSink.add
( FoodEvent
(
() async
{
await myPlayer.stopPlayer();
setState((){});
}
)
);

await myPlayer.startPlayerFromStream(codec: Codec.pcm16, numChannels: 1, sampleRate: 48000);
myPlayer.foodSink.add(FoodData(aBuffer));
myPlayer.foodSink.add(FoodData(anotherBuffer));
myPlayer.foodSink.add(FoodData(myOtherBuffer));
myPlayer.foodSink.add(FoodEvent(() { myPlayer.stopPlayer(); }));

Or feed the stream directly, awaiting each buffer:

await myPlayer.startPlayerFromStream(codec: Codec.pcm16, numChannels: 1, sampleRate: 48000);
await myPlayer.feedFromStream(aBuffer);
await myPlayer.feedFromStream(anotherBuffer);
await myPlayer.feedFromStream(myOtherBuffer);
await myPlayer.stopPlayer();

On Flutter Web, startPlayerFromStream() is not yet implemented.

Recording to a stream:

IOSink outputFile = await createFile(); // createFile() is your own helper that opens an IOSink
StreamController<Food> recordingDataController = StreamController<Food>();
_mRecordingDataSubscription =
recordingDataController.stream.listen
((Uint8List buffer)
{
outputFile.add(buffer);
}
);
await _mRecorder.startRecorder(
toStream: recordingDataController.sink,
codec: Codec.pcm16,
numChannels: 1,
sampleRate: 48000,
);

On Flutter Web you can record to a LocalSessionStorage key instead of a file:

await startRecorder(codec: Codec.opusWebM, toFile: 'foo'); // the LocalSessionStorage key `foo` will contain the URL of the recorded object
...
await stopRecorder();
await startPlayer(fromURI: 'foo'); // ('foo' is the LocalSessionStorage key of the recorded sound URL object)
Migration from a previous version: set your Player and Recorder subscriptions to null; if you use the LTS flavor, reference pod 'name/audio-lts', :path => File.join(symlink, 'ios') in your Podfile, then run pod cache clean --all and pod install --repo-update.

Flutter Sound is published under the GNU LESSER GENERAL PUBLIC LICENSE, Version 3, 29 June 2007.