getLastFFmpegCommandOutput() returns the output of the last FFmpeg command run by Flutter Sound:
print( await getLastFFmpegCommandOutput() );
resumePlayer() resumes a playback that was paused with pausePlayer():
await myPlayer.resumePlayer();
startPlayerFromTrack() plays a Track (which can be displayed on the lock screen / notification area) and returns the duration of the record:
final fileUri = "https://file-examples.com/wp-content/uploads/2017/11/file_example_MP3_700KB.mp3";
Track track = Track( codec: Codec.opusOGG, trackPath: fileUri, trackAuthor: '3 Inches of Blood', trackTitle: 'Axes of Evil', albumArtAsset: albumArt );
Duration d = await myPlayer.startPlayerFromTrack
(
track,
whenFinished: ()
{
print( 'I hope you enjoyed listening to this song' );
},
);
setAudioFocus() acquires or abandons the audio focus; requestFocusAndDuckOthers lowers the volume of other apps while we play:
myPlayer.setAudioFocus(focus: AudioFocus.requestFocusAndDuckOthers);
startPlayerFromStream() plays PCM data pushed into the player's foodSink; FoodData adds a buffer, and FoodEvent schedules a callback that runs once everything pushed before it has been played:
await myPlayer.startPlayerFromStream(codec: Codec.pcm16, numChannels: 1, sampleRate: 48000);
myPlayer.foodSink.add(FoodData(aBuffer));
myPlayer.foodSink.add(FoodData(anotherBuffer));
myPlayer.foodSink.add(FoodData(myOtherBuffer));
myPlayer.foodSink.add(FoodEvent((){myPlayer.stopPlayer();}));
_playerSubscription = myPlayer.onProgress.listen((e)
{
Duration maxDuration = e.duration;
Duration position = e.position;
...
});
Track track = Track( codec: Codec.opusOGG, trackPath: fileUri, trackAuthor: '3 Inches of Blood', trackTitle: 'Axes of Evil', albumArtAsset: albumArt );
await myPlayer.nowPlaying(track);
pausePlayer() pauses the current playback:
await myPlayer.pausePlayer();
seekToPlayer() moves the playback head to the given position:
await myPlayer.seekToPlayer(Duration(milliseconds: milliSecs));
getProgress() returns both the current position and the total duration of the playback in a single Map; setUIProgressBar() updates the progress bar shown on the lock screen:
Duration progress = (await myPlayer.getProgress())['progress'];
Duration duration = (await myPlayer.getProgress())['duration'];
await myPlayer.setUIProgressBar(progress: Duration(milliseconds: progress.inMilliseconds - 500), duration: duration);
startPlayer() plays from a file, a remote URI, or a data buffer, and returns the duration of the record:
Directory tempDir = await getTemporaryDirectory();
File fin = File('${tempDir.path}/flutter_sound-tmp.aac');
Duration d = await myPlayer.startPlayer(fromURI: fin.path, codec: Codec.aacADTS);
_playerSubscription = myPlayer.onProgress.listen((e)
{
// ...
});
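To illustrate how onProgress is typically wired into a UI, here is a minimal sketch, assuming it lives inside a State class that owns the myPlayer instance used above and that dart:async is imported; the _sliderPosition field and listenToPlayer() name are illustrative, not part of the Flutter Sound API:
StreamSubscription _playerSubscription;
double _sliderPosition = 0.0; // 0.0 .. 1.0, consumed by a Slider in build()

void listenToPlayer()
{
    // Ask for an onProgress event every 100 ms, then map position/duration to [0, 1].
    myPlayer.setSubscriptionDuration(Duration(milliseconds: 100));
    _playerSubscription = myPlayer.onProgress.listen((e)
    {
        setState(()
        {
            _sliderPosition = (e.duration.inMilliseconds == 0)
                ? 0.0
                : e.position.inMilliseconds / e.duration.inMilliseconds;
        });
    });
}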
The same call can play a remote file; the whenFinished callback fires when the sound reaches its end:
final fileUri = "https://file-examples.com/wp-content/uploads/2017/11/file_example_MP3_700KB.mp3";
Duration d = await myPlayer.startPlayer
(
fromURI: fileUri,
codec: Codec.mp3,
whenFinished: ()
{
print( 'I hope you enjoyed listening to this song' );
},
);
setVolume() sets the playback volume, between 0.0 and 1.0:
await myPlayer.setVolume(0.1);
isDecoderSupported() tells whether the given codec can be played on the current platform:
if ( await myPlayer.isDecoderSupported(Codec.opusOGG) ) doSomething;
Creating a player is just a constructor call; the player must then be opened with openAudioSession() before use:
FlutterSoundPlayer myPlayer = FlutterSoundPlayer();
stopPlayer() stops the playback; cancel your onProgress subscription if you registered one:
await myPlayer.stopPlayer();
if (_playerSubscription != null)
{
_playerSubscription.cancel();
_playerSubscription = null;
}
isPlaying is true when the player is in the "Playing" state. foodSink is the StreamSink used to feed a player started with startPlayerFromStream(); a FoodEvent callback runs once everything pushed before it has been played:
await myPlayer.startPlayerFromStream(codec: Codec.pcm16, numChannels: 1, sampleRate: 48000);
myPlayer.foodSink.add(FoodData(aBuffer));
myPlayer.foodSink.add(FoodData(anotherBuffer));
myPlayer.foodSink.add(FoodData(myOtherBuffer));
myPlayer.foodSink.add(FoodEvent(()async {await myPlayer.stopPlayer(); setState((){});}));
playerState, isPlaying, isPaused, isStopped and getPlayerState() report the current state of the player:
switch(myPlayer.playerState)
{
case PlayerState.isPlaying: doSomething; break;
case PlayerState.isStopped: doSomething; break;
case PlayerState.isPaused: doSomething; break;
}
...
if (myPlayer.isStopped) doSomething;
if (myPlayer.isPlaying) doSomething;
if (myPlayer.isPaused) doSomething;
...
PlayerState theState = await myPlayer.getPlayerState();
...
Do not forget to close your player's audio session when you are done with it, typically in your dispose() method:
@override
void dispose()
{
if (myPlayer != null)
{
myPlayer.closeAudioSession();
myPlayer = null;
}
super.dispose();
}
Never create a new player each time you need to play something; open one instance and reuse it:
while (aCondition) // *DON'T DO THAT*
{
flutterSound = FlutterSoundPlayer().openAudioSession(); // A **new** Flutter Sound instance is created and opened
flutterSound.startPlayer(bipSound);
}
Instead, open the player once and keep it:
myPlayer = await FlutterSoundPlayer().openAudioSession(focus: AudioFocus.requestFocusAndDuckOthers, audioFlags: outputToSpeaker | allowBlueTooth);
...
(do something with myPlayer)
...
await myPlayer.closeAudioSession();
myPlayer = null;
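Putting the lifecycle together, here is a minimal sketch of a State class that opens a single player in initState() and closes it in dispose(). The BipPlayer widget and the bipSound URI are hypothetical; only the openAudioSession(), startPlayer() and closeAudioSession() calls come from the API shown above:
class BipPlayerState extends State<BipPlayer>
{
    FlutterSoundPlayer myPlayer = FlutterSoundPlayer();

    @override
    void initState()
    {
        super.initState();
        // Open the audio session once, when the widget is created.
        myPlayer.openAudioSession(focus: AudioFocus.requestFocusAndDuckOthers).then((_)
        {
            setState(() {});
        });
    }

    @override
    void dispose()
    {
        // Release the session exactly once, when the widget goes away.
        myPlayer.closeAudioSession();
        super.dispose();
    }

    void bip() // Reuse the single open player; never open a new one per sound.
    {
        // Pass a codec: argument matching your sound file if needed.
        myPlayer.startPlayer(fromURI: bipSound, whenFinished: () { setState(() {}); });
    }

    @override
    Widget build(BuildContext context) => IconButton(icon: Icon(Icons.play_arrow), onPressed: bip);
}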
FlutterSoundPlayer myPlayer = FlutterSoundPlayer();
getProgress() returns a Map with the current playback position and the total duration:
Duration progress = (await myPlayer.getProgress())['progress'];
Duration duration = (await myPlayer.getProgress())['duration'];
isRecording is true when the recorder is in the "Recording" mode. pauseRecorder() pauses a recording in progress:
await myRecorder.pauseRecorder();
The helper's duration() function returns the duration of a sound file:
Duration t = await flutterSoundHelper.duration(aPathFile);
setSubscriptionDuration() sets the interval between two onProgress events:
myPlayer.setSubscriptionDuration(Duration(milliseconds: 100));
feedFromStream() is the synchronous way to feed a player started with startPlayerFromStream(); each call completes when the player is ready to accept more data:
await myPlayer.startPlayerFromStream(codec: Codec.pcm16, numChannels: 1, sampleRate: 48000);
await myPlayer.feedFromStream(aBuffer);
await myPlayer.feedFromStream(anotherBuffer);
await myPlayer.feedFromStream(myOtherBuffer);
await myPlayer.stopPlayer();
The same thing can be done asynchronously through the player's foodSink:
await myPlayer.startPlayerFromStream(codec: Codec.pcm16, numChannels: 1, sampleRate: 48000);
myPlayer.foodSink.add(FoodData(aBuffer));
myPlayer.foodSink.add(FoodData(anotherBuffer));
myPlayer.foodSink.add(FoodData(myOtherBuffer));
myPlayer.foodSink.add(FoodEvent((){myPlayer.stopPlayer();}));
With feedFromStream(), the await gives you flow control: the future completes when the player is ready for more data:
await myPlayer.startPlayerFromStream(codec: Codec.pcm16, numChannels: 1, sampleRate: 48000);
await myPlayer.feedFromStream(aBuffer);
await myPlayer.feedFromStream(anotherBuffer);
await myPlayer.feedFromStream(myOtherBuffer);
await myPlayer.stopPlayer();
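For reference, the buffers fed with feedFromStream() are plain interleaved little-endian 16-bit PCM samples. The sketch below (not part of the Flutter Sound API) synthesizes one second of a 440 Hz tone that matches the codec, channel count and sample rate used above, and feeds it to the player:
import 'dart:math';
import 'dart:typed_data';

Uint8List makeSineWave({int sampleRate = 48000, double freq = 440.0, int seconds = 1})
{
    final nSamples = sampleRate * seconds;
    final data = ByteData(nSamples * 2); // 2 bytes per 16-bit mono sample
    for (var i = 0; i < nSamples; i++)
    {
        final sample = (sin(2 * pi * freq * i / sampleRate) * 0.5 * 32767).round();
        data.setInt16(i * 2, sample, Endian.little);
    }
    return data.buffer.asUint8List();
}

// Usage with a player that is already open:
// await myPlayer.startPlayerFromStream(codec: Codec.pcm16, numChannels: 1, sampleRate: 48000);
// await myPlayer.feedFromStream(makeSineWave());
// await myPlayer.stopPlayer();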
setAudioFocus() can also be called while the player is open, to change how the audio focus is handled:
myPlayer.setAudioFocus(focus: AudioFocus.requestFocusAndDuckOthers);
recorderState, isRecording, isPaused and isStopped report the current state of the recorder:
switch(myRecorder.recorderState)
{
case RecorderState.isRecording: doSomething; break;
case RecorderState.isStopped: doSomething; break;
case RecorderState.isPaused: doSomething; break;
}
...
if (myRecorder.isStopped) doSomething;
if (myRecorder.isRecording) doSomething;
if (myRecorder.isPaused) doSomething;
startRecorder() starts recording to a file (or to a stream) with the given codec. The app must hold the microphone permission before calling it:
// Request Microphone permission if needed
PermissionStatus status = await Permission.microphone.request();
if (status != PermissionStatus.granted)
    throw RecordingPermissionException("Microphone permission not granted");
Directory tempDir = await getTemporaryDirectory();
File outputFile = File('${tempDir.path}/flutter_sound-tmp.aac');
await myRecorder.startRecorder(toFile: outputFile.path, codec: Codec.aacADTS,);
The recorder's onProgress stream reports the elapsed duration and the current sound level in dB:
_recorderSubscription = myRecorder.onProgress.listen((e)
{
    Duration maxDuration = e.duration;
    double decibels = e.decibels;
    ...
});
isEncoderSupported() tells whether the given codec can be recorded on the current platform:
if ( await myRecorder.isEncoderSupported(Codec.opusOGG) ) doSomething;
waveToPCMBuffer() strips the WAVE header from an in-memory buffer and returns the raw PCM data:
Uint8List pcmBuffer = flutterSoundHelper.waveToPCMBuffer(inputBuffer: aWaveBuffer);
resumeRecorder() resumes a paused recording:
await myRecorder.resumeRecorder();
stopRecorder() stops the recording; cancel your onProgress subscription when you are done:
await myRecorder.stopRecorder();
if (_recorderSubscription != null)
{
_recorderSubscription.cancel();
_recorderSubscription = null;
}
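As an end-to-end illustration, the calls above can be combined to record a short clip to a temporary AAC file and play it back. This is a sketch, assuming myRecorder and myPlayer are already open and that path_provider is available:
Future<void> recordThenPlay() async
{
    // Record to a temporary file.
    var tempDir = await getTemporaryDirectory();
    String path = '${tempDir.path}/flutter_sound-tmp.aac';
    await myRecorder.startRecorder(toFile: path, codec: Codec.aacADTS);

    // ... let the user speak for a while, then:
    await myRecorder.stopRecorder();

    // Play back what was just recorded.
    await myPlayer.startPlayer(
        fromURI: path,
        codec: Codec.aacADTS,
        whenFinished: () { print('Playback finished'); },
    );
}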
waveToPCM() converts a WAVE file to raw PCM:
String inputFile = '$myInputPath/bar.wav';
var tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/$foo.pcm';
await flutterSoundHelper.waveToPCM(inputFile: inputFile, outputFile: outputFile);
isFFmpegAvailable() tells whether FFmpeg is available in this build of Flutter Sound:
if ( await flutterSoundHelper.isFFmpegAvailable() )
{
    Duration d = await flutterSoundHelper.duration("$myFilePath/bar.wav");
}
getLastFFmpegReturnCode() returns the return code of the last FFmpeg command:
int result = await getLastFFmpegReturnCode();
The recorder's setSubscriptionDuration() sets the interval between two of its onProgress events:
// 0 is default
myRecorder.setSubscriptionDuration(Duration(milliseconds: 10));
Do not forget to close your recorder's audio session, typically in your dispose() method:
@override
void dispose()
{
if (myRecorder != null)
{
myRecorder.closeAudioSession();
myRecorder = null;
}
super.dispose();
}
Never create a new recorder each time you need to record something; open one instance and reuse it:
while (aCondition) // *DON'T DO THAT*
{
flutterSound = FlutterSoundRecorder().openAudioSession(); // A **new** Flutter Sound instance is created and opened
...
}
Instead, open the recorder once and keep it:
myRecorder = await FlutterSoundRecorder().openAudioSession();
...
(do something with myRecorder)
...
myRecorder.closeAudioSession();
myRecorder = null;
executeFFmpegWithArguments() runs an arbitrary FFmpeg command and returns its exit code:
int rc = await flutterSoundHelper.executeFFmpegWithArguments
([
'-loglevel',
'error',
'-y',
'-i',
infile,
'-c:a',
'copy',
outfile,
]); // remux OGG to CAF
print( await getLastFFmpegCommandOutput() );
Duration d = await flutterSoundHelper.duration("$myFilePath/bar.wav");
FFmpegGetMediaInformation() returns a Map describing a media file (format, duration, streams, ...):
Map<dynamic, dynamic> info = await flutterSoundHelper.FFmpegGetMediaInformation( uri );
The SoundPlayerUI widget builds a ready-made player widget from a Track; passing a GlobalKey lets you pause and resume it from your own code:
Track track;
/// global key so we can pause/resume the player via the api.
var playerStateKey = GlobalKey<SoundPlayerUIState>();
void initState()
{
track = Track.fromAsset('assets/rock.mp3', mediaFormat: Mp3MediaFormat());
}
Widget build(BuildContext context)
{
var player = SoundPlayerUI.fromTrack(track, key: playerStateKey);
return
Column(children: [
    player,
    RaisedButton(child: Text('Pause'), onPressed: () => playerStateKey.currentState.pause()),
    RaisedButton(child: Text('Resume'), onPressed: () => playerStateKey.currentState.resume())
]);
}
SoundPlayerUI.fromLoader() defers loading the Track until the widget is actually built:
Track track;
void initState()
{
track = Track.fromAsset('assets/rock.mp3', mediaFormat: Mp3MediaFormat());
}
Widget build(BuildContext context)
{
return SoundPlayerUI.fromLoader((context) => loadTrack());
}
Future<Track> loadTrack() async
{
    Track track;
    track = Track.fromAsset('assets/rock.mp3', mediaFormat: Mp3MediaFormat());
    track.title = "Asset playback.";
    track.artist = "By sounds";
    return track;
}
pcmToWave() adds a WAVE header in front of a raw PCM file; pcmToWaveBuffer() does the same for an in-memory buffer:
String inputFile = '$myInputPath/bar.pcm';
var tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/$foo.wav';
await flutterSoundHelper.pcmToWave(inputFile: inputFile, outputFile: outputFile, numChannels: 1, sampleRate: 8000);
Uint8List myWavBuffer = await flutterSoundHelper.pcmToWaveBuffer(inputBuffer: myPCMBuffer, numChannels: 1, sampleRate: 8000);
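One possible use of pcmToWaveBuffer() is to make a raw PCM buffer playable from memory, without writing a file, by handing the result to startPlayer() through its fromDataBuffer parameter. A sketch, assuming myPlayer is open and myPCMBuffer holds raw 16-bit mono PCM at 8000 Hz:
// Wrap the raw PCM in a WAVE header, then play the resulting buffer from memory.
Uint8List wavBuffer = await flutterSoundHelper.pcmToWaveBuffer(
    inputBuffer: myPCMBuffer,
    numChannels: 1,
    sampleRate: 8000,
);
await myPlayer.startPlayer(
    fromDataBuffer: wavBuffer,
    codec: Codec.pcm16WAV,
    whenFinished: () { print('Done'); },
);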
convertFile() converts a sound file from one codec to another (FFmpeg must be available in your build):
String inputFile = '$myInputPath/bar.wav';
var tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/$foo.mp3';
await flutterSoundHelper.convertFile(inputFile, Codec.pcm16WAV, outputFile, Codec.mp3);
A single FlutterSoundHelper instance is used for all the utility functions:
FlutterSoundHelper flutterSoundHelper = FlutterSoundHelper(); // Singleton
closeAudioSession() releases the audio session of a player or a recorder when you are done with it:
myPlayer.closeAudioSession();
myRecorder.closeAudioSession();

To get started, import the package, then create and open your player and/or recorder:
import 'package:flutter_sound/flutter_sound.dart';
FlutterSoundPlayer myPlayer = FlutterSoundPlayer();
myPlayer.openAudioSession().then( (value){ ...} );
FlutterSoundRecorder myRecorder = FlutterSoundRecorder();
myRecorder.openAudioSession().then( (value){ ...} );

The FlutterSoundHelper utility functions and their signatures:

Future<bool> convertFile( String infile, Codec codecin, String outfile, Codec codecout ) async
String inputFile = '$myInputPath/bar.wav';
var tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/$foo.mp3';
await flutterSoundHelper.convertFile(inputFile, Codec.pcm16WAV, outputFile, Codec.mp3);

Future<void> pcmToWave( { String inputFile, String outputFile, int numChannels, int sampleRate, } ) async
String inputFile = '$myInputPath/bar.pcm';
var tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/$foo.wav';
await flutterSoundHelper.pcmToWave(inputFile: inputFile, outputFile: outputFile, numChannels: 1, sampleRate: 8000);

Future<Uint8List> pcmToWaveBuffer( { Uint8List inputBuffer, int numChannels, int sampleRate, } ) async
Uint8List myWavBuffer = await flutterSoundHelper.pcmToWaveBuffer(inputBuffer: myPCMBuffer, numChannels: 1, sampleRate: 8000);

Future<void> waveToPCM( { String inputFile, String outputFile, } ) async
String inputFile = '$myInputPath/bar.wav';
var tempDir = await getTemporaryDirectory();
String outputFile = '${tempDir.path}/$foo.pcm';
await flutterSoundHelper.waveToPCM(inputFile: inputFile, outputFile: outputFile);

Uint8List waveToPCMBuffer (Uint8List inputBuffer)
Uint8List pcmBuffer = flutterSoundHelper.waveToPCMBuffer(inputBuffer: aWaveBuffer);

Future<Duration> duration(String uri) async
Duration d = await flutterSoundHelper.duration("$myFilePath/bar.wav");

Future<bool> isFFmpegAvailable() async
if ( await flutterSoundHelper.isFFmpegAvailable() )
{
    Duration d = await flutterSoundHelper.duration("$myFilePath/bar.wav");
}

Future<int> executeFFmpegWithArguments(List<String> arguments)
int rc = await flutterSoundHelper.executeFFmpegWithArguments
([
    '-loglevel',
    'error',
    '-y',
    '-i',
    infile,
    '-c:a',
    'copy',
    outfile,
]); // remux OGG to CAF

Future<int> getLastFFmpegReturnCode() async
int result = await getLastFFmpegReturnCode();

Future<String> getLastFFmpegCommandOutput() async
print( await getLastFFmpegCommandOutput() );

Future<Map<dynamic, dynamic>> FFmpegGetMediaInformation(String uri) async
Map<dynamic, dynamic> info = await flutterSoundHelper.FFmpegGetMediaInformation( uri );