I've been trying out NAudio and it seems very interesting, at least for handling audio playback directly from streams in a WPF application.
Right now I'm trying to assemble something with basic functionality, so I'm experimenting with some code from the demos. I have the following class that loads and plays MP3s:
/// <summary>
/// Plays WAV and MP3 files through NAudio, forwarding FFT and max-sample
/// notifications from a <see cref="SampleAggregator"/> wired into the sample chain.
/// </summary>
public class AudioPlayer : IDisposable
{
    IWavePlayer playbackDevice;
    WaveStream fileStream;
    readonly SampleAggregator aggregator;

    /// <summary>Raised each time the aggregator completes an FFT block.</summary>
    public event EventHandler<FftEventArgs> FftCalculated
    {
        add { aggregator.FftCalculated += value; }
        remove { aggregator.FftCalculated -= value; }
    }

    /// <summary>Raised each time the aggregator reports a maximum sample value.</summary>
    public event EventHandler<MaxSampleEventArgs> MaximumCalculated
    {
        add { aggregator.MaximumCalculated += value; }
        remove { aggregator.MaximumCalculated -= value; }
    }

    /// <summary>
    /// Forwards the underlying device's PlaybackStopped event.
    /// BUG FIX: the original add-accessor dereferenced <c>playbackDevice</c>,
    /// which is null until Load() runs — subscribing before Load() threw a
    /// NullReferenceException. The device is now created on demand.
    /// </summary>
    public event EventHandler PlaybackStopped
    {
        add { EnsureDeviceCreated(); playbackDevice.PlaybackStopped += value; }
        remove { if (playbackDevice != null) playbackDevice.PlaybackStopped -= value; }
    }

    /// <summary>
    /// Current device playback state; Stopped when no device has been created yet.
    /// NOTE(review): in NAudio 1.5, WaveOut raises PlaybackStopped at end of
    /// stream without updating PlaybackState, so this can still report Playing
    /// inside a PlaybackStopped handler — a known limitation of that version.
    /// </summary>
    public PlaybackState PlaybackState
    {
        get { return playbackDevice != null ? playbackDevice.PlaybackState : PlaybackState.Stopped; }
    }

    public AudioPlayer()
    {
        aggregator = new SampleAggregator();
        aggregator.NotificationCount = 882; // 44100 / 882 = 50 notifications per second
        aggregator.PerformFFT = true;
    }

    /// <summary>Stops any current playback, closes the previous file, and opens <paramref name="fileName"/>.</summary>
    public void Load(string fileName)
    {
        Stop();
        CloseFile();
        EnsureDeviceCreated();
        OpenFile(fileName);
    }

    void CloseFile()
    {
        if (fileStream == null) return;
        fileStream.Dispose();
        fileStream = null;
    }

    void OpenFile(string fileName)
    {
        var inputStream = CreateInputStream(fileName);
        playbackDevice.Init(new SampleToWaveProvider(inputStream));
    }

    /// <summary>
    /// Builds the decode chain for the file and hooks the aggregator into it.
    /// Sets <see cref="fileStream"/> as a side effect so Stop()/CloseFile() can manage it.
    /// </summary>
    ISampleProvider CreateInputStream(string fileName)
    {
        // FIX: ordinal, case-insensitive match so "Track.MP3" / "Track.Wav"
        // are accepted and the check is culture-independent.
        if (fileName.EndsWith(".wav", StringComparison.OrdinalIgnoreCase))
            fileStream = OpenWavStream(fileName);
        else if (fileName.EndsWith(".mp3", StringComparison.OrdinalIgnoreCase))
            fileStream = new Mp3FileReader(fileName);
        else
            throw new InvalidOperationException("Unsupported extension");

        var inputStream = new SampleChannel(fileStream);
        var sampleStream = new NotifyingSampleProvider(inputStream);
        // Feed every left-channel sample to the aggregator for FFT / max tracking.
        sampleStream.Sample += (s, e) => aggregator.Add(e.Left);
        return sampleStream;
    }

    static WaveStream OpenWavStream(string fileName)
    {
        WaveStream readerStream = new WaveFileReader(fileName);
        if (readerStream.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
        {
            // Decode compressed WAV (e.g. ADPCM, mu-law) to PCM, then realign blocks.
            readerStream = WaveFormatConversionStream.CreatePcmStream(readerStream);
            readerStream = new BlockAlignReductionStream(readerStream);
        }
        return readerStream;
    }

    void EnsureDeviceCreated()
    {
        if (playbackDevice != null) return;
        CreateDevice();
    }

    void CreateDevice()
    {
        playbackDevice = new WaveOut();
    }

    public void Play()
    {
        if (playbackDevice != null && fileStream != null
            && playbackDevice.PlaybackState != PlaybackState.Playing)
        {
            playbackDevice.Play();
        }
    }

    public void Pause()
    {
        if (playbackDevice == null) return;
        playbackDevice.Pause();
    }

    public void Stop()
    {
        if (playbackDevice == null) return;
        playbackDevice.Stop();
        // BUG FIX: fileStream can legitimately be null here — Load() calls
        // Stop() before any file is open, and Dispose() after a CloseFile().
        // The original threw NullReferenceException on fileStream.Position.
        if (fileStream != null)
        {
            fileStream.Position = 0;
        }
    }

    public void Dispose()
    {
        Stop();
        CloseFile();
        if (playbackDevice != null)
        {
            playbackDevice.Dispose();
            playbackDevice = null;
        }
    }
}
Then I try the aforementioned class with the following:
/// <summary>
/// Minimal playback test: a button click loads an MP3, subscribes to
/// PlaybackStopped, and logs the PlaybackState before and after Play().
/// </summary>
public partial class MainWindow : Window
{
    AudioPlayer player;

    void Button_Click(object sender, RoutedEventArgs e)
    {
        player = new AudioPlayer();
        player.Load(@"c:\DIR1\P027I003.mp3");
        // Subscribed after Load() so the playback device already exists.
        player.PlaybackStopped += new EventHandler(player_PlaybackStopped);
        Debug.Print("PlaybackState: " + player.PlaybackState);
        player.Play();
        Debug.Print("play!");
        Debug.Print("PlaybackState: " + player.PlaybackState);
    }

    void player_PlaybackStopped(object sender, EventArgs e)
    {
        Debug.Print("playback stopped");
        // NOTE(review): with NAudio 1.5's WaveOut this still reports Playing,
        // because RaisePlaybackStoppedEvent fires without updating the state.
        Debug.Print("PlaybackState: " + player.PlaybackState);
    }
} // FIX: closing brace for the class was missing from the original snippet
When the MP3 file finishes, player_PlaybackStopped is invoked, but the PlaybackState remains at Playing.
I looked at WaveOut.cs and it seems that RaisePlaybackStoppedEvent is raised at the end of the stream, but that the PlaybackState is not changed.
Am I doing something incorrectly?
I'm using NAudio 1.5.