using System;
using System.Collections.Generic;
using System.Text;
using NAudio.Wave;
using System.Threading;
using System.Diagnostics;
using System.Runtime.InteropServices;
namespace NAudio.CoreAudioApi
{
/// <summary>
/// Audio Capture using Wasapi (shared mode, polled on a background thread).
/// See http://msdn.microsoft.com/en-us/library/dd370800%28VS.85%29.aspx
/// </summary>
public class WasapiCapture : IWaveIn
{
    // REFERENCE_TIME is expressed in 100-nanosecond units.
    private const long REFTIMES_PER_SEC = 10000000;
    private const long REFTIMES_PER_MILLISEC = 10000;
    // volatile: written by the control thread, read by the capture thread.
    private volatile bool stop;
    private byte[] recordBuffer;
    private Thread captureThread;
    private AudioClient audioClient;
    private int bytesPerFrame;

    /// <summary>
    /// Indicates recorded data is available
    /// </summary>
    public event EventHandler DataAvailable;

    /// <summary>
    /// Indicates that all recorded data has now been received.
    /// </summary>
    public event EventHandler RecordingStopped;

    /// <summary>
    /// Initialises a new instance of the WASAPI capture class,
    /// using the default console capture device.
    /// </summary>
    public WasapiCapture() :
        this(GetDefaultCaptureDevice())
    {
    }

    /// <summary>
    /// Initialises a new instance of the WASAPI capture class
    /// </summary>
    /// <param name="captureDevice">Capture device to use</param>
    public WasapiCapture(MMDevice captureDevice)
    {
        this.audioClient = captureDevice.AudioClient;
        // Default to the device's mix format; callers may override
        // WaveFormat before StartRecording.
        WaveFormat = audioClient.MixFormat;
    }

    /// <summary>
    /// Recording wave format
    /// </summary>
    public WaveFormat WaveFormat { get; set; }

    /// <summary>
    /// Gets the default audio capture device
    /// </summary>
    /// <returns>The default audio capture device</returns>
    public static MMDevice GetDefaultCaptureDevice()
    {
        MMDeviceEnumerator devices = new MMDeviceEnumerator();
        return devices.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console);
    }

    /// <summary>
    /// Initializes the shared-mode audio client and allocates the
    /// record buffer sized to the client's negotiated buffer.
    /// </summary>
    /// <exception cref="ArgumentException">
    /// Thrown if the device does not support <see cref="WaveFormat"/> in shared mode.
    /// </exception>
    private void InitializeCaptureDevice()
    {
        long requestedDuration = REFTIMES_PER_MILLISEC * 100;
        if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, WaveFormat))
        {
            throw new ArgumentException("Unsupported Wave Format");
        }
        audioClient.Initialize(AudioClientShareMode.Shared,
            AudioClientStreamFlags.None,
            requestedDuration,
            0,
            WaveFormat,
            Guid.Empty);
        int bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = WaveFormat.Channels * WaveFormat.BitsPerSample / 8;
        recordBuffer = new byte[bufferFrameCount * bytesPerFrame];
        Debug.WriteLine(string.Format("record buffer size = {0}", recordBuffer.Length));
    }

    /// <summary>
    /// Start Recording
    /// </summary>
    public void StartRecording()
    {
        InitializeCaptureDevice();
        ThreadStart start = delegate { this.CaptureThread(this.audioClient); };
        this.captureThread = new Thread(start);
        Debug.WriteLine("Thread starting...");
        this.stop = false;
        this.captureThread.Start();
    }

    /// <summary>
    /// Stop Recording. Blocks until the capture thread has drained and exited.
    /// </summary>
    public void StopRecording()
    {
        if (this.captureThread != null)
        {
            this.stop = true;
            Debug.WriteLine("Thread ending...");
            // wait for thread to end
            this.captureThread.Join();
            this.captureThread = null;
            Debug.WriteLine("Done.");
            this.stop = false;
        }
    }

    /// <summary>
    /// Capture thread body: polls the capture client at half the buffer
    /// duration until <see cref="stop"/> is set, then stops the client,
    /// disposes it, and raises <see cref="RecordingStopped"/>.
    /// </summary>
    /// <param name="client">The initialized audio client to capture from.</param>
    private void CaptureThread(AudioClient client)
    {
        Debug.WriteLine(client.BufferSize);
        int bufferFrameCount = audioClient.BufferSize;
        // Calculate the actual duration of the allocated buffer (in 100ns units).
        long actualDuration = (long)((double)REFTIMES_PER_SEC *
                             bufferFrameCount / WaveFormat.SampleRate);
        // Sleep half the buffer duration per pass so the buffer never fills.
        int sleepMilliseconds = (int)(actualDuration / REFTIMES_PER_MILLISEC / 2);
        AudioCaptureClient capture = client.AudioCaptureClient;
        client.Start();
        try
        {
            Debug.WriteLine(string.Format("sleep: {0} ms", sleepMilliseconds));
            while (!this.stop)
            {
                Thread.Sleep(sleepMilliseconds);
                ReadNextPacket(capture);
            }
        }
        finally
        {
            // BUG FIX: Stop/Dispose and the RecordingStopped event now run in
            // the finally block, so they fire even if the capture loop throws.
            // Previously an exception skipped client.Stop() and left
            // subscribers waiting for RecordingStopped forever.
            client.Stop();
            if (capture != null)
            {
                capture.Dispose();
            }
            client.Dispose();
            // Snapshot the delegate to avoid a race between the null
            // check and the invocation.
            EventHandler handler = RecordingStopped;
            if (handler != null)
            {
                handler(this, EventArgs.Empty);
            }
        }
        Debug.WriteLine("stop wasapi");
    }

    /// <summary>
    /// Drains all currently available packets from the capture client into
    /// <see cref="recordBuffer"/> and raises <see cref="DataAvailable"/>
    /// with the number of bytes recorded this pass.
    /// </summary>
    /// <param name="capture">The capture client to read packets from.</param>
    private void ReadNextPacket(AudioCaptureClient capture)
    {
        IntPtr buffer;
        int framesAvailable;
        AudioClientBufferFlags flags;
        int packetSize = capture.GetNextPacketSize();
        int recordBufferOffset = 0;
        while (packetSize != 0)
        {
            buffer = capture.GetBuffer(out framesAvailable, out flags);
            int bytesAvailable = framesAvailable * bytesPerFrame;
            // BUG FIX: clamp to the remaining space in recordBuffer. WASAPI
            // should never deliver more than one buffer's worth of frames
            // between polls, but if it does, Marshal.Copy previously threw
            // and silently killed the capture thread. We still release the
            // full packet below, as the WASAPI contract requires.
            int bytesToCopy = Math.Min(bytesAvailable, recordBuffer.Length - recordBufferOffset);
            // if not silence...
            if ((flags & AudioClientBufferFlags.Silent) != AudioClientBufferFlags.Silent)
            {
                Marshal.Copy(buffer, recordBuffer, recordBufferOffset, bytesToCopy);
            }
            else
            {
                // Silent packet: the device data is undefined; emit zeros.
                Array.Clear(recordBuffer, recordBufferOffset, bytesToCopy);
            }
            recordBufferOffset += bytesToCopy;
            capture.ReleaseBuffer(framesAvailable);
            packetSize = capture.GetNextPacketSize();
        }
        // Snapshot the delegate to avoid a race between the null
        // check and the invocation.
        EventHandler handler = DataAvailable;
        if (handler != null)
        {
            handler(this, new WaveInEventArgs(recordBuffer, recordBufferOffset));
        }
    }

    /// <summary>
    /// Dispose: stops recording and waits for the capture thread to finish.
    /// </summary>
    public void Dispose()
    {
        StopRecording();
    }
}
}