Files
cuetools.net/CUETools.Codecs.CoreAudio/CoreAudioApi/WasapiCapture.cs
chudov 881951cb02 2.0.7
2010-03-30 02:41:43 +00:00

217 lines
7.0 KiB
C#

using System;
using System.Collections.Generic;
using System.Text;
using NAudio.Wave;
using System.Threading;
using System.Diagnostics;
using System.Runtime.InteropServices;
namespace NAudio.CoreAudioApi
{
/// <summary>
/// Audio Capture using Wasapi
/// See http://msdn.microsoft.com/en-us/library/dd370800%28VS.85%29.aspx
/// </summary>
public class WasapiCapture : IWaveIn
{
    // REFERENCE_TIME units: 100-nanosecond intervals.
    private const long REFTIMES_PER_SEC = 10000000;
    private const long REFTIMES_PER_MILLISEC = 10000;
    // volatile: written by StopRecording on the caller's thread, read by the capture thread.
    private volatile bool stop;
    private byte[] recordBuffer;
    private Thread captureThread;
    private AudioClient audioClient;
    private int bytesPerFrame;

    /// <summary>
    /// Indicates recorded data is available.
    /// Raised on the capture thread, not the thread that called StartRecording.
    /// </summary>
    public event EventHandler<WaveInEventArgs> DataAvailable;

    /// <summary>
    /// Indicates that all recorded data has now been received.
    /// Raised on the capture thread, even when the capture loop exits via an exception.
    /// </summary>
    public event EventHandler RecordingStopped;

    /// <summary>
    /// Initialises a new instance of the WASAPI capture class
    /// using the system default capture endpoint.
    /// </summary>
    public WasapiCapture()
        : this(GetDefaultCaptureDevice())
    {
    }

    /// <summary>
    /// Initialises a new instance of the WASAPI capture class
    /// </summary>
    /// <param name="captureDevice">Capture device to use</param>
    public WasapiCapture(MMDevice captureDevice)
    {
        this.audioClient = captureDevice.AudioClient;
        // Default to the endpoint's shared-mode mix format; callers may
        // assign a different WaveFormat before StartRecording.
        WaveFormat = audioClient.MixFormat;
    }

    /// <summary>
    /// Recording wave format. Must be supported in shared mode by the device;
    /// checked when recording starts.
    /// </summary>
    public WaveFormat WaveFormat { get; set; }

    /// <summary>
    /// Gets the default audio capture device
    /// </summary>
    /// <returns>The default audio capture device</returns>
    public static MMDevice GetDefaultCaptureDevice()
    {
        MMDeviceEnumerator devices = new MMDeviceEnumerator();
        return devices.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console);
    }

    /// <summary>
    /// Initializes the shared-mode audio client with a ~100 ms buffer and
    /// allocates the managed record buffer sized to one full endpoint buffer.
    /// </summary>
    /// <exception cref="ArgumentException">WaveFormat is not supported in shared mode.</exception>
    private void InitializeCaptureDevice()
    {
        long requestedDuration = REFTIMES_PER_MILLISEC * 100;
        if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, WaveFormat))
        {
            throw new ArgumentException("Unsupported Wave Format");
        }
        audioClient.Initialize(AudioClientShareMode.Shared,
            AudioClientStreamFlags.None,
            requestedDuration,
            0,
            WaveFormat,
            Guid.Empty);
        int bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = WaveFormat.Channels * WaveFormat.BitsPerSample / 8;
        recordBuffer = new byte[bufferFrameCount * bytesPerFrame];
        Debug.WriteLine(string.Format("record buffer size = {0}", recordBuffer.Length));
    }

    /// <summary>
    /// Start Recording
    /// </summary>
    /// <remarks>
    /// NOTE(review): the capture thread disposes the audio client when it ends,
    /// so this instance cannot be restarted after StopRecording — matches the
    /// original single-use behavior.
    /// </remarks>
    public void StartRecording()
    {
        InitializeCaptureDevice();
        ThreadStart start = delegate { this.CaptureThread(this.audioClient); };
        this.captureThread = new Thread(start);
        Debug.WriteLine("Thread starting...");
        this.stop = false;
        this.captureThread.Start();
    }

    /// <summary>
    /// Stop Recording
    /// </summary>
    public void StopRecording()
    {
        if (this.captureThread != null)
        {
            this.stop = true;
            Debug.WriteLine("Thread ending...");
            // wait for thread to end
            this.captureThread.Join();
            this.captureThread = null;
            Debug.WriteLine("Done.");
            this.stop = false;
        }
    }

    /// <summary>
    /// Capture loop: sleeps for half the buffer duration, then drains all
    /// pending packets, until StopRecording sets the stop flag.
    /// Disposes the capture client and audio client on exit.
    /// </summary>
    private void CaptureThread(AudioClient client)
    {
        Debug.WriteLine(client.BufferSize);
        int bufferFrameCount = audioClient.BufferSize;
        // Calculate the actual duration of the allocated buffer (100-ns units),
        // then poll at half that interval so we never fall a whole buffer behind.
        long actualDuration = (long)((double)REFTIMES_PER_SEC *
            bufferFrameCount / WaveFormat.SampleRate);
        int sleepMilliseconds = (int)(actualDuration / REFTIMES_PER_MILLISEC / 2);
        AudioCaptureClient capture = client.AudioCaptureClient;
        client.Start();
        try
        {
            Debug.WriteLine(string.Format("sleep: {0} ms", sleepMilliseconds));
            while (!this.stop)
            {
                Thread.Sleep(sleepMilliseconds);
                ReadNextPacket(capture);
            }
            client.Stop();
        }
        finally
        {
            if (capture != null)
            {
                capture.Dispose();
            }
            if (client != null)
            {
                client.Dispose();
            }
            // Raise in finally so subscribers are notified even if the
            // capture loop exited via an exception (original only raised
            // on a clean stop). Copy to a local to avoid a null race with
            // unsubscription from another thread.
            EventHandler stopped = RecordingStopped;
            if (stopped != null)
            {
                stopped(this, EventArgs.Empty);
            }
        }
        System.Diagnostics.Debug.WriteLine("stop wasapi");
    }

    /// <summary>
    /// Drains all currently queued WASAPI packets into recordBuffer and
    /// raises DataAvailable with the accumulated bytes.
    /// </summary>
    private void ReadNextPacket(AudioCaptureClient capture)
    {
        int packetSize = capture.GetNextPacketSize();
        int recordBufferOffset = 0;
        while (packetSize != 0)
        {
            int framesAvailable;
            AudioClientBufferFlags flags;
            IntPtr buffer = capture.GetBuffer(out framesAvailable, out flags);
            int bytesAvailable = framesAvailable * bytesPerFrame;
            // If this packet would overrun recordBuffer (more than one endpoint
            // buffer's worth of packets queued, e.g. after a scheduling stall),
            // flush what we have first instead of writing past the array.
            if (recordBufferOffset + bytesAvailable > recordBuffer.Length)
            {
                RaiseDataAvailable(recordBufferOffset);
                recordBufferOffset = 0;
            }
            // AUDCLNT_BUFFERFLAGS_SILENT means "treat as silence": the shared
            // buffer contents are undefined, so zero-fill instead of copying.
            if ((flags & AudioClientBufferFlags.Silent) != AudioClientBufferFlags.Silent)
            {
                Marshal.Copy(buffer, recordBuffer, recordBufferOffset, bytesAvailable);
            }
            else
            {
                Array.Clear(recordBuffer, recordBufferOffset, bytesAvailable);
            }
            recordBufferOffset += bytesAvailable;
            capture.ReleaseBuffer(framesAvailable);
            packetSize = capture.GetNextPacketSize();
        }
        // Original behavior preserved: fires even with zero bytes accumulated.
        RaiseDataAvailable(recordBufferOffset);
    }

    // Raises DataAvailable safely (local copy avoids a check/invoke null race).
    private void RaiseDataAvailable(int byteCount)
    {
        EventHandler<WaveInEventArgs> handler = DataAvailable;
        if (handler != null)
        {
            handler(this, new WaveInEventArgs(recordBuffer, byteCount));
        }
    }

    /// <summary>
    /// Dispose: stops recording and waits for the capture thread to finish.
    /// </summary>
    public void Dispose()
    {
        StopRecording();
    }
}
}