This article gives a brief introduction to how the API is used in real-time audio/video scenarios.
I roughly break audio-device usage in real-time audio/video down into the aspects covered below.
Device management starts with the IMMDeviceEnumerator:

```cpp
IMMDeviceEnumerator* ptrEnumerator;
CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL, CLSCTX_ALL,
                 __uuidof(IMMDeviceEnumerator),
                 reinterpret_cast<void**>(&ptrEnumerator));
```

Through IMMDeviceEnumerator we can get the system default device (GetDefaultAudioEndpoint), enumerate the device collection (IMMDeviceCollection), fetch a specific device (GetDevice), and register a device listener (IMMNotificationClient, which reports device plug/unplug and state changes).
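For example, enumerating the active capture endpoints and reading their IDs and friendly names might look roughly like this sketch (error handling omitted; `ptrEnumerator` is the enumerator created above):

```cpp
#include <stdio.h>
#include <mmdeviceapi.h>
#include <functiondiscoverykeys_devpkey.h>

// Enumerate all active capture endpoints (microphones).
IMMDeviceCollection* pCollection = NULL;
ptrEnumerator->EnumAudioEndpoints(eCapture, DEVICE_STATE_ACTIVE, &pCollection);

UINT count = 0;
pCollection->GetCount(&count);
for (UINT i = 0; i < count; ++i) {
    IMMDevice* pDevice = NULL;
    pCollection->Item(i, &pDevice);

    // Endpoint ID string: can be stored and passed to GetDevice() later.
    LPWSTR pwszId = NULL;
    pDevice->GetId(&pwszId);

    // Friendly name shown to the user, e.g. "Microphone (USB Audio)".
    IPropertyStore* pProps = NULL;
    pDevice->OpenPropertyStore(STGM_READ, &pProps);
    PROPVARIANT varName;
    PropVariantInit(&varName);
    pProps->GetValue(PKEY_Device_FriendlyName, &varName);
    wprintf(L"%u: %s (%s)\n", i, varName.pwszVal, pwszId);

    PropVariantClear(&varName);
    CoTaskMemFree(pwszId);
    pProps->Release();
    pDevice->Release();
}
pCollection->Release();

// Device plug/unplug and default-device changes are delivered to an
// IMMNotificationClient implementation registered via
// ptrEnumerator->RegisterEndpointNotificationCallback(...).
```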
First we need an IMMDevice object, which can be obtained through the device-management functions above:
```cpp
IMMDevice* pDevice;

// GetDefault
ptrEnumerator->GetDefaultAudioEndpoint((EDataFlow)dir, (ERole)role /* eCommunications */, &pDevice);

// Get by path
ptrEnumerator->GetDevice(device_path, &pDevice);

// Get by index
pCollection->Item(index, &pDevice);
```
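The Initialize calls in the next block assume an IAudioClient (`ptrClient`) activated from this IMMDevice and a WAVEFORMATEX (`Wfx`) describing the stream format. A minimal sketch of that step, using the shared-mode mix format (a real RTC engine usually builds its own 16-bit PCM format and verifies it):

```cpp
#include <audioclient.h>

// Activate the audio client on the selected endpoint.
IAudioClient* ptrClient = NULL;
pDevice->Activate(__uuidof(IAudioClient), CLSCTX_ALL, NULL,
                  reinterpret_cast<void**>(&ptrClient));

// Simplest option: ask the audio engine for its shared-mode mix format.
WAVEFORMATEX* pMixFormat = NULL;
ptrClient->GetMixFormat(&pMixFormat);
// ... or fill a WAVEFORMATEX (Wfx) manually and check it with
// ptrClient->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, &Wfx, &pClosest).
```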
```cpp
// mic capturer
ptrClient->Initialize(AUDCLNT_SHAREMODE_SHARED,
                      AUDCLNT_STREAMFLAGS_EVENTCALLBACK | AUDCLNT_STREAMFLAGS_NOPERSIST,
                      0, 0, (WAVEFORMATEX*)&Wfx, NULL);

// playout render
ptrClient->Initialize(AUDCLNT_SHAREMODE_SHARED,
                      AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
                      0, 0, (WAVEFORMATEX*)&Wfx, NULL);

// playout capturer (loopback)
ptrClient->Initialize(AUDCLNT_SHAREMODE_SHARED,
                      AUDCLNT_STREAMFLAGS_LOOPBACK,
                      0, 0, (WAVEFORMATEX*)&Wfx, NULL);
```
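For the event-driven modes, Initialize() must be followed by SetEventHandle() and an explicit Start() before any data flows. A minimal sketch (`hCaptureEvent` is a hypothetical auto-reset event owned by the device code):

```cpp
// Event signalled by the audio engine each time a buffer is ready.
HANDLE hCaptureEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
ptrClient->SetEventHandle(hCaptureEvent);

// Grab the capture/render service interfaces (next section), then start.
ptrClient->Start();
// ... stream is running; call ptrClient->Stop() before re-initializing or releasing.
```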
Other interfaces can be activated from the same IMMDevice, for example IDeviceTopology:

```cpp
IDeviceTopology* pTopo;
pDevice->Activate(__uuidof(IDeviceTopology), CLSCTX_INPROC_SERVER, 0, &pTopo);
```
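IDeviceTopology is mainly useful for walking from the endpoint toward the underlying adapter device (connectors, parts, jack information). A minimal sketch, assuming all we want is whether the endpoint's first connector is wired up:

```cpp
#include <devicetopology.h>

IConnector* pConnector = NULL;
if (SUCCEEDED(pTopo->GetConnector(0, &pConnector))) {
    BOOL connected = FALSE;
    pConnector->IsConnected(&connected);  // is this connector attached in the topology?
    pConnector->Release();
}
```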
The three data scenarios are microphone capture, speaker playback, and speaker (loopback) capture. Both capture paths read data through IAudioCaptureClient, while playback writes through IAudioRenderClient:
```cpp
// Capture path (microphone or loopback)
IAudioCaptureClient* ptrCaptureClient;  // works for audio-in (mic) and audio-out (loopback) clients
ptrClient->GetService(__uuidof(IAudioCaptureClient), (void**)&ptrCaptureClient);

{  // work thread
    BYTE* pData = NULL;
    UINT32 framesAvailable = 0;
    DWORD flags = 0;
    UINT64 recPos = 0;
    UINT64 recTime = 0;

    // Wait for the samples-ready event, then drain the packet.
    ptrCaptureClient->GetBuffer(
        &pData,            // packet that is ready to be read
        &framesAvailable,  // #frames in the captured packet (can be zero)
        &flags,            // buffer status flags (silence, discontinuity, ...)
        &recPos,           // device position of the first audio frame in the packet
        &recTime);         // performance-counter value at the time of recording
    // ... process pData ...
    ptrCaptureClient->ReleaseBuffer(framesAvailable);
}

// Render path (playback)
IAudioRenderClient* ptrRenderClient;  // audio-out only
ptrClient->GetService(__uuidof(IAudioRenderClient), (void**)&ptrRenderClient);

{  // work thread
    BYTE* pData;  // buffer to fill, returned by GetBuffer
    UINT32 bufferLength = 0;
    ptrClient->GetBufferSize(&bufferLength);
    UINT32 playBlockSize = nSamplesPerSec / 100;  // 10 ms of frames

    // Wait for the buffer event, then top up the shared buffer.
    UINT32 padding = 0;
    ptrClient->GetCurrentPadding(&padding);
    if (bufferLength - padding > playBlockSize) {
        ptrRenderClient->GetBuffer(playBlockSize, &pData);  // request the buffer, then fill it
        ptrRenderClient->ReleaseBuffer(playBlockSize, 0);
    }
}
```
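Both fragments above run inside a loop on the device thread that waits for the event registered with SetEventHandle(). A skeleton of that loop (`hShutdownEvent` and `hCaptureEvent` are hypothetical handles owned by the device thread):

```cpp
HANDLE waitArray[2] = { hShutdownEvent, hCaptureEvent };
bool keepRunning = true;
while (keepRunning) {
    DWORD waitResult = WaitForMultipleObjects(2, waitArray, FALSE, 500);
    switch (waitResult) {
        case WAIT_OBJECT_0 + 0:  // shutdown requested
            keepRunning = false;
            break;
        case WAIT_OBJECT_0 + 1:  // samples ready: run the GetBuffer/ReleaseBuffer code above
            break;
        case WAIT_TIMEOUT:       // no event for 500 ms: the stream is probably stuck
        default:
            keepRunning = false;
            break;
    }
}
```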
In the overall audio/video system, the device data thread also needs to measure how long data processing takes and how much capture/playback buffer is queued, use the registered listeners to check device state, and work out the delays needed by AEC.
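A rough way to get the playout delay that the AEC needs is to combine the stream latency with the frames still queued in the shared buffer (a sketch only; `nSamplesPerSec` is the stream sample rate, and real engines refine this figure every 10 ms block):

```cpp
// Rough playout-delay estimate in milliseconds.
REFERENCE_TIME latency = 0;   // reported in 100-ns units
ptrClient->GetStreamLatency(&latency);

UINT32 padding = 0;           // frames still queued in the shared buffer
ptrClient->GetCurrentPadding(&padding);

int playoutDelayMs = (int)(latency / 10000) +
                     (int)((1000.0 * padding) / nSamplesPerSec);
```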
Volume control uses IAudioEndpointVolume, activated from the same IMMDevice:

```cpp
IAudioEndpointVolume* pVolume;
pDevice->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL,
                  reinterpret_cast<void**>(&pVolume));
```
```cpp
// Master volume as a scalar in the range 0.0 - 1.0.
float fLevel = 0.0f;
pVolume->GetMasterVolumeLevelScalar(&fLevel);
pVolume->SetMasterVolumeLevelScalar(fLevel, NULL);
```
```cpp
BOOL mute;
pVolume->GetMute(&mute);
pVolume->SetMute(mute, NULL);
```
```cpp
// cbSessionVolume must point to a concrete IAudioEndpointVolumeCallback
// implementation (sketched below).
IAudioEndpointVolumeCallback* cbSessionVolume;
pVolume->RegisterControlChangeNotify(cbSessionVolume);
```
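A minimal callback implementation might look like the sketch below (a hypothetical class; only the notification handler matters here, and the COM reference counting is deliberately simplified):

```cpp
#include <endpointvolume.h>

class VolumeCallback : public IAudioEndpointVolumeCallback {
 public:
  // IUnknown (simplified; production code needs proper lifetime handling).
  STDMETHODIMP QueryInterface(REFIID riid, void** ppv) override {
    if (riid == __uuidof(IUnknown) || riid == __uuidof(IAudioEndpointVolumeCallback)) {
      *ppv = this;
      AddRef();
      return S_OK;
    }
    *ppv = NULL;
    return E_NOINTERFACE;
  }
  STDMETHODIMP_(ULONG) AddRef() override { return InterlockedIncrement(&ref_); }
  STDMETHODIMP_(ULONG) Release() override { return InterlockedDecrement(&ref_); }

  // Called by the system whenever master volume or mute changes.
  STDMETHODIMP OnNotify(PAUDIO_VOLUME_NOTIFICATION_DATA pNotify) override {
    // pNotify->fMasterVolume is the new 0.0 - 1.0 level, pNotify->bMuted the mute state.
    return S_OK;
  }

 private:
  LONG ref_ = 1;
};
```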
Session-level notifications (state changes, the session being disconnected, per-session volume changes) are delivered through IAudioSessionEvents registered on the session control:

```cpp
IAudioSessionControl* ptrSessionControl;
ptrClient->GetService(__uuidof(IAudioSessionControl), (void**)&ptrSessionControl);

IAudioSessionEvents* notify;  // must point to a concrete implementation (sketched below)
ptrSessionControl->RegisterAudioSessionNotification(notify);
```
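A similar hypothetical sketch for the session listener; here only the disconnect notification does anything, which is typically what an RTC engine uses to trigger a device restart:

```cpp
#include <audiopolicy.h>

class SessionEvents : public IAudioSessionEvents {
 public:
  // IUnknown (simplified, same pattern as above).
  STDMETHODIMP QueryInterface(REFIID riid, void** ppv) override {
    if (riid == __uuidof(IUnknown) || riid == __uuidof(IAudioSessionEvents)) {
      *ppv = this;
      AddRef();
      return S_OK;
    }
    *ppv = NULL;
    return E_NOINTERFACE;
  }
  STDMETHODIMP_(ULONG) AddRef() override { return InterlockedIncrement(&ref_); }
  STDMETHODIMP_(ULONG) Release() override { return InterlockedDecrement(&ref_); }

  // The session was disconnected (device removed, format changed, ...).
  STDMETHODIMP OnSessionDisconnected(AudioSessionDisconnectReason reason) override {
    // e.g. on DisconnectReasonDeviceRemoval: signal the engine to restart the stream.
    return S_OK;
  }

  // Remaining notifications are ignored in this sketch.
  STDMETHODIMP OnDisplayNameChanged(LPCWSTR, LPCGUID) override { return S_OK; }
  STDMETHODIMP OnIconPathChanged(LPCWSTR, LPCGUID) override { return S_OK; }
  STDMETHODIMP OnSimpleVolumeChanged(float, BOOL, LPCGUID) override { return S_OK; }
  STDMETHODIMP OnChannelVolumeChanged(DWORD, float[], DWORD, LPCGUID) override { return S_OK; }
  STDMETHODIMP OnGroupingParamChanged(LPCGUID, LPCGUID) override { return S_OK; }
  STDMETHODIMP OnStateChanged(AudioSessionState) override { return S_OK; }

 private:
  LONG ref_ = 1;
};
```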
Finally, the capture/render work threads should register with MMCSS so the scheduler treats them as pro-audio threads; the Avrt functions are loaded dynamically:

```cpp
avrt_module_ = LoadLibrary(TEXT("Avrt.dll"));
if (avrt_module_) {
    _PAvRevertMmThreadCharacteristics = (PAvRevertMmThreadCharacteristics)GetProcAddress(
        avrt_module_, "AvRevertMmThreadCharacteristics");
    _PAvSetMmThreadCharacteristicsA = (PAvSetMmThreadCharacteristicsA)GetProcAddress(
        avrt_module_, "AvSetMmThreadCharacteristicsA");
    _PAvSetMmThreadPriority = (PAvSetMmThreadPriority)GetProcAddress(
        avrt_module_, "AvSetMmThreadPriority");
}
```
```cpp
// On the work thread: join the "Pro Audio" MMCSS task and raise the priority.
DWORD taskIndex = 0;
hMmTask_ = _PAvSetMmThreadCharacteristicsA("Pro Audio", &taskIndex);
if (hMmTask_) {
    _PAvSetMmThreadPriority(hMmTask_, AVRT_PRIORITY_CRITICAL);
}
```
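When the work thread exits, the registration should be reverted (and the module freed at shutdown); roughly:

```cpp
// On thread exit: leave the MMCSS task again.
if (hMmTask_ && _PAvRevertMmThreadCharacteristics) {
    _PAvRevertMmThreadCharacteristics(hMmTask_);
    hMmTask_ = NULL;
}
```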