[MediaFoundation] OpenCV VideoCapture Audio-Reading Source Code

2024-01-08 19:33:14

An OpenCV audio-reading code example

On Windows 7 and later, and since OpenCV 4, audio can be read through the CAP_MSMF backend, although OpenCV itself provides no API for playing audio back. A code example is shown below. This article then walks through how OpenCV's CAP_MSMF backend reads audio from files and capture devices, as a way to learn how Media Foundation is used.

#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
using namespace cv;
using namespace std;

int main(int argc, const char** argv)
{
    Mat videoFrame;
    Mat audioFrame;
    vector<vector<Mat>> audioData;
    VideoCapture cap;
    vector<int> params {    CAP_PROP_AUDIO_STREAM, 0,
                            CAP_PROP_VIDEO_STREAM, -1,
                            CAP_PROP_AUDIO_DATA_DEPTH, CV_32F   };

    //cap.open(file, CAP_MSMF, params);   // alternative: open a media file instead of a device
    // open the first audio input device
    cap.open(0, CAP_MSMF, params);

    if (!cap.isOpened())
    {
        cerr << "ERROR! Can't to open file: " + file << endl;
        return -1;
    }

    const int audioBaseIndex = (int)cap.get(CAP_PROP_AUDIO_BASE_INDEX);
    const int numberOfChannels = (int)cap.get(CAP_PROP_AUDIO_TOTAL_CHANNELS);
    cout << "CAP_PROP_AUDIO_DATA_DEPTH: " << depthToString((int)cap.get(CAP_PROP_AUDIO_DATA_DEPTH)) << endl;
    cout << "CAP_PROP_AUDIO_SAMPLES_PER_SECOND: " << cap.get(CAP_PROP_AUDIO_SAMPLES_PER_SECOND) << endl;
    cout << "CAP_PROP_AUDIO_TOTAL_CHANNELS: " << cap.get(CAP_PROP_AUDIO_TOTAL_CHANNELS) << endl;
    cout << "CAP_PROP_AUDIO_TOTAL_STREAMS: " << cap.get(CAP_PROP_AUDIO_TOTAL_STREAMS) << endl;

    int numberOfSamples = 0;
    int numberOfFrames = 0;
    audioData.resize(numberOfChannels);
    // mfcap::AudioOutput is not part of OpenCV; it is a separate playback helper
    // (OpenCV has no audio playback API). It is opened with the stream's channel
    // count, sample rate and a 16-bit output format.
    mfcap::AudioOutput audioOutput;
    audioOutput.Open((int)cap.get(CAP_PROP_AUDIO_TOTAL_CHANNELS),
                     (int)cap.get(CAP_PROP_AUDIO_SAMPLES_PER_SECOND),
                     16);
    
    for (;;)
    {
        if (cap.grab())
        {
            //cap.retrieve(videoFrame);
            std::vector<const unsigned char*> planes;
            planes.resize(numberOfChannels);
            for (int nCh = 0; nCh < numberOfChannels; nCh++)
            {
                cap.retrieve(audioFrame, audioBaseIndex+nCh);
                if (!audioFrame.empty())
                {
                    audioData[nCh].push_back(audioFrame);
                    //planes[nCh] = audioFrame.data + nCh * audioFrame.cols;
                }
                numberOfSamples+=audioFrame.cols;
            }
        } else { break; }
    }

    cout << "Number of audio samples: " << numberOfSamples << endl
         << "Number of video frames: " << numberOfFrames << endl;
    return 0;
}
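The loop above only collects the per-channel chunks into audioData; each retrieved audioFrame is a single-row Mat of CV_32F samples, because CAP_PROP_AUDIO_DATA_DEPTH was set to CV_32F. As a minimal sketch (the channels vector below is introduced here for illustration and is not part of the original sample), the chunks of one channel can be concatenated into a single continuous signal with cv::hconcat:

// Sketch: merge the collected chunks of each channel into one continuous row.
std::vector<cv::Mat> channels(numberOfChannels);
for (int nCh = 0; nCh < numberOfChannels; nCh++)
{
    if (!audioData[nCh].empty())
        cv::hconcat(audioData[nCh], channels[nCh]); // join the 1 x N chunks along the sample axis
}
// channels[nCh] now holds the whole signal of channel nCh as 32-bit float samples.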

Opening a device

bool CvCapture_MSMF::open(int index, const cv::VideoCaptureParameters* params)
{
    // reset any previous state first
    close();
    if (index < 0)
        return false;

    if (params)
    {
        // enable hardware codec acceleration; skipped here and covered later
        // when we look at hardware acceleration.
        configureHW(*params);

        /* configureStreams selects which streams to capture:
           if the audio stream should be captured, audioStream = 0, otherwise audioStream = -1;
           the video stream works the same way through the videoStream variable. */

        /* setAudioProperties configures:
           outputAudioFormat: the audio sample depth, e.g. CV_16S
           audioSamplesPerSecond: the sample rate
           syncLastFrame: whether audio/video synchronization is required;
                          OpenCV only supports A/V sync for video files. */
        if (!(configureStreams(*params) && setAudioProperties(*params)))
            return false;
    }
	
    // Exactly one of the audio stream or the video stream must be enabled:
    // a single capture object cannot open both, and cannot open neither.
    if (videoStream != -1 && audioStream != -1 || videoStream == -1 && audioStream == -1)
    {
        CV_LOG_DEBUG(NULL, "Only one of the properties CAP_PROP_AUDIO_STREAM " << audioStream << " and " << CAP_PROP_VIDEO_STREAM << " must be different from -1");
        return false;
    }
    DeviceList devices;
    UINT32 count = 0;
    if (audioStream != -1)
        count = devices.read(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID);
    if (videoStream != -1)
        count = devices.read(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
    if (count == 0 || static_cast<UINT32>(index) > count)
    {
        CV_LOG_DEBUG(NULL, "Device " << index << " not found (total " << count << " devices)");
        return false;
    }
    _ComPtr<IMFAttributes> attr = getDefaultSourceConfig();
    _ComPtr<IMFSourceReaderCallback> cb = new SourceReaderCB();
    attr->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, cb.Get());
    _ComPtr<IMFMediaSource> src = devices.activateSource(index);
    if (!src.Get() || FAILED(MFCreateSourceReaderFromMediaSource(src.Get(), attr.Get(), &videoFileSource)))
    {
        CV_LOG_DEBUG(NULL, "Failed to create source reader");
        return false;
    }

    isOpen = true;
    device_status = true;
    camid = index;
    readCallback = cb;
    duration = 0;
    if (configureOutput())
    {
        frameStep = captureVideoFormat.getFrameStep();
    }
    if (isOpen && !openFinalize_(params))
    {
        close();
        return false;
    }
    if (isOpen)
    {
        if (audioStream != -1)
            if (!checkAudioProperties())
                return false;
    }

    return isOpen;
}
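As a reference for how these internals relate to the user-facing API, here is a minimal sketch (not from the original article) of opening a media file rather than a device: configureStreams picks up CAP_PROP_AUDIO_STREAM and CAP_PROP_VIDEO_STREAM, and setAudioProperties picks up the output depth and, for file sources only, the audio/video synchronization flag. The file name below is a placeholder.

// Sketch: open a media file with both the video and the audio stream enabled.
cv::VideoCapture cap;
std::vector<int> params {
    cv::CAP_PROP_AUDIO_STREAM, 0,           // configureStreams(): audioStream = 0
    cv::CAP_PROP_VIDEO_STREAM, 0,           // configureStreams(): videoStream = 0
    cv::CAP_PROP_AUDIO_DATA_DEPTH, CV_16S,  // setAudioProperties(): outputAudioFormat
    cv::CAP_PROP_AUDIO_SYNCHRONIZE, 1       // setAudioProperties(): syncLastFrame (files only)
};
cap.open("media.mp4", cv::CAP_MSMF, params); // "media.mp4" is a placeholder file name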

Source: https://blog.csdn.net/qq_30340349/article/details/135463864