- android - 多次调用 OnPrimaryClipChangedListener
- android - 无法更新 RecyclerView 中的 TextView 字段
- android.database.CursorIndexOutOfBoundsException : Index 0 requested, 光标大小为 0
- android - 使用 AppCompat 时,我们是否需要明确指定其 UI 组件(Spinner、EditText)颜色
我正在使用 DirectShow 控制相机设置,并用 OpenCV 捕获图像。我的问题是:捕获图像时,我设置的相机参数在捕获 2 或 3 次之后会被改变并恢复为默认值。这是我的大学项目所需,代码附在下面。我希望每次捕获的图像都使用相同的相机设置。我是这方面的新手,非常感谢任何解决方案。
#include <stdio.h>
#include <atlstr.h>
#include <dshow.h>
#include <opencv2\highgui\highgui.hpp>
#include <opencv2\imgproc\imgproc.hpp>
#include <opencv2\core\core.hpp>
#include <opencv\cv.h>
#include <iostream>
#include <streams.h>
// Required by the DirectShow base-class library pulled in via <streams.h>:
// the library references these two symbols at link time. We register no
// filter templates of our own, so the array stays empty and the count
// is left zero-initialized.
CFactoryTemplate g_Templates[1];
int g_cTemplates;
/// Force the capture pin to a 160x120 video format.
/// @param pCaptureGraphBuilder2  capture graph builder used to locate the stream-config interface
/// @param pConfig                unused by callers (passed by value); the interface is obtained
///                               and released locally
/// @param pDeviceFilter          the camera's capture filter
/// @param hr                     passed by value, so callers never see it; kept for interface
///                               compatibility
void setCameraMode(ICaptureGraphBuilder2 *pCaptureGraphBuilder2, IAMStreamConfig *pConfig, IBaseFilter *pDeviceFilter, HRESULT hr)
{
    // BUG FIX: removed the redundant CoInitialize(0) here — main() already
    // initializes COM, and every extra CoInitialize must be balanced by a
    // CoUninitialize or the apartment ref-count leaks.
    hr = pCaptureGraphBuilder2->FindInterface(&PIN_CATEGORY_CAPTURE, 0, pDeviceFilter, IID_IAMStreamConfig, (void**)&pConfig);
    if (FAILED(hr) || pConfig == NULL)
        return; // device exposes no stream-config interface; nothing to do
    int iCount = 0, iSize = 0;
    hr = pConfig->GetNumberOfCapabilities(&iCount, &iSize);
    // Check the size to make sure we pass in the correct structure.
    if (SUCCEEDED(hr) && iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
    {
        // Walk the capability list until one video format is patched and applied.
        for (int iFormat = 0; iFormat < iCount; iFormat++)
        {
            VIDEO_STREAM_CONFIG_CAPS scc;
            AM_MEDIA_TYPE *pmtConfig = NULL;
            hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
            if (FAILED(hr))
                continue; // BUG FIX: SetFormat was previously called even on failure,
                          // passing an uninitialized pmtConfig pointer
            if (pmtConfig->majortype == MEDIATYPE_Video &&
                pmtConfig->formattype == FORMAT_VideoInfo &&
                pmtConfig->pbFormat != NULL)
            {
                // Patch the format block to the resolution we want.
                VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
                pVih->bmiHeader.biWidth = 160;
                pVih->bmiHeader.biHeight = 120;
                pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader);
                hr = pConfig->SetFormat(pmtConfig);
                DeleteMediaType(pmtConfig); // BUG FIX: was commented out — leaked every iteration
                if (SUCCEEDED(hr))
                    break; // first successfully-applied format wins; no need to keep overwriting it
            }
            else
            {
                DeleteMediaType(pmtConfig); // free non-video / unexpected formats too
            }
        }
    }
    pConfig->Release(); // BUG FIX: interface obtained by FindInterface was never released
}
/// Switch exposure and focus to manual mode with the requested values.
/// @param pDeviceFilter  the camera's capture filter
/// @param hr             passed by value, callers never see it; kept for interface compatibility
/// @param exposure       requested exposure value, clamped to the device's reported range
/// @param focus          requested focus value, clamped to the device's reported range
void setCameraControl(IBaseFilter *pDeviceFilter, HRESULT hr, int exposure, int focus)
{
    // Query the capture filter for the IAMCameraControl interface.
    IAMCameraControl *pCameraControl = 0;
    hr = pDeviceFilter->QueryInterface(IID_IAMCameraControl, (void**)&pCameraControl);
    if (FAILED(hr))
    {
        // The device does not support IAMCameraControl.
        return;
    }
    long Min, Max, Step, Default, Flags;
    // BUG FIX: the two GetRange results were previously collapsed into one hr,
    // so a failed exposure query was masked by a successful focus query.
    // Each property now gets its own range check, and the value is clamped
    // so Set() is never handed an out-of-range number.
    if (SUCCEEDED(pCameraControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step, &Default, &Flags)))
    {
        // BUG FIX: the 'exposure' parameter was ignored — the code hard-coded -10.
        long value = exposure;
        if (value < Min) value = Min;
        if (value > Max) value = Max;
        pCameraControl->Set(CameraControl_Exposure, value, CameraControl_Flags_Manual);
    }
    if (SUCCEEDED(pCameraControl->GetRange(CameraControl_Focus, &Min, &Max, &Step, &Default, &Flags)))
    {
        long value = focus;
        if (value < Min) value = Min;
        if (value > Max) value = Max;
        pCameraControl->Set(CameraControl_Focus, value, CameraControl_Flags_Manual);
    }
    pCameraControl->Release(); // BUG FIX: interface was never released
}
void setCameraProperties(IBaseFilter *pDeviceFilter, HRESULT hr, int brightness, int backLightCompensation, int contrast, int saturation, int sharpness, int whiteBalance)
{
// Query the capture filter for the IAMVideoProcAmp interface.
IAMVideoProcAmp *pProcAmp = 0;
hr = pDeviceFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
if (FAILED(hr))
{
// The device does not support IAMVideoProcAmp
}
else
{
long Min, Max, Step, Default, Flags, Val;
// Get the range and default values
hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
hr = pProcAmp->GetRange(VideoProcAmp_BacklightCompensation, &Min, &Max, &Step, &Default, &Flags);
hr = pProcAmp->GetRange(VideoProcAmp_Contrast, &Min, &Max, &Step, &Default, &Flags);
hr = pProcAmp->GetRange(VideoProcAmp_Saturation, &Min, &Max, &Step, &Default, &Flags);
hr = pProcAmp->GetRange(VideoProcAmp_Sharpness, &Min, &Max, &Step, &Default, &Flags);
hr = pProcAmp->GetRange(VideoProcAmp_WhiteBalance, &Min, &Max, &Step, &Default, &Flags);
if (SUCCEEDED(hr))
{
hr = pProcAmp->Set(VideoProcAmp_Brightness,100, VideoProcAmp_Flags_Manual);
hr = pProcAmp->Set(VideoProcAmp_BacklightCompensation, 0, VideoProcAmp_Flags_Manual);
hr = pProcAmp->Set(VideoProcAmp_Contrast, 20 , VideoProcAmp_Flags_Manual);
hr = pProcAmp->Set(VideoProcAmp_Saturation,50, VideoProcAmp_Flags_Manual);
hr = pProcAmp->Set(VideoProcAmp_Sharpness, 0, VideoProcAmp_Flags_Manual);
hr = pProcAmp->Set(VideoProcAmp_WhiteBalance, 0, VideoProcAmp_Flags_Manual);
}
}
}
// Adapted from the DirectShow SDK sample helper.
/// Return the first pin on 'pFilter' whose direction matches 'PinDir',
/// or NULL if none is found. The returned pin holds a reference that the
/// caller must Release().
IPin *GetPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir)
{
    IEnumPins *pEnum = NULL;
    // BUG FIX: the EnumPins HRESULT was previously ignored; on failure
    // pEnum stayed uninitialized and was dereferenced below.
    if (FAILED(pFilter->EnumPins(&pEnum)) || pEnum == NULL)
        return NULL;
    IPin *pPin = NULL;
    BOOL bFound = FALSE;
    while (pEnum->Next(1, &pPin, 0) == S_OK)
    {
        PIN_DIRECTION PinDirThis;
        // BUG FIX: QueryDirection's HRESULT is now checked so an
        // uninitialized PinDirThis is never compared.
        if (SUCCEEDED(pPin->QueryDirection(&PinDirThis)) && PinDirThis == PinDir)
        {
            bFound = TRUE; // keep this pin's reference for the caller
            break;
        }
        pPin->Release(); // not the pin we want — drop its reference
        pPin = NULL;
    }
    pEnum->Release();
    return bFound ? pPin : NULL;
}
// Entry point: enumerate video capture devices, apply manual camera settings
// through DirectShow, build a filter graph with a Sample Grabber, then grab
// and save frames via OpenCV's legacy C capture API.
int main()
{
// for playing
IGraphBuilder *pGraphBuilder;
ICaptureGraphBuilder2 *pCaptureGraphBuilder2;
IMediaControl *pMediaControl = NULL;
IMediaEventEx *pEvent = NULL;
// multiple cameras
IBaseFilter *pDeviceFilter_0 = NULL;
IBaseFilter *m_pGrabber_0 = NULL;
ISampleGrabber *m_pGrabberSettings_0 = NULL;
// select camera
ICreateDevEnum *pCreateDevEnum = NULL;
IEnumMoniker *pEnumMoniker = NULL;
IMoniker *pMoniker = NULL;
ULONG nFetched = 0;
// initialize COM
CoInitialize(NULL);
// selecting a device
// Create CreateDevEnum to list device
// NOTE(review): USB1 is built but never used below — presumably intended to
// match a specific device by DevicePath; confirm before removing.
std::string USB1 = "\\\\?\\usb#vid_045e&pid_076d&mi_00#7&1ba27d43&0&0000#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\\global";
CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
// Create EnumMoniker to list VideoInputDevice
pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMoniker, 0);
if (pEnumMoniker == NULL) {
// this will be shown if there is no capture device
printf("no device\n");
return 0;
}
// reset EnumMoniker
pEnumMoniker->Reset();
// get each Moniker
// NOTE(review): this loop binds a filter for EVERY enumerated device, so
// pDeviceFilter_0 ends up holding the LAST device found (earlier filter
// pointers are overwritten without Release).
while (pEnumMoniker->Next(1, &pMoniker, &nFetched) == S_OK)
{
IPropertyBag *pPropertyBag;
TCHAR devname[256];
TCHAR devpath[256];
// bind to IPropertyBag
pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropertyBag);
VARIANT var;
// get FriendlyName
var.vt = VT_BSTR;
pPropertyBag->Read(L"FriendlyName", &var, 0);
WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, devname, sizeof(devname), 0, 0);
VariantClear(&var);
// get DevicePath
// DevicePath : A unique string
var.vt = VT_BSTR;
pPropertyBag->Read(L"DevicePath", &var, 0);
WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, devpath, sizeof(devpath), 0, 0);
std::string devpathString = devpath;
pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&pDeviceFilter_0 );
pMoniker->Release();
pPropertyBag->Release();
if (pDeviceFilter_0 == NULL)
{
MessageBox(NULL, "No MS HD-5000 cameras found", "No cameras", MB_OK);
return 0;
}
}
// create FilterGraph and CaptureGraphBuilder2
CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC, IID_IGraphBuilder, (LPVOID *)&pGraphBuilder);
CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC, IID_ICaptureGraphBuilder2, (LPVOID *)&pCaptureGraphBuilder2);
// NOTE(review): COM is already initialized above — this second CoInitialize
// is redundant and unbalanced (only one CoUninitialize at the end).
HRESULT hr = CoInitialize(0);
IAMStreamConfig *pConfig = NULL;
// Apply manual settings through DirectShow BEFORE the graph is connected.
setCameraMode(pCaptureGraphBuilder2, pConfig, pDeviceFilter_0, hr); // FPS, Res, color mode
setCameraControl(pDeviceFilter_0, hr, 10 , 12); // Focus, exposure
setCameraProperties(pDeviceFilter_0, hr, 180, 0, 4, 100, 0, 2800); // Brightness, saturation, etc
// set grabber properties
AM_MEDIA_TYPE mt;
hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&m_pGrabber_0); // create ISampleGrabber
pCaptureGraphBuilder2->SetFiltergraph(pGraphBuilder); // set FilterGraph
pGraphBuilder->QueryInterface(IID_IMediaControl, (LPVOID *)&pMediaControl); // get MediaControl interface
m_pGrabber_0->QueryInterface(IID_ISampleGrabber, (void**)&m_pGrabberSettings_0);
ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
mt.majortype = MEDIATYPE_Video;
mt.subtype = MEDIASUBTYPE_RGB24;
hr = m_pGrabberSettings_0->SetMediaType(&mt);
if (FAILED(hr))
{
return hr;
}
hr = m_pGrabberSettings_0->SetOneShot(FALSE);
hr = m_pGrabberSettings_0->SetBufferSamples(TRUE);
// build filter graph
pGraphBuilder->AddFilter(pDeviceFilter_0, L"Device Filter");
pGraphBuilder->AddFilter(m_pGrabber_0, L"Sample Grabber");
IPin* pSourceOut_0 = GetPin(pDeviceFilter_0, PINDIR_OUTPUT);
IPin* pGrabberIn_0 = GetPin(m_pGrabber_0, PINDIR_INPUT);
pGraphBuilder->Connect(pSourceOut_0, pGrabberIn_0);
// The block below is the author's earlier Sample-Grabber-based capture
// attempt, kept commented out for reference.
/*
pMediaControl->Run();
long pBufferSize;
unsigned char* pBuffer_0 = 0;
hr = m_pGrabberSettings_0->GetCurrentBuffer(&pBufferSize, NULL);
if (FAILED(hr))
{
return 0;
}
pBuffer_0 = (BYTE*)CoTaskMemAlloc(pBufferSize);
if (!pBuffer_0)
{
hr = E_OUTOFMEMORY;
return 0;
}
long pBufferSize = 0;
unsigned char* pBuffer_0 = 0;
long Size=0;
hr = m_pGrabberSettings_0->GetCurrentBuffer(&Size, NULL);
if (Size != pBufferSize)
{
pBufferSize = Size;
if (pBuffer_0 != 0)
{
delete[] pBuffer_0;
}
pBuffer_0= new unsigned char[pBufferSize];
}
long pBufferSize = 425;
unsigned char* pBuffer_0 = 0;
pBuffer_0 = new unsigned char[pBufferSize];
// start playing
pMediaControl->Run();
while (1) {
if (MessageBox(NULL, "Grab frame?", "Grab?", MB_OKCANCEL) == 2)
{
break;
}
hr = m_pGrabberSettings_0->GetCurrentBuffer(&pBufferSize,(long*)pBuffer_0);
Cleanup:
// convert to OpenCV format
IplImage* img_0 = cvCreateImage(cvSize(160,120),IPL_DEPTH_8U,3);
for (int i = 0; i < pBufferSize ; i++)
{
img_0->imageData[i] = pBuffer_0[i];
}
cvFlip(img_0, NULL, 0);
// show
// cvNamedWindow("mainWin_0", CV_WINDOW_AUTOSIZE);
// cvMoveWindow("mainWin_0", 100, 100);
cvShowImage("mainWin_0", img_0 );
cvSaveImage("c:\\users\\senthil\\desktop\\img.png",img_0 );
//cvWaitKey(0);
cvReleaseImage(&img_0 );
}
*/
// Run the DirectShow graph we built above...
pMediaControl->Run();
cvNamedWindow("Camera_Output", 1); //Create window
// NOTE(review): cvCaptureFromCAM(0) opens the SAME device through OpenCV's
// own capture back end, which builds a second graph and renegotiates the
// format — this is the most likely reason the manual settings applied via
// setCameraMode/Control/Properties revert to defaults after a few grabs.
// Grab from the Sample Grabber (or re-apply settings after opening the
// OpenCV capture) instead of mixing the two capture paths. TODO confirm.
CvCapture* capture = cvCaptureFromCAM(0); //Capture using any camera connected to your system
while(1)
{
//Create infinte loop for live streaming
if (MessageBox(NULL, "Grab frame?", "Grab?", MB_OKCANCEL) == 2)
{
break;
}
IplImage* frame = cvQueryFrame(capture); //Create image frames from capture
cvShowImage("Camera_Output", frame); //Show image frames on created window
cvSaveImage("c:\\users\\senthil\\desktop\\img1.png",frame);
// cv::Mat img(frame);
// cv::imwrite("c:\\users\\selvaraj\\desktop\\img.png",img);
}
//std::cout << "FPS: " << fps << std::endl;
//std::cout << "PROP_BRIGHTNESS: " << PROP_BRIGHTNESS << std::endl;
//WriteComPort("COM3","A");
cvReleaseCapture(&capture); //Release capture.
cvDestroyWindow("Camera_Output"); //Destroy Window */
// release
// Tear down the DirectShow objects in reverse order of creation.
pMediaControl->Release();
pCaptureGraphBuilder2->Release();
pGraphBuilder->Release();
pEnumMoniker->Release();
pCreateDevEnum->Release();
// finalize COM
CoUninitialize();
return 0;
}
I also tried using the Sample Grabber, but it did not help either. Please help me fix this code.
最佳答案
您是在谈论这些设置吗:
setCameraMode(pCaptureGraphBuilder2, pConfig, pDeviceFilter_0, hr); // FPS, Res, color mode
setCameraControl(pDeviceFilter_0, hr, 10 , 12); // Focus, exposure
setCameraProperties(pDeviceFilter_0, hr, 180, 0, 4, 100, 0, 2800); // Brightness, saturation, etc
关于c++ - 使用directshow控制摄像头,使用open cv抓图,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/27895975/
OpenAL.org && 创意开发网站已关闭。我选择替代版本 OpenAL Soft .我很担心,因为在 OpenAL Soft 的二进制安装中我找不到 alut.h header 。 alut.h
我使用 Android Studio 已经有一段时间了,但有一天应用程序突然出错了。当我尝试单击我的目录以查找要导入或打开的文件时,应用程序变得异常缓慢并且根本没有响应。当我最终成功切换到存储我的文件
自 Firefox 4 以来,这似乎是一个奇怪的功能变化。在使用 window.open() 打开一个窗口后,当用鼠标中键单击打开的窗口中的链接时(或右键单击并选择“在新窗口中打开”选项卡') 导致链
我无法从 Open::URI 的 rdoc 中得知当我这样做时返回的是什么: result = open(url) URL 返回 XML,但我如何查看/解析 XML? 最佳答案 open 返回一个 I
经常开发asp但对于细致的说法,真实不太清楚,这里简单的介绍下。 一般情况下 读取数据都是用rs.open sql,conn,1,1 修改数据:rs.open sql,conn,1,3 删除
关于 pathlib 标准库中的模块,是 path.open() 方法只是内置 open() 的“包装器”功能? 最佳答案 如果您阅读了 source code的 pathlib.Path.open你
我想将 Open Liberty 运行时的语言更改为 en_US从 Eclipse IDE 中,但我不知道如何。 也尝试使用 JVM 参数的首选项来设置它,但它没有用。 -Duser.language
这是我所拥有的: 参数“opener”未在可能的函数调用参数中列出。这是 PyCharm 错误还是其他原因? PyCharm 2018.3.5 社区版,Windows 7 上的 Python 3.6.
我正在使用 Tinkerpop 的 GraphFactory.open(Configuration 配置) Java 命令来访问 Neo4j 数据库。 一个最低限度的工作示例是: Configurat
这个问题在这里已经有了答案: What is the python "with" statement designed for? (11 个答案) 关闭 7 年前。 我没有使用过 with 语句,但
我正在玩 python 3.5 中的 open 函数。我不明白 opener 参数(最后一个参数)在 open 函数中的用法。根据 python 文档:可以通过将可调用对象作为打开器传递来使用自定义打
关闭。此题需要details or clarity 。目前不接受答案。 想要改进这个问题吗?通过 editing this post 添加详细信息并澄清问题. 已关闭 5 年前。 Improve th
我试图用 Python 来做一些模拟 3D 声音的工作。我试图运行此代码(答案中提供):Python openAL 3D sound类似,两次都收到: ModuleNotFoundError: No
我一直认为 open 和 io.open 可以互换。 显然不是,如果我相信这个片段: import ctypes, io class POINT(ctypes.Structure): _fie
这个问题在这里已经有了答案: What's the difference between io.open() and os.open() on Python? (7 个答案) 关闭 9 年前。 我是
我正在尝试更好地了解 WCF 的一些内部工作原理。我已经做了相当多的环顾四周,但我无法找到关于 ChannelFactory.Open() 与 IClientChannel.Open() 相比的明确解
这个问题在这里已经有了答案: What is the python "with" statement designed for? (11 个答案) 关闭 7 年前。 我知道有很多关于在 python
CFSDN坚持开源创造价值,我们致力于搭建一个资源共享平台,让每一个IT人在这里找到属于你的精彩世界. 这篇CFSDN的博客文章adodb.recordset.open(rs.open)方法参数详解由
不久前我遇到了一个interesting security hole Link 看起来足够无害,但有一个漏洞,因为默认情况下,正在打开的页面允许打开的页面通过 window.opener 回调到它。有
这在我的应用程序上运行良好,但由于某种原因我无法让它在这里正常工作。无论如何,我的问题是,当我单击列表标题时,我想关闭之前打开的列表标题并仅保留事件的列表标题打开。目前它会打开我点击的所有内容,但也会
我是一名优秀的程序员,十分优秀!