1 | /*M///////////////////////////////////////////////////////////////////////////////////////
2 | //
|
3 | // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
|
4 | //
|
5 | // By downloading, copying, installing or using the software you agree to this license.
|
6 | // If you do not agree to this license, do not download, install,
|
7 | // copy or use the software.
|
8 | //
|
9 | //
|
10 | // Intel License Agreement
|
11 | // For Open Source Computer Vision Library
|
12 | //
|
13 | // Copyright (C) 2000, Intel Corporation, all rights reserved.
|
14 | // Third party copyrights are property of their respective owners.
|
15 | //
|
16 | // Redistribution and use in source and binary forms, with or without modification,
|
17 | // are permitted provided that the following conditions are met:
|
18 | //
|
19 | // * Redistribution's of source code must retain the above copyright notice,
|
20 | // this list of conditions and the following disclaimer.
|
21 | //
|
22 | // * Redistribution's in binary form must reproduce the above copyright notice,
|
23 | // this list of conditions and the following disclaimer in the documentation
|
24 | // and/or other materials provided with the distribution.
|
25 | //
|
26 | // * The name of Intel Corporation may not be used to endorse or promote products
|
27 | // derived from this software without specific prior written permission.
|
28 | //
|
29 | // This software is provided by the copyright holders and contributors "as is" and
|
30 | // any express or implied warranties, including, but not limited to, the implied
|
31 | // warranties of merchantability and fitness for a particular purpose are disclaimed.
|
32 | // In no event shall the Intel Corporation or contributors be liable for any direct,
|
33 | // indirect, incidental, special, exemplary, or consequential damages
|
34 | // (including, but not limited to, procurement of substitute goods or services;
|
35 | // loss of use, data, or profits; or business interruption) however caused
|
36 | // and on any theory of liability, whether in contract, strict liability,
|
37 | // or tort (including negligence or otherwise) arising in any way out of
|
38 | // the use of this software, even if advised of the possibility of such damage.
|
39 | //
|
40 | //M*/
|
41 |
|
42 | #include "precomp.hpp"
|
43 |
|
44 | #if (defined WIN32 || defined _WIN32) && defined HAVE_VIDEOINPUT
|
45 |
|
46 | /*
47 | DirectShow-based Video Capturing module is based on
|
48 | videoInput library by Theodore Watson:
|
49 | http://muonics.net/school/spring05/videoInput/
|
50 |
|
51 | Below is the original copyright
|
52 | */
|
53 |
|
54 |
|
55 |
|
56 |
|
57 |
|
58 |
|
59 |
|
60 |
|
61 |
|
62 |
|
63 |
|
64 |
|
65 |
|
66 |
|
67 |
|
68 |
|
69 |
|
70 |
|
71 | /*
72 |
|
73 | Thanks to:
|
74 |
|
75 | Dillip Kumar Kara for crossbar code.
|
76 | Zachary Lieberman for getting me into this stuff
|
77 | and for being so generous with time and code.
|
78 | The guys at Potion Design for helping me with VC++
|
79 | Josh Fisher for being a serious C++ nerd :)
|
80 | Golan Levin for helping me debug the strangest
|
81 | and slowest bug in the world!
|
82 |
|
83 | And all the people using this library who send in
|
84 | bugs, suggestions and improvements who keep me working on
|
85 | the next version - yeah thanks a lot ;)
|
86 |
|
87 | */
|
88 |
|
89 |
|
90 | #include "precomp.hpp"
|
91 |
|
92 | #if _MSC_VER >= 100
|
93 | #pragma warning(disable: 4995)
|
94 | #endif
|
95 |
|
96 | #include <tchar.h>
|
97 | #include <stdlib.h>
|
98 | #include <stdio.h>
|
99 | #include <math.h>
|
100 | #include <string.h>
|
101 | #include <wchar.h>
|
102 |
|
103 | #include <vector>
|
104 |
|
105 |
|
106 | #if _MSC_VER >= 1500
|
107 | #include "DShow.h"
|
108 | #include "strmif.h"
|
109 | #include "Aviriff.h"
|
110 | #include "dvdmedia.h"
|
111 | #include "bdaiface.h"
|
112 | #else
|
113 | #ifdef _MSC_VER
|
114 | #define __extension__
|
115 | typedef BOOL WINBOOL;
|
116 | #endif
|
117 | #include "dshow/dshow.h"
|
118 | #include "dshow/dvdmedia.h"
|
119 | #include "dshow/bdatypes.h"
|
120 |
|
121 | interface IEnumPIDMap : public IUnknown
|
122 | {
|
123 | public:
|
124 | virtual HRESULT STDMETHODCALLTYPE Next(
|
125 | ULONG cRequest,
|
126 | PID_MAP *pPIDMap,
|
127 | ULONG *pcReceived) = 0;
|
128 |
|
129 | virtual HRESULT STDMETHODCALLTYPE Skip(
|
130 | ULONG cRecords) = 0;
|
131 |
|
132 | virtual HRESULT STDMETHODCALLTYPE Reset( void) = 0;
|
133 |
|
134 | virtual HRESULT STDMETHODCALLTYPE Clone(
|
135 | IEnumPIDMap **ppIEnumPIDMap) = 0;
|
136 | };
|
137 |
|
138 | interface IMPEG2PIDMap : public IUnknown
|
139 | {
|
140 | virtual HRESULT STDMETHODCALLTYPE MapPID(
|
141 | ULONG culPID,
|
142 | ULONG *pulPID,
|
143 | MEDIA_SAMPLE_CONTENT MediaSampleContent) = 0;
|
144 |
|
145 | virtual HRESULT STDMETHODCALLTYPE UnmapPID(
|
146 | ULONG culPID,
|
147 | ULONG *pulPID) = 0;
|
148 |
|
149 | virtual HRESULT STDMETHODCALLTYPE EnumPIDMap(
|
150 | IEnumPIDMap **pIEnumPIDMap) = 0;
|
151 | };
|
152 |
|
153 | #endif
|
154 |
|
155 |
|
156 | #include <process.h>
|
157 |
|
158 |
|
159 | #ifndef _WIN32_WINNT
|
160 | #define _WIN32_WINNT 0x400
|
161 | #endif
|
162 |
|
163 |
|
164 | /*
165 | MEDIASUBTYPE_I420 : TGUID ='{30323449-0000-0010-8000-00AA00389B71}';
|
166 | MEDIASUBTYPE_Y800 : TGUID ='{30303859-0000-0010-8000-00AA00389B71}';
|
167 | MEDIASUBTYPE_Y8 : TGUID ='{20203859-0000-0010-8000-00AA00389B71}';
|
168 | MEDIASUBTYPE_Y160 : TGUID ='{30363159-0000-0010-8000-00AA00389B71}';
|
169 | MEDIASUBTYPE_YV16 : TGUID ='{32315659-0000-0010-8000-00AA00389B71}';
|
170 | MEDIASUBTYPE_Y422 : TGUID ='{32323459-0000-0010-8000-00AA00389B71}';
|
171 | MEDIASUBTYPE_GREY : TGUID ='{59455247-0000-0010-8000-00AA00389B71}';
|
172 | */
|
173 |
|
174 | #include <initguid.h>
|
175 |
|
176 | DEFINE_GUID(MEDIASUBTYPE_GREY, 0x59455247, 0x0000, 0x0010, 0x80, 0x00,
|
177 | 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
|
178 | DEFINE_GUID(MEDIASUBTYPE_Y8, 0x20203859, 0x0000, 0x0010, 0x80, 0x00,
|
179 | 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
|
180 | DEFINE_GUID(MEDIASUBTYPE_Y800, 0x30303859, 0x0000, 0x0010, 0x80, 0x00,
|
181 | 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
|
182 |
|
183 | DEFINE_GUID(CLSID_CaptureGraphBuilder2,0xbf87b6e1,0x8c27,0x11d0,0xb3,0xf0,0x00,0xaa,0x00,0x37,0x61,0xc5);
|
184 | DEFINE_GUID(CLSID_FilterGraph,0xe436ebb3,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
|
185 | DEFINE_GUID(CLSID_NullRenderer,0xc1f400a4,0x3f08,0x11d3,0x9f,0x0b,0x00,0x60,0x08,0x03,0x9e,0x37);
|
186 | DEFINE_GUID(CLSID_SampleGrabber,0xc1f400a0,0x3f08,0x11d3,0x9f,0x0b,0x00,0x60,0x08,0x03,0x9e,0x37);
|
187 | DEFINE_GUID(CLSID_SystemDeviceEnum,0x62be5d10,0x60eb,0x11d0,0xbd,0x3b,0x00,0xa0,0xc9,0x11,0xce,0x86);
|
188 | DEFINE_GUID(CLSID_VideoInputDeviceCategory,0x860bb310,0x5d01,0x11d0,0xbd,0x3b,0x00,0xa0,0xc9,0x11,0xce,0x86);
|
189 | DEFINE_GUID(FORMAT_VideoInfo,0x05589f80,0xc356,0x11ce,0xbf,0x01,0x00,0xaa,0x00,0x55,0x59,0x5a);
|
190 | DEFINE_GUID(IID_IAMAnalogVideoDecoder,0xc6e13350,0x30ac,0x11d0,0xa1,0x8c,0x00,0xa0,0xc9,0x11,0x89,0x56);
|
191 | DEFINE_GUID(IID_IAMCameraControl,0xc6e13370,0x30ac,0x11d0,0xa1,0x8c,0x00,0xa0,0xc9,0x11,0x89,0x56);
|
192 | DEFINE_GUID(IID_IAMCrossbar,0xc6e13380,0x30ac,0x11d0,0xa1,0x8c,0x00,0xa0,0xc9,0x11,0x89,0x56);
|
193 | DEFINE_GUID(IID_IAMStreamConfig,0xc6e13340,0x30ac,0x11d0,0xa1,0x8c,0x00,0xa0,0xc9,0x11,0x89,0x56);
|
194 | DEFINE_GUID(IID_IAMVideoProcAmp,0xc6e13360,0x30ac,0x11d0,0xa1,0x8c,0x00,0xa0,0xc9,0x11,0x89,0x56);
|
195 | DEFINE_GUID(IID_IBaseFilter,0x56a86895,0x0ad4,0x11ce,0xb0,0x3a,0x00,0x20,0xaf,0x0b,0xa7,0x70);
|
196 | DEFINE_GUID(IID_ICaptureGraphBuilder2,0x93e5a4e0,0x2d50,0x11d2,0xab,0xfa,0x00,0xa0,0xc9,0xc6,0xe3,0x8d);
|
197 | DEFINE_GUID(IID_ICreateDevEnum,0x29840822,0x5b84,0x11d0,0xbd,0x3b,0x00,0xa0,0xc9,0x11,0xce,0x86);
|
198 | DEFINE_GUID(IID_IGraphBuilder,0x56a868a9,0x0ad4,0x11ce,0xb0,0x3a,0x00,0x20,0xaf,0x0b,0xa7,0x70);
|
199 | DEFINE_GUID(IID_IMPEG2PIDMap,0xafb6c2a1,0x2c41,0x11d3,0x8a,0x60,0x00,0x00,0xf8,0x1e,0x0e,0x4a);
|
200 | DEFINE_GUID(IID_IMediaControl,0x56a868b1,0x0ad4,0x11ce,0xb0,0x3a,0x00,0x20,0xaf,0x0b,0xa7,0x70);
|
201 | DEFINE_GUID(IID_IMediaFilter,0x56a86899,0x0ad4,0x11ce,0xb0,0x3a,0x00,0x20,0xaf,0x0b,0xa7,0x70);
|
202 | DEFINE_GUID(IID_ISampleGrabber,0x6b652fff,0x11fe,0x4fce,0x92,0xad,0x02,0x66,0xb5,0xd7,0xc7,0x8f);
|
203 | DEFINE_GUID(LOOK_UPSTREAM_ONLY,0xac798be0,0x98e3,0x11d1,0xb3,0xf1,0x00,0xaa,0x00,0x37,0x61,0xc5);
|
204 | DEFINE_GUID(MEDIASUBTYPE_AYUV,0x56555941,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
205 | DEFINE_GUID(MEDIASUBTYPE_IYUV,0x56555949,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
206 | DEFINE_GUID(MEDIASUBTYPE_RGB24,0xe436eb7d,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
|
207 | DEFINE_GUID(MEDIASUBTYPE_RGB32,0xe436eb7e,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
|
208 | DEFINE_GUID(MEDIASUBTYPE_RGB555,0xe436eb7c,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
|
209 | DEFINE_GUID(MEDIASUBTYPE_RGB565,0xe436eb7b,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
|
210 | DEFINE_GUID(MEDIASUBTYPE_UYVY,0x59565955,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
211 | DEFINE_GUID(MEDIASUBTYPE_Y211,0x31313259,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
212 | DEFINE_GUID(MEDIASUBTYPE_Y411,0x31313459,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
213 | DEFINE_GUID(MEDIASUBTYPE_Y41P,0x50313459,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
214 | DEFINE_GUID(MEDIASUBTYPE_YUY2,0x32595559,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
215 | DEFINE_GUID(MEDIASUBTYPE_YUYV,0x56595559,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
216 | DEFINE_GUID(MEDIASUBTYPE_YV12,0x32315659,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
217 | DEFINE_GUID(MEDIASUBTYPE_YVU9,0x39555659,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
218 | DEFINE_GUID(MEDIASUBTYPE_YVYU,0x55595659,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
219 | DEFINE_GUID(MEDIASUBTYPE_MJPG,0x47504A4D, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
|
220 | DEFINE_GUID(MEDIATYPE_Interleaved,0x73766169,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
221 | DEFINE_GUID(MEDIATYPE_Video,0x73646976,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
|
222 | DEFINE_GUID(PIN_CATEGORY_CAPTURE,0xfb6c4281,0x0353,0x11d1,0x90,0x5f,0x00,0x00,0xc0,0xcc,0x16,0xba);
|
223 | DEFINE_GUID(PIN_CATEGORY_PREVIEW,0xfb6c4282,0x0353,0x11d1,0x90,0x5f,0x00,0x00,0xc0,0xcc,0x16,0xba);
|
224 |
|
225 | interface ISampleGrabberCB : public IUnknown
|
226 | {
|
227 | virtual HRESULT STDMETHODCALLTYPE SampleCB(
|
228 | double SampleTime,
|
229 | IMediaSample *pSample) = 0;
|
230 |
|
231 | virtual HRESULT STDMETHODCALLTYPE BufferCB(
|
232 | double SampleTime,
|
233 | BYTE *pBuffer,
|
234 | LONG BufferLen) = 0;
|
235 |
|
236 | };
|
237 |
|
238 | interface ISampleGrabber : public IUnknown
|
239 | {
|
240 | virtual HRESULT STDMETHODCALLTYPE SetOneShot(
|
241 | BOOL OneShot) = 0;
|
242 |
|
243 | virtual HRESULT STDMETHODCALLTYPE SetMediaType(
|
244 | const AM_MEDIA_TYPE *pType) = 0;
|
245 |
|
246 | virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType(
|
247 | AM_MEDIA_TYPE *pType) = 0;
|
248 |
|
249 | virtual HRESULT STDMETHODCALLTYPE SetBufferSamples(
|
250 | BOOL BufferThem) = 0;
|
251 |
|
252 | virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer(
|
253 | LONG *pBufferSize,
|
254 | LONG *pBuffer) = 0;
|
255 |
|
256 | virtual HRESULT STDMETHODCALLTYPE GetCurrentSample(
|
257 | IMediaSample **ppSample) = 0;
|
258 |
|
259 | virtual HRESULT STDMETHODCALLTYPE SetCallback(
|
260 | ISampleGrabberCB *pCallback,
|
261 | LONG WhichMethodToCallback) = 0;
|
262 |
|
263 | };
|
264 |
|
265 | #ifndef HEADER
|
266 | #define HEADER(p) (&(((VIDEOINFOHEADER*)(p))->bmiHeader))
|
267 | #endif
|
268 |
|
269 |
|
270 | /*
271 | //create a videoInput object
|
272 | videoInput VI;
|
273 |
|
274 | //Prints out a list of available devices and returns num of devices found
|
275 | int numDevices = VI.listDevices();
|
276 |
|
277 | int device1 = 0; //this could be any deviceID that shows up in listDevices
|
278 | int device2 = 1; //this could be any deviceID that shows up in listDevices
|
279 |
|
280 | //if you want to capture at a different frame rate (default is 30)
|
281 | //specify it here; you are not guaranteed to get this fps, though.
|
282 | //VI.setIdealFramerate(device1, 60);
|
283 |
|
284 | //setup the first device - there are a number of options:
|
285 |
|
286 | VI.setupDevice(device1); //setup the first device with the default settings
|
287 | //VI.setupDevice(device1, VI_COMPOSITE); //or setup device with specific connection type
|
288 | //VI.setupDevice(device1, 320, 240); //or setup device with specified video size
|
289 | //VI.setupDevice(device1, 320, 240, VI_COMPOSITE); //or setup device with video size and connection type
|
290 |
|
291 | //VI.setFormat(device1, VI_NTSC_M); //if your card doesn't remember what format it should be
|
292 | //call this with the appropriate format listed above
|
293 | //NOTE: must be called after setupDevice!
|
294 |
|
295 | //optionally setup a second (or third, fourth ...) device - same options as above
|
296 | VI.setupDevice(device2);
|
297 |
|
298 | //As the requested width and height cannot always be accommodated,
|
299 | //make sure to check the size once the device is set up
|
300 |
|
301 | int width = VI.getWidth(device1);
|
302 | int height = VI.getHeight(device1);
|
303 | int size = VI.getSize(device1);
|
304 |
|
305 | unsigned char * yourBuffer1 = new unsigned char[size];
|
306 | unsigned char * yourBuffer2 = new unsigned char[size];
|
307 |
|
308 | //to get the data from the device first check if the data is new
|
309 | if(VI.isFrameNew(device1)){
|
310 | VI.getPixels(device1, yourBuffer1, false, false); //fills pixels as a BGR (for openCV) unsigned char array - no flipping
|
311 | VI.getPixels(device1, yourBuffer2, true, true); //fills pixels as a RGB (for openGL) unsigned char array - flipping!
|
312 | }
|
313 |
|
314 | //same applies to device2 etc
|
315 |
|
316 | //to get a settings dialog for the device
|
317 | VI.showSettingsWindow(device1);
|
318 |
|
319 |
|
320 | //Shut down devices properly
|
321 | VI.stopDevice(device1);
|
322 | VI.stopDevice(device2);
|
323 | */
|
324 |
|
325 |
|
326 |
|
327 |
|
328 |
|
329 |
|
330 |
|
331 |
|
332 | static bool verbose = true;
|
333 |
|
334 |
|
335 |
|
336 |
|
337 |
|
338 |
|
339 |
|
340 | #define VI_VERSION 0.1995
|
341 | #define VI_MAX_CAMERAS 20
|
342 | #define VI_NUM_TYPES 19
|
343 | #define VI_NUM_FORMATS 18
|
344 |
|
345 |
|
346 | #define VI_COMPOSITE 0
|
347 | #define VI_S_VIDEO 1
|
348 | #define VI_TUNER 2
|
349 | #define VI_USB 3
|
350 | #define VI_1394 4
|
351 |
|
352 |
|
353 | #define VI_NTSC_M 0
|
354 | #define VI_PAL_B 1
|
355 | #define VI_PAL_D 2
|
356 | #define VI_PAL_G 3
|
357 | #define VI_PAL_H 4
|
358 | #define VI_PAL_I 5
|
359 | #define VI_PAL_M 6
|
360 | #define VI_PAL_N 7
|
361 | #define VI_PAL_NC 8
|
362 | #define VI_SECAM_B 9
|
363 | #define VI_SECAM_D 10
|
364 | #define VI_SECAM_G 11
|
365 | #define VI_SECAM_H 12
|
366 | #define VI_SECAM_K 13
|
367 | #define VI_SECAM_K1 14
|
368 | #define VI_SECAM_L 15
|
369 | #define VI_NTSC_M_J 16
|
370 | #define VI_NTSC_433 17
|
371 |
|
372 |
|
373 |
|
374 | struct ICaptureGraphBuilder2;
|
375 | struct IGraphBuilder;
|
376 | struct IBaseFilter;
|
377 | struct IAMCrossbar;
|
378 | struct IMediaControl;
|
379 | struct ISampleGrabber;
|
380 | struct IMediaEventEx;
|
381 | struct IAMStreamConfig;
|
382 | struct _AMMediaType;
|
383 | class SampleGrabberCallback;
|
384 | typedef _AMMediaType AM_MEDIA_TYPE;
|
385 |
|
386 |
|
387 |
|
388 | static int comInitCount = 0;
|
389 |
|
390 |
|
391 |
|
392 |
|
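// Per-device state: holds the DirectShow objects for one camera (capture graph
// builder, filter graph, source/grabber/renderer filters, stream config), the
// sample grabber callback, the requested and negotiated capture size, and the
// pixel buffers used to hand frames back to the caller.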
393 | class videoDevice{
|
394 |
|
395 |
|
396 | public:
|
397 |
|
398 | videoDevice();
|
399 | void setSize(int w, int h);
|
400 | void NukeDownstream(IBaseFilter *pBF);
|
401 | void destroyGraph();
|
402 | ~videoDevice();
|
403 |
|
404 | int videoSize;
|
405 | int width;
|
406 | int height;
|
407 |
|
408 | int tryWidth;
|
409 | int tryHeight;
|
410 | GUID tryVideoType;
|
411 |
|
412 | ICaptureGraphBuilder2 *pCaptureGraph;
|
413 | IGraphBuilder *pGraph;
|
414 | IMediaControl *pControl;
|
415 | IBaseFilter *pVideoInputFilter;
|
416 | IBaseFilter *pGrabberF;
|
417 | IBaseFilter * pDestFilter;
|
418 | IAMStreamConfig *streamConf;
|
419 | ISampleGrabber * pGrabber;
|
420 | AM_MEDIA_TYPE * pAmMediaType;
|
421 |
|
422 | IMediaEventEx * pMediaEvent;
|
423 |
|
424 | GUID videoType;
|
425 | long formatType;
|
426 |
|
427 | SampleGrabberCallback * sgCallback;
|
428 |
|
429 | bool tryDiffSize;
|
430 | bool useCrossbar;
|
431 | bool readyToCapture;
|
432 | bool sizeSet;
|
433 | bool setupStarted;
|
434 | bool specificFormat;
|
435 | bool autoReconnect;
|
436 | int nFramesForReconnect;
|
437 | unsigned long nFramesRunning;
|
438 | int connection;
|
439 | int storeConn;
|
440 | int myID;
|
441 | long requestedFrameTime;
|
442 |
|
443 | char nDeviceName[255];
|
444 | WCHAR wDeviceName[255];
|
445 |
|
446 | unsigned char * pixels;
|
447 | char * pBuffer;
|
448 |
|
449 | };
|
450 |
|
451 |
|
452 |
|
453 |
|
454 |
|
455 |
|
456 |
|
457 |
|
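// Main capture interface: enumerates devices, sets them up, exposes per-frame
// pixel access, and wraps the IAMVideoProcAmp / IAMCameraControl property APIs.
// Up to VI_MAX_CAMERAS devices are managed through the VDList array.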
458 | class videoInput{
|
459 |
|
460 | public:
|
461 | videoInput();
|
462 | ~videoInput();
|
463 |
|
464 |
|
465 | static void setVerbose(bool _verbose);
|
466 |
|
467 |
|
468 | static int listDevices(bool silent = false);
|
469 |
|
470 |
|
471 | static char * getDeviceName(int deviceID);
|
472 |
|
473 |
|
474 | void setUseCallback(bool useCallback);
|
475 |
|
476 |
|
477 |
|
478 | void setIdealFramerate(int deviceID, int idealFramerate);
|
479 |
|
480 |
|
481 |
|
482 |
|
483 | void setAutoReconnectOnFreeze(int deviceNumber, bool doReconnect, int numMissedFramesBeforeReconnect);
|
484 |
|
485 |
|
486 | bool setupDevice(int deviceID);
|
487 | bool setupDevice(int deviceID, int w, int h);
|
488 | bool setupDeviceFourcc(int deviceID, int w, int h,int fourcc);
|
489 |
|
490 |
|
491 |
|
492 | bool setupDevice(int deviceID, int connection);
|
493 | bool setupDevice(int deviceID, int w, int h, int connection);
|
494 |
|
495 | bool setFourcc(int deviceNumber, int fourcc);
|
496 |
|
497 |
|
498 |
|
499 |
|
500 |
|
501 |
|
502 | bool setFormat(int deviceNumber, int format);
|
503 |
|
504 |
|
505 | bool isFrameNew(int deviceID);
|
506 |
|
507 | bool isDeviceSetup(int deviceID);
|
508 |
|
509 |
|
510 | unsigned char * getPixels(int deviceID, bool flipRedAndBlue = true, bool flipImage = false);
|
511 |
|
512 |
|
513 | bool getPixels(int id, unsigned char * pixels, bool flipRedAndBlue = true, bool flipImage = false);
|
514 |
|
515 |
|
516 |
|
517 | void showSettingsWindow(int deviceID);
|
518 |
|
519 |
|
520 |
|
521 | bool setVideoSettingFilter(int deviceID, long Property, long lValue, long Flags = NULL, bool useDefaultValue = false);
|
522 | bool setVideoSettingFilterPct(int deviceID, long Property, float pctValue, long Flags = NULL);
|
523 | bool getVideoSettingFilter(int deviceID, long Property, long &min, long &max, long &SteppingDelta, long &currentValue, long &flags, long &defaultValue);
|
524 |
|
525 | bool setVideoSettingCamera(int deviceID, long Property, long lValue, long Flags = NULL, bool useDefaultValue = false);
|
526 | bool setVideoSettingCameraPct(int deviceID, long Property, float pctValue, long Flags = NULL);
|
527 | bool getVideoSettingCamera(int deviceID, long Property, long &min, long &max, long &SteppingDelta, long &currentValue, long &flags, long &defaultValue);
|
528 |
|
529 |
|
530 |
|
531 |
|
532 | int getWidth(int deviceID);
|
533 | int getHeight(int deviceID);
|
534 | int getSize(int deviceID);
|
535 | int getFourcc(int deviceID);
|
536 | double getFPS(int deviceID);
|
537 |
|
538 |
|
539 | void stopDevice(int deviceID);
|
540 |
|
541 |
|
542 | bool restartDevice(int deviceID);
|
543 |
|
544 |
|
545 | int devicesFound;
|
546 |
|
547 |
|
548 | int getVideoPropertyFromCV(int cv_property);
|
549 | int getCameraPropertyFromCV(int cv_property);
|
550 |
|
551 | private:
|
552 | void setPhyCon(int deviceID, int conn);
|
553 | void setAttemptCaptureSize(int deviceID, int w, int h,GUID mediaType=MEDIASUBTYPE_RGB24);
|
554 | bool setup(int deviceID);
|
555 | void processPixels(unsigned char * src, unsigned char * dst, int width, int height, bool bRGB, bool bFlip);
|
556 | int start(int deviceID, videoDevice * VD);
|
557 | int getDeviceCount();
|
558 | void getMediaSubtypeAsString(GUID type, char * typeAsString);
|
559 | GUID *getMediaSubtypeFromFourcc(int fourcc);
|
560 | int getFourccFromMediaSubtype(GUID type);
|
561 |
|
562 | void getVideoPropertyAsString(int prop, char * propertyAsString);
|
563 | void getCameraPropertyAsString(int prop, char * propertyAsString);
|
564 |
|
565 | HRESULT getDevice(IBaseFilter **pSrcFilter, int deviceID, WCHAR * wDeviceName, char * nDeviceName);
|
566 | static HRESULT ShowFilterPropertyPages(IBaseFilter *pFilter);
|
567 | static HRESULT ShowStreamPropertyPages(IAMStreamConfig *pStream);
|
568 |
|
569 | HRESULT SaveGraphFile(IGraphBuilder *pGraph, WCHAR *wszPath);
|
570 | HRESULT routeCrossbar(ICaptureGraphBuilder2 **ppBuild, IBaseFilter **pVidInFilter, int conType, GUID captureMode);
|
571 |
|
572 |
|
573 | static bool comInit();
|
574 | static bool comUnInit();
|
575 |
|
576 | int connection;
|
577 | int callbackSetCount;
|
578 | bool bCallback;
|
579 |
|
580 | GUID CAPTURE_MODE;
|
581 |
|
582 |
|
583 | GUID MEDIASUBTYPE_Y800;
|
584 | GUID MEDIASUBTYPE_Y8;
|
585 | GUID MEDIASUBTYPE_GREY;
|
586 |
|
587 | videoDevice * VDList[VI_MAX_CAMERAS];
|
588 | GUID mediaSubtypes[VI_NUM_TYPES];
|
589 | long formatTypes[VI_NUM_FORMATS];
|
590 |
|
591 | static void __cdecl basicThread(void * objPtr);
|
592 |
|
593 | static char deviceNames[VI_MAX_CAMERAS][255];
|
594 |
|
595 | };
|
596 |
|
597 |
|
598 |
|
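// Local equivalents of the DirectShow base-class helpers FreeMediaType()/DeleteMediaType():
// release the format block and pUnk held by an AM_MEDIA_TYPE, then free the structure itself.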
599 | void MyFreeMediaType(AM_MEDIA_TYPE& mt){
|
600 | if (mt.cbFormat != 0)
|
601 | {
|
602 | CoTaskMemFree((PVOID)mt.pbFormat);
|
603 | mt.cbFormat = 0;
|
604 | mt.pbFormat = NULL;
|
605 | }
|
606 | if (mt.pUnk != NULL)
|
607 | {
|
608 |
|
609 | mt.pUnk->Release();
|
610 | mt.pUnk = NULL;
|
611 | }
|
612 | }
|
613 |
|
614 | void MyDeleteMediaType(AM_MEDIA_TYPE *pmt)
|
615 | {
|
616 | if (pmt != NULL)
|
617 | {
|
618 | MyFreeMediaType(*pmt);
|
619 | CoTaskMemFree(pmt);
|
620 | }
|
621 | }
|
622 |
|
623 |
|
624 |
|
625 |
|
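// ISampleGrabberCB implementation used in callback mode: SampleCB() copies each
// completed sample into the 'pixels' buffer under a critical section, marks the
// frame as new, and signals hEvent so getPixels()/isFrameNew() can pick it up.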
626 | class SampleGrabberCallback : public ISampleGrabberCB{
|
627 | public:
|
628 |
|
629 |
|
630 | SampleGrabberCallback(){
|
631 | InitializeCriticalSection(&critSection);
|
632 | freezeCheck = 0;
|
633 |
|
634 |
|
635 | bufferSetup = false;
|
636 | newFrame = false;
|
637 | latestBufferLength = 0;
|
638 |
|
639 | hEvent = CreateEvent(NULL, true, false, NULL);
|
640 | }
|
641 |
|
642 |
|
643 |
|
644 | ~SampleGrabberCallback(){
|
645 | ptrBuffer = NULL;
|
646 | DeleteCriticalSection(&critSection);
|
647 | CloseHandle(hEvent);
|
648 | if(bufferSetup){
|
649 | delete[] pixels; // allocated with new[] in setupBuffer()
|
650 | }
|
651 | }
|
652 |
|
653 |
|
654 |
|
655 | bool setupBuffer(int numBytesIn){
|
656 | if(bufferSetup){
|
657 | return false;
|
658 | }else{
|
659 | numBytes = numBytesIn;
|
660 | pixels = new unsigned char[numBytes];
|
661 | bufferSetup = true;
|
662 | newFrame = false;
|
663 | latestBufferLength = 0;
|
664 | }
|
665 | return true;
|
666 | }
|
667 |
|
668 |
|
669 |
|
670 | STDMETHODIMP_(ULONG) AddRef() { return 1; }
|
671 | STDMETHODIMP_(ULONG) Release() { return 2; }
|
672 |
|
673 |
|
674 |
|
675 | STDMETHODIMP QueryInterface(REFIID, void **ppvObject){
|
676 | *ppvObject = static_cast<ISampleGrabberCB*>(this);
|
677 | return S_OK;
|
678 | }
|
679 |
|
680 |
|
681 |
|
682 |
|
683 | STDMETHODIMP SampleCB(double , IMediaSample *pSample){
|
684 | if(WaitForSingleObject(hEvent, 0) == WAIT_OBJECT_0) return S_OK;
|
685 |
|
686 | HRESULT hr = pSample->GetPointer(&ptrBuffer);
|
687 |
|
688 | if(hr == S_OK){
|
689 | latestBufferLength = pSample->GetActualDataLength();
|
690 | if(latestBufferLength == numBytes){
|
691 | EnterCriticalSection(&critSection);
|
692 | memcpy(pixels, ptrBuffer, latestBufferLength);
|
693 | newFrame = true;
|
694 | freezeCheck = 1;
|
695 | LeaveCriticalSection(&critSection);
|
696 | SetEvent(hEvent);
|
697 | }else{
|
698 | printf("ERROR: SampleCB() - buffer sizes do not match\n");
|
699 | }
|
700 | }
|
701 |
|
702 | return S_OK;
|
703 | }
|
704 |
|
705 |
|
706 |
|
707 | STDMETHODIMP BufferCB(double, BYTE *, long){
|
708 | return E_NOTIMPL;
|
709 | }
|
710 |
|
711 | int freezeCheck;
|
712 |
|
713 | int latestBufferLength;
|
714 | int numBytes;
|
715 | bool newFrame;
|
716 | bool bufferSetup;
|
717 | unsigned char * pixels;
|
718 | unsigned char * ptrBuffer;
|
719 | CRITICAL_SECTION critSection;
|
720 | HANDLE hEvent;
|
721 | };
|
722 |
|
723 |
|
724 |
|
725 |
|
726 |
|
727 |
|
728 |
|
729 |
|
730 |
|
731 | videoDevice::videoDevice(){
|
732 |
|
733 | pCaptureGraph = NULL;
|
734 | pGraph = NULL;
|
735 | pControl = NULL;
|
736 | pVideoInputFilter = NULL;
|
737 | pGrabber = NULL;
|
738 | pDestFilter = NULL;
|
739 | pGrabberF = NULL;
|
740 | pMediaEvent = NULL;
|
741 | streamConf = NULL;
|
742 | pAmMediaType = NULL;
|
743 |
|
744 |
|
745 | sgCallback = new SampleGrabberCallback();
|
746 | sgCallback->newFrame = false;
|
747 |
|
748 |
|
749 | videoType = MEDIASUBTYPE_RGB24;
|
750 | connection = PhysConn_Video_Composite;
|
751 | storeConn = 0;
|
752 |
|
753 | videoSize = 0;
|
754 | width = 0;
|
755 | height = 0;
|
756 |
|
757 | tryWidth = 0;
|
758 | tryHeight = 0;
|
759 | tryVideoType = MEDIASUBTYPE_RGB24;
|
760 | nFramesForReconnect= 10000;
|
761 | nFramesRunning = 0;
|
762 | myID = -1;
|
763 |
|
764 | tryDiffSize = false;
|
765 | useCrossbar = false;
|
766 | readyToCapture = false;
|
767 | sizeSet = false;
|
768 | setupStarted = false;
|
769 | specificFormat = false;
|
770 | autoReconnect = false;
|
771 | requestedFrameTime = -1;
|
772 |
|
773 | memset(wDeviceName, 0, sizeof(WCHAR) * 255);
|
774 | memset(nDeviceName, 0, sizeof(char) * 255);
|
775 |
|
776 | }
|
777 |
|
778 |
|
779 |
|
780 |
|
781 |
|
782 |
|
783 |
|
784 | void videoDevice::setSize(int w, int h){
|
785 | if(sizeSet){
|
786 | if(verbose)printf("SETUP: Error device size should not be set more than once \n");
|
787 | }
|
788 | else
|
789 | {
|
790 | width = w;
|
791 | height = h;
|
792 | videoSize = w*h*3;
|
793 | sizeSet = true;
|
794 | pixels = new unsigned char[videoSize];
|
795 | pBuffer = new char[videoSize];
|
796 |
|
797 | memset(pixels, 0 , videoSize);
|
798 | sgCallback->setupBuffer(videoSize);
|
799 |
|
800 | }
|
801 | }
|
802 |
|
803 |
|
804 |
|
805 |
|
806 |
|
807 |
|
808 |
|
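// Recursively disconnects and removes from the graph every filter connected
// downstream of pBF.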
809 | void videoDevice::NukeDownstream(IBaseFilter *pBF){
|
810 | IPin *pP, *pTo;
|
811 | ULONG u;
|
812 | IEnumPins *pins = NULL;
|
813 | PIN_INFO pininfo;
|
814 | HRESULT hr = pBF->EnumPins(&pins);
|
815 | pins->Reset();
|
816 | while (hr == NOERROR)
|
817 | {
|
818 | hr = pins->Next(1, &pP, &u);
|
819 | if (hr == S_OK && pP)
|
820 | {
|
821 | pP->ConnectedTo(&pTo);
|
822 | if (pTo)
|
823 | {
|
824 | hr = pTo->QueryPinInfo(&pininfo);
|
825 | if (hr == NOERROR)
|
826 | {
|
827 | if (pininfo.dir == PINDIR_INPUT)
|
828 | {
|
829 | NukeDownstream(pininfo.pFilter);
|
830 | pGraph->Disconnect(pTo);
|
831 | pGraph->Disconnect(pP);
|
832 | pGraph->RemoveFilter(pininfo.pFilter);
|
833 | }
|
834 | pininfo.pFilter->Release();
|
835 | pininfo.pFilter = NULL;
|
836 | }
|
837 | pTo->Release();
|
838 | }
|
839 | pP->Release();
|
840 | }
|
841 | }
|
842 | if (pins) pins->Release();
|
843 | }
|
844 |
|
845 |
|
846 |
|
847 |
|
848 |
|
849 |
|
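// Removes every remaining filter from the filter graph, one filter per
// EnumFilters pass, so the graph can be released cleanly.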
850 | void videoDevice::destroyGraph(){
|
851 | HRESULT hr = NOERROR;
|
852 |
|
853 |
|
854 |
|
855 | int i = 0;
|
856 | while (hr == NOERROR)
|
857 | {
|
858 | IEnumFilters * pEnum = 0;
|
859 | ULONG cFetched;
|
860 |
|
861 |
|
862 |
|
863 | hr = pGraph->EnumFilters(&pEnum);
|
864 | if (FAILED(hr)) { if(verbose)printf("SETUP: pGraph->EnumFilters() failed. \n"); return; }
|
865 |
|
866 | IBaseFilter * pFilter = NULL;
|
867 | if (pEnum->Next(1, &pFilter, &cFetched) == S_OK)
|
868 | {
|
869 | FILTER_INFO FilterInfo={0};
|
870 | memset(&FilterInfo, 0, sizeof(FilterInfo));
|
871 | hr = pFilter->QueryFilterInfo(&FilterInfo);
|
872 | FilterInfo.pGraph->Release();
|
873 |
|
874 | int count = 0;
|
875 | char buffer[255];
|
876 | memset(buffer, 0, 255 * sizeof(char));
|
877 |
|
878 | while( FilterInfo.achName[count] != 0x00 )
|
879 | {
|
880 | buffer[count] = (char)FilterInfo.achName[count];
|
881 | count++;
|
882 | }
|
883 |
|
884 | if(verbose)printf("SETUP: removing filter %s...\n", buffer);
|
885 | hr = pGraph->RemoveFilter(pFilter);
|
886 | if (FAILED(hr)) { if(verbose)printf("SETUP: pGraph->RemoveFilter() failed. \n"); return; }
|
887 | if(verbose)printf("SETUP: filter removed %s \n",buffer);
|
888 |
|
889 | pFilter->Release();
|
890 | pFilter = NULL;
|
891 | }
|
892 | else break;
|
893 | pEnum->Release();
|
894 | pEnum = NULL;
|
895 | i++;
|
896 | }
|
897 |
|
898 | return;
|
899 | }
|
900 |
|
901 |
|
902 |
|
903 |
|
904 |
|
905 |
|
906 |
|
907 |
|
908 | videoDevice::~videoDevice(){
|
909 |
|
910 | if(setupStarted){ if(verbose)printf("\nSETUP: Disconnecting device %i\n", myID); }
|
911 | else{
|
912 | if(sgCallback){
|
913 | sgCallback->Release();
|
914 | delete sgCallback;
|
915 | }
|
916 | return;
|
917 | }
|
918 |
|
919 | HRESULT HR = NOERROR;
|
920 |
|
921 |
|
922 | if( (sgCallback) && (pGrabber) )
|
923 | {
|
924 | pGrabber->SetCallback(NULL, 1);
|
925 | if(verbose)printf("SETUP: freeing Grabber Callback\n");
|
926 | sgCallback->Release();
|
927 |
|
928 |
|
929 | if(sizeSet){
|
930 | delete[] pixels;
|
931 | delete[] pBuffer;
|
932 | }
|
933 |
|
934 | delete sgCallback;
|
935 | }
|
936 |
|
937 |
|
938 | if( (pControl) )
|
939 | {
|
940 | HR = pControl->Pause();
|
941 | if (FAILED(HR)) if(verbose)printf("ERROR - Could not pause pControl\n");
|
942 |
|
943 | HR = pControl->Stop();
|
944 | if (FAILED(HR)) if(verbose)printf("ERROR - Could not stop pControl\n");
|
945 | }
|
946 |
|
947 |
|
948 | if( (pVideoInputFilter) )NukeDownstream(pVideoInputFilter);
|
949 |
|
950 |
|
951 | if( (pDestFilter) ){ if(verbose)printf("SETUP: freeing Renderer \n");
|
952 | (pDestFilter)->Release();
|
953 | (pDestFilter) = 0;
|
954 | }
|
955 | if( (pVideoInputFilter) ){ if(verbose)printf("SETUP: freeing Capture Source \n");
|
956 | (pVideoInputFilter)->Release();
|
957 | (pVideoInputFilter) = 0;
|
958 | }
|
959 | if( (pGrabberF) ){ if(verbose)printf("SETUP: freeing Grabber Filter \n");
|
960 | (pGrabberF)->Release();
|
961 | (pGrabberF) = 0;
|
962 | }
|
963 | if( (pGrabber) ){ if(verbose)printf("SETUP: freeing Grabber \n");
|
964 | (pGrabber)->Release();
|
965 | (pGrabber) = 0;
|
966 | }
|
967 | if( (pControl) ){ if(verbose)printf("SETUP: freeing Control \n");
|
968 | (pControl)->Release();
|
969 | (pControl) = 0;
|
970 | }
|
971 | if( (pMediaEvent) ){ if(verbose)printf("SETUP: freeing Media Event \n");
|
972 | (pMediaEvent)->Release();
|
973 | (pMediaEvent) = 0;
|
974 | }
|
975 | if( (streamConf) ){ if(verbose)printf("SETUP: freeing Stream \n");
|
976 | (streamConf)->Release();
|
977 | (streamConf) = 0;
|
978 | }
|
979 |
|
980 | if( (pAmMediaType) ){ if(verbose)printf("SETUP: freeing Media Type \n");
|
981 | MyDeleteMediaType(pAmMediaType);
|
982 | }
|
983 |
|
984 | if((pMediaEvent)){
|
985 | if(verbose)printf("SETUP: freeing Media Event \n");
|
986 | (pMediaEvent)->Release();
|
987 | (pMediaEvent) = 0;
|
988 | }
|
989 |
|
990 |
|
991 | if( (pGraph) )destroyGraph();
|
992 |
|
993 |
|
994 | if( (pCaptureGraph) ){ if(verbose)printf("SETUP: freeing Capture Graph \n");
|
995 | (pCaptureGraph)->Release();
|
996 | (pCaptureGraph) = 0;
|
997 | }
|
998 | if( (pGraph) ){ if(verbose)printf("SETUP: freeing Main Graph \n");
|
999 | (pGraph)->Release();
|
1000 | (pGraph) = 0;
|
1001 | }
|
1002 |
|
1003 |
|
1004 | delete pDestFilter;
|
1005 | delete pVideoInputFilter;
|
1006 | delete pGrabberF;
|
1007 | delete pGrabber;
|
1008 | delete pControl;
|
1009 | delete streamConf;
|
1010 | delete pMediaEvent;
|
1011 | delete pCaptureGraph;
|
1012 | delete pGraph;
|
1013 |
|
1014 | if(verbose)printf("SETUP: Device %i disconnected and freed\n\n",myID);
|
1015 | }
|
1016 |
|
1017 |
|
1018 |
|
1019 |
|
1020 |
|
1021 |
|
1022 |
|
1023 |
|
1024 |
|
1025 |
|
1026 |
|
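// Constructor: initializes COM, pre-allocates a videoDevice slot for each of the
// VI_MAX_CAMERAS possible cameras, and fills the media subtype and analog TV
// format lookup tables.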
1027 | videoInput::videoInput(){
|
1028 |
|
1029 | comInit();
|
1030 |
|
1031 | devicesFound = 0;
|
1032 | callbackSetCount = 0;
|
1033 | bCallback = true;
|
1034 |
|
1035 |
|
1036 | for(int i=0; i<VI_MAX_CAMERAS; i++) VDList[i] = new videoDevice();
|
1037 |
|
1038 | if(verbose)printf("\n***** VIDEOINPUT LIBRARY - %2.04f - TFW07 *****\n\n",VI_VERSION);
|
1039 |
|
1040 |
|
1041 |
|
1042 |
|
1043 |
|
1044 |
|
1045 |
|
1046 |
|
1047 |
|
1048 | mediaSubtypes[0] = MEDIASUBTYPE_RGB24;
|
1049 | mediaSubtypes[1] = MEDIASUBTYPE_RGB32;
|
1050 | mediaSubtypes[2] = MEDIASUBTYPE_RGB555;
|
1051 | mediaSubtypes[3] = MEDIASUBTYPE_RGB565;
|
1052 | mediaSubtypes[4] = MEDIASUBTYPE_YUY2;
|
1053 | mediaSubtypes[5] = MEDIASUBTYPE_YVYU;
|
1054 | mediaSubtypes[6] = MEDIASUBTYPE_YUYV;
|
1055 | mediaSubtypes[7] = MEDIASUBTYPE_IYUV;
|
1056 | mediaSubtypes[8] = MEDIASUBTYPE_UYVY;
|
1057 | mediaSubtypes[9] = MEDIASUBTYPE_YV12;
|
1058 | mediaSubtypes[10] = MEDIASUBTYPE_YVU9;
|
1059 | mediaSubtypes[11] = MEDIASUBTYPE_Y411;
|
1060 | mediaSubtypes[12] = MEDIASUBTYPE_Y41P;
|
1061 | mediaSubtypes[13] = MEDIASUBTYPE_Y211;
|
1062 | mediaSubtypes[14] = MEDIASUBTYPE_AYUV;
|
1063 | mediaSubtypes[15] = MEDIASUBTYPE_MJPG;
|
1064 |
|
1065 |
|
1066 | mediaSubtypes[16] = MEDIASUBTYPE_Y800;
|
1067 | mediaSubtypes[17] = MEDIASUBTYPE_Y8;
|
1068 | mediaSubtypes[18] = MEDIASUBTYPE_GREY;
|
1069 |
|
1070 |
|
1071 | formatTypes[VI_NTSC_M] = AnalogVideo_NTSC_M;
|
1072 | formatTypes[VI_NTSC_M_J] = AnalogVideo_NTSC_M_J;
|
1073 | formatTypes[VI_NTSC_433] = AnalogVideo_NTSC_433;
|
1074 |
|
1075 | formatTypes[VI_PAL_B] = AnalogVideo_PAL_B;
|
1076 | formatTypes[VI_PAL_D] = AnalogVideo_PAL_D;
|
1077 | formatTypes[VI_PAL_G] = AnalogVideo_PAL_G;
|
1078 | formatTypes[VI_PAL_H] = AnalogVideo_PAL_H;
|
1079 | formatTypes[VI_PAL_I] = AnalogVideo_PAL_I;
|
1080 | formatTypes[VI_PAL_M] = AnalogVideo_PAL_M;
|
1081 | formatTypes[VI_PAL_N] = AnalogVideo_PAL_N;
|
1082 | formatTypes[VI_PAL_NC] = AnalogVideo_PAL_N_COMBO;
|
1083 |
|
1084 | formatTypes[VI_SECAM_B] = AnalogVideo_SECAM_B;
|
1085 | formatTypes[VI_SECAM_D] = AnalogVideo_SECAM_D;
|
1086 | formatTypes[VI_SECAM_G] = AnalogVideo_SECAM_G;
|
1087 | formatTypes[VI_SECAM_H] = AnalogVideo_SECAM_H;
|
1088 | formatTypes[VI_SECAM_K] = AnalogVideo_SECAM_K;
|
1089 | formatTypes[VI_SECAM_K1] = AnalogVideo_SECAM_K1;
|
1090 | formatTypes[VI_SECAM_L] = AnalogVideo_SECAM_L;
|
1091 |
|
1092 |
|
1093 |
|
1094 | }
|
1095 |
|
1096 |
|
1097 |
|
1098 |
|
1099 |
|
1100 |
|
1101 |
|
1102 | void videoInput::setVerbose(bool _verbose){
|
1103 | verbose = _verbose;
|
1104 | }
|
1105 |
|
1106 |
|
1107 |
|
1108 |
|
1109 |
|
1110 |
|
1111 | void videoInput::setUseCallback(bool useCallback){
|
1112 | if(callbackSetCount == 0){
|
1113 | bCallback = useCallback;
|
1114 | callbackSetCount = 1;
|
1115 | }else{
|
1116 | printf("ERROR: setUseCallback can only be called before setup\n");
|
1117 | }
|
1118 | }
|
1119 |
|
1120 |
|
1121 |
|
1122 |
|
1123 |
|
1124 |
|
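// Stores the requested frame time as 10,000,000 / fps (100-nanosecond units);
// the driver is asked for this rate during setup but is not guaranteed to honor it.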
1125 | void videoInput::setIdealFramerate(int deviceNumber, int idealFramerate){
|
1126 | if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return;
|
1127 |
|
1128 | if( idealFramerate > 0 ){
|
1129 | VDList[deviceNumber]->requestedFrameTime = (unsigned long)(10000000 / idealFramerate);
|
1130 | }
|
1131 | }
|
1132 |
|
1133 |
|
1134 |
|
1135 |
|
1136 |
|
1137 |
|
1138 |
|
1139 | void videoInput::setAutoReconnectOnFreeze(int deviceNumber, bool doReconnect, int numMissedFramesBeforeReconnect){
|
1140 | if(deviceNumber >= VI_MAX_CAMERAS) return;
|
1141 |
|
1142 | VDList[deviceNumber]->autoReconnect = doReconnect;
|
1143 | VDList[deviceNumber]->nFramesForReconnect = numMissedFramesBeforeReconnect;
|
1144 |
|
1145 | }
|
1146 |
|
1147 |
|
1148 |
|
1149 |
|
1150 |
|
1151 |
|
1152 |
|
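// setupDevice() overloads: optionally set the attempted capture size and/or the
// physical connection type (VI_COMPOSITE, VI_S_VIDEO, ...) before running setup().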
1153 | bool videoInput::setupDevice(int deviceNumber){
|
1154 | if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
|
1155 |
|
1156 | if(setup(deviceNumber))return true;
|
1157 | return false;
|
1158 | }
|
1159 |
|
1160 |
|
1161 |
|
1162 |
|
1163 |
|
1164 |
|
1165 |
|
1166 | bool videoInput::setupDevice(int deviceNumber, int connection){
|
1167 | if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
|
1168 |
|
1169 | setPhyCon(deviceNumber, connection);
|
1170 | if(setup(deviceNumber))return true;
|
1171 | return false;
|
1172 | }
|
1173 |
|
1174 |
|
1175 |
|
1176 |
|
1177 |
|
1178 |
|
1179 |
|
1180 | bool videoInput::setupDevice(int deviceNumber, int w, int h){
|
1181 | if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
|
1182 |
|
1183 | setAttemptCaptureSize(deviceNumber,w,h);
|
1184 | if(setup(deviceNumber))return true;
|
1185 | return false;
|
1186 | }
|
1187 |
|
1188 |
|
1189 |
|
1190 |
|
1191 |
|
1192 |
|
1193 |
|
1194 |
|
1195 | bool videoInput::setupDeviceFourcc(int deviceNumber, int w, int h,int fourcc){
|
1196 | if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
|
1197 |
|
1198 | if ( fourcc > 0 ) {
|
1199 | GUID *mediaType = getMediaSubtypeFromFourcc(fourcc);
|
1200 | if ( mediaType ) {
|
1201 | setAttemptCaptureSize(deviceNumber,w,h,*mediaType);
|
1202 | }
|
1203 | } else {
|
1204 | setAttemptCaptureSize(deviceNumber,w,h);
|
1205 | }
|
1206 | if(setup(deviceNumber))return true;
|
1207 | return false;
|
1208 | }
|
1209 |
|
1210 |
|
1211 |
|
1212 |
|
1213 |
|
1214 |
|
1215 |
|
1216 | bool videoInput::setupDevice(int deviceNumber, int w, int h, int connection){
|
1217 | if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
|
1218 |
|
1219 | setAttemptCaptureSize(deviceNumber,w,h);
|
1220 | setPhyCon(deviceNumber, connection);
|
1221 | if(setup(deviceNumber))return true;
|
1222 | return false;
|
1223 | }
|
1224 |
|
1225 |
|
1226 |
|
1227 |
|
1228 |
|
1229 |
|
1230 |
|
1231 |
|
1232 |
|
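// Applies an analog TV standard (VI_NTSC_*, VI_PAL_*, VI_SECAM_*) through
// IAMAnalogVideoDecoder; only succeeds if the decoder reports the format as available.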
1233 | bool videoInput::setFormat(int deviceNumber, int format){
|
1234 | if(deviceNumber >= VI_MAX_CAMERAS || !VDList[deviceNumber]->readyToCapture) return false;
|
1235 |
|
1236 | bool returnVal = false;
|
1237 |
|
1238 | if(format >= 0 && format < VI_NUM_FORMATS){
|
1239 | VDList[deviceNumber]->formatType = formatTypes[format];
|
1240 | VDList[deviceNumber]->specificFormat = true;
|
1241 |
|
1242 | if(VDList[deviceNumber]->specificFormat){
|
1243 |
|
1244 | HRESULT hr = getDevice(&VDList[deviceNumber]->pVideoInputFilter, deviceNumber, VDList[deviceNumber]->wDeviceName, VDList[deviceNumber]->nDeviceName);
|
1245 | if(hr != S_OK){
|
1246 | return false;
|
1247 | }
|
1248 |
|
1249 | IAMAnalogVideoDecoder *pVideoDec = NULL;
|
1250 | hr = VDList[deviceNumber]->pCaptureGraph->FindInterface(NULL, &MEDIATYPE_Video, VDList[deviceNumber]->pVideoInputFilter, IID_IAMAnalogVideoDecoder, (void **)&pVideoDec);
|
1251 |
|
1252 |
|
1253 |
|
1254 | if(VDList[deviceNumber]->pVideoInputFilter)VDList[deviceNumber]->pVideoInputFilter->Release();
|
1255 | if(VDList[deviceNumber]->pVideoInputFilter)VDList[deviceNumber]->pVideoInputFilter = NULL;
|
1256 |
|
1257 | if(FAILED(hr)){
|
1258 | printf("SETUP: couldn't set requested format\n");
|
1259 | }else{
|
1260 | long lValue = 0;
|
1261 | hr = pVideoDec->get_AvailableTVFormats(&lValue);
|
1262 | if( SUCCEEDED(hr) && (lValue & VDList[deviceNumber]->formatType) )
|
1263 | {
|
1264 | hr = pVideoDec->put_TVFormat(VDList[deviceNumber]->formatType);
|
1265 | if( FAILED(hr) ){
|
1266 | printf("SETUP: couldn't set requested format\n");
|
1267 | }else{
|
1268 | returnVal = true;
|
1269 | }
|
1270 | }
|
1271 |
|
1272 | pVideoDec->Release();
|
1273 | pVideoDec = NULL;
|
1274 | }
|
1275 | }
|
1276 | }
|
1277 |
|
1278 | return returnVal;
|
1279 | }
|
1280 |
|
1281 |
|
1282 |
|
1283 |
|
1284 |
|
1285 |
|
1286 | char videoInput::deviceNames[VI_MAX_CAMERAS][255]={{0}};
|
1287 |
|
1288 | char * videoInput::getDeviceName(int deviceID){
|
1289 | if( deviceID >= VI_MAX_CAMERAS ){
|
1290 | return NULL;
|
1291 | }
|
1292 | return deviceNames[deviceID];
|
1293 | }
|
1294 |
|
1295 |
|
1296 |
|
1297 |
|
1298 |
|
1299 |
|
1300 |
|
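// Enumerates devices in CLSID_VideoInputDeviceCategory via ICreateDevEnum,
// caches their friendly names in deviceNames[], and returns how many were found.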
1301 | int videoInput::listDevices(bool silent){
|
1302 |
|
1303 |
|
1304 | comInit();
|
1305 |
|
1306 | if(!silent)printf("\nVIDEOINPUT SPY MODE!\n\n");
|
1307 |
|
1308 |
|
1309 | ICreateDevEnum *pDevEnum = NULL;
|
1310 | IEnumMoniker *pEnum = NULL;
|
1311 | int deviceCounter = 0;
|
1312 |
|
1313 | HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
|
1314 | CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
|
1315 | reinterpret_cast<void**>(&pDevEnum));
|
1316 |
|
1317 |
|
1318 | if (SUCCEEDED(hr))
|
1319 | {
|
1320 |
|
1321 | hr = pDevEnum->CreateClassEnumerator(
|
1322 | CLSID_VideoInputDeviceCategory,
|
1323 | &pEnum, 0);
|
1324 |
|
1325 | if(hr == S_OK){
|
1326 |
|
1327 | if(!silent)printf("SETUP: Looking For Capture Devices\n");
|
1328 | IMoniker *pMoniker = NULL;
|
1329 |
|
1330 | while (pEnum->Next(1, &pMoniker, NULL) == S_OK){
|
1331 |
|
1332 | IPropertyBag *pPropBag;
|
1333 | hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
|
1334 | (void**)(&pPropBag));
|
1335 |
|
1336 | if (FAILED(hr)){
|
1337 | pMoniker->Release();
|
1338 | continue;
|
1339 | }
|
1340 |
|
1341 |
|
1342 |
|
1343 | VARIANT varName;
|
1344 | VariantInit(&varName);
|
1345 | hr = pPropBag->Read(L"Description", &varName, 0);
|
1346 |
|
1347 | if (FAILED(hr)) hr = pPropBag->Read(L"FriendlyName", &varName, 0);
|
1348 |
|
1349 | if (SUCCEEDED(hr)){
|
1350 |
|
1351 | hr = pPropBag->Read(L"FriendlyName", &varName, 0);
|
1352 |
|
1353 | int count = 0;
|
1354 | int maxLen = sizeof(deviceNames[0])/sizeof(deviceNames[0][0]) - 2;
|
1355 | while( varName.bstrVal[count] != 0x00 && count < maxLen) {
|
1356 | deviceNames[deviceCounter][count] = (char)varName.bstrVal[count];
|
1357 | count++;
|
1358 | }
|
1359 | deviceNames[deviceCounter][count] = 0;
|
1360 |
|
1361 | if(!silent)printf("SETUP: %i) %s \n",deviceCounter, deviceNames[deviceCounter]);
|
1362 | }
|
1363 |
|
1364 | pPropBag->Release();
|
1365 | pPropBag = NULL;
|
1366 |
|
1367 | pMoniker->Release();
|
1368 | pMoniker = NULL;
|
1369 |
|
1370 | deviceCounter++;
|
1371 | }
|
1372 |
|
1373 | pDevEnum->Release();
|
1374 | pDevEnum = NULL;
|
1375 |
|
1376 | pEnum->Release();
|
1377 | pEnum = NULL;
|
1378 | }
|
1379 |
|
1380 | if(!silent)printf("SETUP: %i Device(s) found\n\n", deviceCounter);
|
1381 | }
|
1382 |
|
1383 | comUnInit();
|
1384 |
|
1385 | return deviceCounter;
|
1386 | }
|
1387 |
|
1388 |
|
1389 |
|
1390 |
|
1391 |
|
1392 |
|
1393 |
|
1394 | int videoInput::getWidth(int id){
|
1395 |
|
1396 | if(isDeviceSetup(id))
|
1397 | {
|
1398 | return VDList[id] ->width;
|
1399 | }
|
1400 |
|
1401 | return 0;
|
1402 |
|
1403 | }
|
1404 |
|
1405 |
|
1406 |
|
1407 |
|
1408 |
|
1409 |
|
1410 |
|
1411 | int videoInput::getHeight(int id){
|
1412 |
|
1413 | if(isDeviceSetup(id))
|
1414 | {
|
1415 | return VDList[id] ->height;
|
1416 | }
|
1417 |
|
1418 | return 0;
|
1419 |
|
1420 | }
|
1421 |
|
1422 |
|
1423 |
|
1424 |
|
1425 |
|
1426 | int videoInput::getFourcc(int id){
|
1427 |
|
1428 | if(isDeviceSetup(id))
|
1429 | {
|
1430 | return getFourccFromMediaSubtype(VDList[id]->videoType);
|
1431 | }
|
1432 |
|
1433 | return 0;
|
1434 |
|
1435 | }
|
1436 |
|
1437 | double videoInput::getFPS(int id){
|
1438 |
|
1439 | if(isDeviceSetup(id))
|
1440 | {
|
1441 | double frameTime= VDList[id]->requestedFrameTime;
|
1442 | if (frameTime>0) {
|
1443 | return (10000000.0 / frameTime);
|
1444 | }
|
1445 | }
|
1446 |
|
1447 | return 0;
|
1448 |
|
1449 | }
|
1450 |
|
1451 |
|
1452 |
|
1453 |
|
1454 |
|
1455 |
|
1456 |
|
1457 | int videoInput::getSize(int id){
|
1458 |
|
1459 | if(isDeviceSetup(id))
|
1460 | {
|
1461 | return VDList[id] ->videoSize;
|
1462 | }
|
1463 |
|
1464 | return 0;
|
1465 |
|
1466 | }
|
1467 |
|
1468 |
|
1469 |
|
1470 |
|
1471 |
|
1472 |
|
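// Copies the latest frame for the device into dstBuffer. In callback mode it
// waits (up to 1 s) for the grabber callback's event and copies from its buffer;
// otherwise it polls ISampleGrabber::GetCurrentBuffer(). processPixels() handles
// the optional red/blue swap and vertical flip.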
1473 | bool videoInput::getPixels(int id, unsigned char * dstBuffer, bool flipRedAndBlue, bool flipImage){
|
1474 |
|
1475 | bool success = false;
|
1476 |
|
1477 | if(isDeviceSetup(id)){
|
1478 | if(bCallback){
|
1479 |
|
1480 |
|
1481 | DWORD result = WaitForSingleObject(VDList[id]->sgCallback->hEvent, 1000);
|
1482 | if( result != WAIT_OBJECT_0) return false;
|
1483 |
|
1484 |
|
1485 | EnterCriticalSection(&VDList[id]->sgCallback->critSection);
|
1486 |
|
1487 | unsigned char * src = VDList[id]->sgCallback->pixels;
|
1488 | unsigned char * dst = dstBuffer;
|
1489 | int height = VDList[id]->height;
|
1490 | int width = VDList[id]->width;
|
1491 |
|
1492 | processPixels(src, dst, width, height, flipRedAndBlue, flipImage);
|
1493 | VDList[id]->sgCallback->newFrame = false;
|
1494 |
|
1495 | LeaveCriticalSection(&VDList[id]->sgCallback->critSection);
|
1496 |
|
1497 | ResetEvent(VDList[id]->sgCallback->hEvent);
|
1498 |
|
1499 | success = true;
|
1500 |
|
1501 | }
|
1502 | else{
|
1503 |
|
1504 | long bufferSize = VDList[id]->videoSize;
|
1505 | HRESULT hr = VDList[id]->pGrabber->GetCurrentBuffer(&bufferSize, (long *)VDList[id]->pBuffer);
|
1506 | if(hr==S_OK){
|
1507 | int numBytes = VDList[id]->videoSize;
|
1508 | if (numBytes == bufferSize){
|
1509 |
|
1510 | unsigned char * src = (unsigned char * )VDList[id]->pBuffer;
|
1511 | unsigned char * dst = dstBuffer;
|
1512 | int height = VDList[id]->height;
|
1513 | int width = VDList[id]->width;
|
1514 |
|
1515 | processPixels(src, dst, width, height, flipRedAndBlue, flipImage);
|
1516 | success = true;
|
1517 | }else{
|
1518 | if(verbose)printf("ERROR: GetPixels() - bufferSizes do not match!\n");
|
1519 | }
|
1520 | }else{
|
1521 | if(verbose)printf("ERROR: GetPixels() - Unable to grab frame for device %i\n", id);
|
1522 | }
|
1523 | }
|
1524 | }
|
1525 |
|
1526 | return success;
|
1527 | }
|
1528 |
|
1529 |
|
1530 |
|
1531 |
|
1532 |
|
1533 | unsigned char * videoInput::getPixels(int id, bool flipRedAndBlue, bool flipImage){
|
1534 |
|
1535 | if(isDeviceSetup(id)){
|
1536 | getPixels(id, VDList[id]->pixels, flipRedAndBlue, flipImage);
|
1537 | }
|
1538 |
|
1539 | return VDList[id]->pixels;
|
1540 | }
|
1541 |
|
1542 |
|
1543 |
|
1544 |
|
1545 |
|
1546 |
|
1547 |
|
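// Reports whether a frame has arrived since the last getPixels() call. Also
// counts missed frames and, when auto-reconnect is enabled, restarts a device
// that appears frozen.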
1548 | bool videoInput::isFrameNew(int id){
|
1549 | if(!isDeviceSetup(id)) return false;
|
1550 | if(!bCallback)return true;
|
1551 |
|
1552 | bool result = false;
|
1553 | bool freeze = false;
|
1554 |
|
1555 |
|
1556 | EnterCriticalSection(&VDList[id]->sgCallback->critSection);
|
1557 | result = VDList[id]->sgCallback->newFrame;
|
1558 |
|
1559 |
|
1560 | if(VDList[id]->nFramesRunning > 400 && VDList[id]->sgCallback->freezeCheck > VDList[id]->nFramesForReconnect ){
|
1561 | freeze = true;
|
1562 | }
|
1563 |
|
1564 |
|
1565 |
|
1566 |
|
1567 | VDList[id]->sgCallback->freezeCheck++;
|
1568 | LeaveCriticalSection(&VDList[id]->sgCallback->critSection);
|
1569 |
|
1570 | VDList[id]->nFramesRunning++;
|
1571 |
|
1572 | if(freeze && VDList[id]->autoReconnect){
|
1573 | if(verbose)printf("ERROR: Device seems frozen - attempting to reconnect\n");
|
1574 | if( !restartDevice(VDList[id]->myID) ){
|
1575 | if(verbose)printf("ERROR: Unable to reconnect to device\n");
|
1576 | }else{
|
1577 | if(verbose)printf("SUCCESS: Able to reconnect to device\n");
|
1578 | }
|
1579 | }
|
1580 |
|
1581 | return result;
|
1582 | }
|
1583 |
|
1584 |
|
1585 |
|
1586 |
|
1587 |
|
1588 |
|
1589 |
|
1590 | bool videoInput::isDeviceSetup(int id){
|
1591 |
|
1592 | if(id<devicesFound && VDList[id]->readyToCapture)return true;
|
1593 | else return false;
|
1594 |
|
1595 | }
|
1596 |
|
1597 |
|
1598 |
|
1599 |
|
1600 |
|
1601 |
|
1602 |
|
1603 |
|
1604 | void __cdecl videoInput::basicThread(void * objPtr){
|
1605 |
|
1606 |
|
1607 |
|
1608 | videoDevice * vd = *( (videoDevice **)(objPtr) );
|
1609 | ShowFilterPropertyPages(vd->pVideoInputFilter);
|
1610 |
|
1611 |
|
1612 |
|
1613 |
|
1614 | if(vd->pVideoInputFilter)vd->pVideoInputFilter->Release();
|
1615 | if(vd->pVideoInputFilter)vd->pVideoInputFilter = NULL;
|
1616 |
|
1617 | return;
|
1618 | }
|
1619 |
|
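// Opens the driver's property page for the device on a separate thread
// (basicThread) so the call does not block capture.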
1620 | void videoInput::showSettingsWindow(int id){
|
1621 |
|
1622 | if(isDeviceSetup(id)){
|
1623 | HANDLE myTempThread;
|
1624 |
|
1625 |
|
1626 |
|
1627 |
|
1628 | HRESULT hr = getDevice(&VDList[id]->pVideoInputFilter, id, VDList[id]->wDeviceName, VDList[id]->nDeviceName);
|
1629 | if(hr == S_OK){
|
1630 | myTempThread = (HANDLE)_beginthread(basicThread, 0, (void *)&VDList[id]);
|
1631 | }
|
1632 | }
|
1633 | }
|
1634 |
|
1635 |
|
1636 |
|
1637 | bool videoInput::getVideoSettingFilter(int deviceID, long Property, long &min, long &max, long &SteppingDelta, long &currentValue, long &flags, long &defaultValue){
|
1638 | if( !isDeviceSetup(deviceID) )return false;
|
1639 |
|
1640 | HRESULT hr;
|
1641 |
|
1642 |
|
1643 | videoDevice * VD = VDList[deviceID];
|
1644 |
|
1645 | hr = getDevice(&VD->pVideoInputFilter, deviceID, VD->wDeviceName, VD->nDeviceName);
|
1646 | if (FAILED(hr)){
|
1647 | printf("setVideoSetting - getDevice Error\n");
|
1648 | return false;
|
1649 | }
|
1650 |
|
1651 | IAMVideoProcAmp *pAMVideoProcAmp = NULL;
|
1652 |
|
1653 | hr = VD->pVideoInputFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pAMVideoProcAmp);
|
1654 | if(FAILED(hr)){
|
1655 | printf("setVideoSetting - QueryInterface Error\n");
|
1656 | if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
|
1657 | if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
|
1658 | return false;
|
1659 | }
|
1660 |
|
1661 | char propStr[16];
|
1662 | getVideoPropertyAsString(Property,propStr);
|
1663 |
|
1664 | if (verbose) printf("Setting video setting %s.\n", propStr);
|
1665 |
|
1666 | pAMVideoProcAmp->GetRange(Property, &min, &max, &SteppingDelta, &defaultValue, &flags);
|
1667 | if (verbose) printf("Range for video setting %s: Min:%ld Max:%ld SteppingDelta:%ld Default:%ld Flags:%ld\n", propStr, min, max, SteppingDelta, defaultValue, flags);
|
1668 | pAMVideoProcAmp->Get(Property, &currentValue, &flags);
|
1669 |
|
1670 | if(pAMVideoProcAmp)pAMVideoProcAmp->Release();
|
1671 | if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
|
1672 | if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
|
1673 |
|
1674 | return true;
|
1675 |
|
1676 | }
|
1677 |
|
1678 |
|
1679 |
|
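// Maps pctValue in [0..1] onto the property's [min..max] range reported by the
// driver and snaps the result to the nearest stepping delta before calling
// setVideoSettingFilter().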
1680 | bool videoInput::setVideoSettingFilterPct(int deviceID, long Property, float pctValue, long Flags){
|
1681 | if( !isDeviceSetup(deviceID) )return false;
|
1682 |
|
1683 | long min, max, currentValue, flags, defaultValue, stepAmnt;
|
1684 |
|
1685 | if( !getVideoSettingFilter(deviceID, Property, min, max, stepAmnt, currentValue, flags, defaultValue) )return false;
|
1686 |
|
1687 | if(pctValue > 1.0)pctValue = 1.0;
|
1688 | else if(pctValue < 0)pctValue = 0.0;
|
1689 |
|
1690 | float range = (float)max - (float)min;
|
1691 | if(range <= 0)return false;
|
1692 | if(stepAmnt == 0) return false;
|
1693 |
|
1694 | long value = (long)( (float)min + range * pctValue );
|
1695 | long rasterValue = value;
|
1696 |
|
1697 |
|
1698 |
|
1699 | if( range == stepAmnt ){
|
1700 | if( pctValue < 0.5)rasterValue = min;
|
1701 | else rasterValue = max;
|
1702 | }else{
|
1703 |
|
1704 | long mod = value % stepAmnt;
|
1705 | float halfStep = (float)stepAmnt * 0.5f;
|
1706 | if( mod < halfStep ) rasterValue -= mod;
|
1707 | else rasterValue += stepAmnt - mod;
|
1708 | printf("RASTER - pctValue is %f - value is %i - step is %i - mod is %i - rasterValue is %i\n", pctValue, value, stepAmnt, mod, rasterValue);
|
1709 | }
|
1710 |
|
1711 | return setVideoSettingFilter(deviceID, Property, rasterValue, Flags, false);
|
1712 | }
|
1713 |
|
1714 |
|
1715 |
|
1716 | bool videoInput::setVideoSettingFilter(int deviceID, long Property, long lValue, long Flags, bool useDefaultValue){
|
1717 | if( !isDeviceSetup(deviceID) )return false;
|
1718 |
|
1719 | HRESULT hr;
|
1720 |
|
1721 |
|
1722 | char propStr[16];
|
1723 | getVideoPropertyAsString(Property,propStr);
|
1724 |
|
1725 | videoDevice * VD = VDList[deviceID];
|
1726 |
|
1727 | hr = getDevice(&VD->pVideoInputFilter, deviceID, VD->wDeviceName, VD->nDeviceName);
|
1728 | if (FAILED(hr)){
|
1729 | printf("setVideoSetting - getDevice Error\n");
|
1730 | return false;
|
1731 | }
|
1732 |
|
1733 | IAMVideoProcAmp *pAMVideoProcAmp = NULL;
|
1734 |
|
1735 | hr = VD->pVideoInputFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pAMVideoProcAmp);
|
1736 | if(FAILED(hr)){
|
1737 | printf("setVideoSetting - QueryInterface Error\n");
|
1738 | if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
|
1739 | if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
|
1740 | return false;
|
1741 | }
|
1742 |
|
1743 | if (verbose) printf("Setting video setting %s.\n", propStr);
|
1744 | long CurrVal, Min, Max, SteppingDelta, Default, CapsFlags, AvailableCapsFlags = 0;
|
1745 |
|
1746 |
|
1747 | pAMVideoProcAmp->GetRange(Property, &Min, &Max, &SteppingDelta, &Default, &AvailableCapsFlags);
|
1748 | if (verbose) printf("Range for video setting %s: Min:%ld Max:%ld SteppingDelta:%ld Default:%ld Flags:%ld\n", propStr, Min, Max, SteppingDelta, Default, AvailableCapsFlags);
|
1749 | pAMVideoProcAmp->Get(Property, &CurrVal, &CapsFlags);
|
1750 |
|
1751 | if (verbose) printf("Current value: %ld Flags %ld (%s)\n", CurrVal, CapsFlags, (CapsFlags == 1 ? "Auto" : (CapsFlags == 2 ? "Manual" : "Unknown")));
|
1752 |
|
1753 | if (useDefaultValue) {
|
1754 | pAMVideoProcAmp->Set(Property, Default, VideoProcAmp_Flags_Auto);
|
1755 | }
|
1756 | else{
|
1757 |
|
1758 | pAMVideoProcAmp->Set(Property, lValue, Flags);
|
1759 | }
|
1760 |
|
1761 | if(pAMVideoProcAmp)pAMVideoProcAmp->Release();
|
1762 | if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
|
1763 | if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
|
1764 |
|
1765 | return true;
|
1766 |
|
1767 | }
|
1768 |
|
1769 |
|
1770 | bool videoInput::setVideoSettingCameraPct(int deviceID, long Property, float pctValue, long Flags){
|
1771 | if( !isDeviceSetup(deviceID) )return false;
|
1772 |
|
1773 | long min, max, currentValue, flags, defaultValue, stepAmnt;
|
1774 |
|
1775 | if( !getVideoSettingCamera(deviceID, Property, min, max, stepAmnt, currentValue, flags, defaultValue) )return false;
|
1776 |
|
1777 | if(pctValue > 1.0)pctValue = 1.0;
|
1778 | else if(pctValue < 0)pctValue = 0.0;
|
1779 |
|
1780 | float range = (float)max - (float)min;
|
1781 | if(range <= 0)return false;
|
1782 | if(stepAmnt == 0) return false;
|
1783 |
|
1784 | long value = (long)( (float)min + range * pctValue );
|
1785 | long rasterValue = value;
|
1786 |
|
1787 |
|
1788 |
|
1789 | if( range == stepAmnt ){
|
1790 | if( pctValue < 0.5)rasterValue = min;
|
1791 | else rasterValue = max;
|
1792 | }else{
|
1793 |
|
1794 | long mod = value % stepAmnt;
|
1795 | float halfStep = (float)stepAmnt * 0.5f;
|
1796 | if( mod < halfStep ) rasterValue -= mod;
|
1797 | else rasterValue += stepAmnt - mod;
|
1798 | printf("RASTER - pctValue is %f - value is %i - step is %i - mod is %i - rasterValue is %i\n", pctValue, value, stepAmnt, mod, rasterValue);
|
1799 | }
|
1800 |
|
1801 | return setVideoSettingCamera(deviceID, Property, rasterValue, Flags, false);
|
1802 | }
|
1803 |
|
1804 |
|
1805 | bool videoInput::setVideoSettingCamera(int deviceID, long Property, long lValue, long Flags, bool useDefaultValue){
|
1806 | IAMCameraControl *pIAMCameraControl;
|
1807 | if(isDeviceSetup(deviceID))
|
1808 | {
|
1809 | HRESULT hr;
|
1810 | hr = getDevice(&VDList[deviceID]->pVideoInputFilter, deviceID, VDList[deviceID]->wDeviceName, VDList[deviceID]->nDeviceName);
|
1811 |
|
1812 | char propStr[16];
|
1813 | getVideoPropertyAsString(Property,propStr);
|
1814 |
|
1815 | if (verbose) printf("Setting video setting %s.\n", propStr);
|
1816 | hr = VDList[deviceID]->pVideoInputFilter->QueryInterface(IID_IAMCameraControl, (void**)&pIAMCameraControl);
|
1817 | if (FAILED(hr)) {
|
1818 | printf("Error\n");
|
1819 | return false;
|
1820 | }
|
1821 | else
|
1822 | {
|
1823 | long CurrVal, Min, Max, SteppingDelta, Default, CapsFlags, AvailableCapsFlags;
|
1824 | pIAMCameraControl->GetRange(Property, &Min, &Max, &SteppingDelta, &Default, &AvailableCapsFlags);
|
1825 | if (verbose) printf("Range for video setting %s: Min:%ld Max:%ld SteppingDelta:%ld Default:%ld Flags:%ld\n", propStr, Min, Max, SteppingDelta, Default, AvailableCapsFlags);
|
1826 | pIAMCameraControl->Get(Property, &CurrVal, &CapsFlags);
|
1827 | if (verbose) printf("Current value: %ld Flags %ld (%s)\n", CurrVal, CapsFlags, (CapsFlags == 1 ? "Auto" : (CapsFlags == 2 ? "Manual" : "Unknown")));
|
1828 | if (useDefaultValue) {
|
1829 | pIAMCameraControl->Set(Property, Default, CameraControl_Flags_Auto);
|
1830 | }
|
1831 | else
|
1832 | {
|
1833 |
|
1834 | pIAMCameraControl->Set(Property, lValue, Flags);
|
1835 | }
|
1836 | pIAMCameraControl->Release();
|
1837 | return true;
|
1838 | }
|
1839 | }
|
1840 | return false;
|
1841 | }
|
1842 |
|
1843 |
|
1844 |
|
1845 | bool videoInput::getVideoSettingCamera(int deviceID, long Property, long &min, long &max, long &SteppingDelta, long &currentValue, long &flags, long &defaultValue){
|
1846 | if( !isDeviceSetup(deviceID) )return false;
|
1847 |
|
1848 | HRESULT hr;
|
1849 |
|
1850 |
|
1851 | videoDevice * VD = VDList[deviceID];
|
1852 |
|
1853 | hr = getDevice(&VD->pVideoInputFilter, deviceID, VD->wDeviceName, VD->nDeviceName);
|
1854 | if (FAILED(hr)){
|
1855 | printf("getVideoSettingCamera - getDevice Error\n");
|
1856 | return false;
|
1857 | }
|
1858 |
|
1859 | IAMCameraControl *pIAMCameraControl = NULL;
|
1860 |
|
1861 | hr = VD->pVideoInputFilter->QueryInterface(IID_IAMCameraControl, (void**)&pIAMCameraControl);
|
1862 | if(FAILED(hr)){
|
1863 | printf("getVideoSettingCamera - QueryInterface Error\n");
|
1864 | if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
|
1865 | if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
|
1866 | return false;
|
1867 | }
|
1868 |
|
1869 | char propStr[16];
|
1870 | getVideoPropertyAsString(Property,propStr);
|
1871 | if (verbose) printf("Getting video setting %s.\n", propStr);
|
1872 |
|
1873 | pIAMCameraControl->GetRange(Property, &min, &max, &SteppingDelta, &defaultValue, &flags);
|
1874 | if (verbose) printf("Range for video setting %s: Min:%ld Max:%ld SteppingDelta:%ld Default:%ld Flags:%ld\n", propStr, min, max, SteppingDelta, defaultValue, flags);
|
1875 | pIAMCameraControl->Get(Property, ¤tValue, &flags);
|
1876 |
|
1877 | if(pIAMCameraControl)pIAMCameraControl->Release();
|
1878 | if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
|
1879 | if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
|
1880 |
|
1881 | return true;
|
1882 |
|
1883 | }
|
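/*
   Sketch: read back the supported range before writing a raw value (the device ID
   and the use of CameraControl_Zoom are assumptions; real code should also guard
   against step == 0):

       long min, max, step, cur, flags, def;
       if( VI.getVideoSettingCamera(0, CameraControl_Zoom, min, max, step, cur, flags, def) ){
           long mid = min + ((max - min) / (2 * step)) * step;   // midpoint snapped to a valid step
           VI.setVideoSettingCamera(0, CameraControl_Zoom, mid, CameraControl_Flags_Manual, false);
       }
*/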
1884 |
|
1885 |
|
1886 |
|
1887 |
|
1888 |
|
1889 |
|
1890 |
|
1891 | void videoInput::stopDevice(int id){
|
1892 | if(id < VI_MAX_CAMERAS)
|
1893 | {
|
1894 | delete VDList[id];
|
1895 | VDList[id] = new videoDevice();
|
1896 | }
|
1897 |
|
1898 | }
|
1899 |
|
1900 |
|
1901 |
|
1902 |
|
1903 |
|
1904 |
|
1905 | bool videoInput::restartDevice(int id){
|
1906 | if(isDeviceSetup(id))
|
1907 | {
|
1908 | int conn = VDList[id]->storeConn;
|
1909 | int tmpW = VDList[id]->width;
|
1910 | int tmpH = VDList[id]->height;
|
1911 |
|
1912 | bool bFormat = VDList[id]->specificFormat;
|
1913 | long format = VDList[id]->formatType;
|
1914 |
|
1915 | int nReconnect = VDList[id]->nFramesForReconnect;
|
1916 | bool bReconnect = VDList[id]->autoReconnect;
|
1917 |
|
1918 | unsigned long avgFrameTime = VDList[id]->requestedFrameTime;
|
1919 |
|
1920 | stopDevice(id);
|
1921 |
|
1922 |
|
1923 | if( avgFrameTime != -1){
|
1924 | VDList[id]->requestedFrameTime = avgFrameTime;
|
1925 | }
|
1926 |
|
1927 | if( setupDevice(id, tmpW, tmpH, conn) ){
|
1928 |
|
1929 | if( bFormat ){
|
1930 | setFormat(id, format);
|
1931 | }
|
1932 | if( bReconnect ){
|
1933 | setAutoReconnectOnFreeze(id, true, nReconnect);
|
1934 | }
|
1935 | return true;
|
1936 | }
|
1937 | }
|
1938 | return false;
|
1939 | }
|
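/*
   stopDevice() simply destroys and recreates the videoDevice slot; restartDevice()
   additionally remembers the previous width/height, connection, format and requested
   frame time and re-applies them. A typical use (sketch, device ID 0 assumed) is
   changing the frame rate of a running device:

       VI.setIdealFramerate(0, 30);   // only records the wish in requestedFrameTime
       VI.restartDevice(0);           // rebuilds the graph so the new rate takes effect
*/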
1940 |
|
1941 |
|
1942 |
|
1943 |
|
1944 |
|
1945 | videoInput::~videoInput(){
|
1946 |
|
1947 | for(int i = 0; i < VI_MAX_CAMERAS; i++)
|
1948 | {
|
1949 | delete VDList[i];
|
1950 | }
|
1951 |
|
1952 | comUnInit();
|
1953 | }
|
1954 |
|
1955 |
|
1956 |
|
1957 |
|
1958 |
|
1959 |
|
1960 |
|
1961 |
|
1962 |
|
1963 |
|
1964 |
|
1965 | bool videoInput::comInit(){
|
1966 | HRESULT hr = NOERROR;
|
1967 |
|
1968 |
|
1969 | if(comInitCount == 0 ){
|
1970 |
|
1971 |
|
1972 |
|
1973 | #ifdef VI_COM_MULTI_THREADED
|
1974 | hr = CoInitializeEx(NULL,COINIT_MULTITHREADED);
|
1975 | #else
|
1976 | hr = CoInitialize(NULL);
|
1977 | #endif
|
1978 |
|
1979 |
|
1980 |
|
1981 | if( hr == RPC_E_CHANGED_MODE){
|
1982 | if(verbose)printf("SETUP - COM already setup - threaded VI might not be possible\n");
|
1983 | }
|
1984 | }
|
1985 |
|
1986 | comInitCount++;
|
1987 | return true;
|
1988 | }
|
1989 |
|
1990 |
|
1991 |
|
1992 |
|
1993 |
|
1994 |
|
1995 |
|
1996 | bool videoInput::comUnInit(){
|
1997 | if(comInitCount > 0)comInitCount--;
|
1998 |
|
1999 | if(comInitCount == 0){
|
2000 | CoUninitialize();
|
2001 | return true;
|
2002 | }
|
2003 |
|
2004 | return false;
|
2005 | }
|
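/*
   comInit()/comUnInit() keep a simple use count around CoInitialize/CoUninitialize:
   every successful comInit() must eventually be matched by a comUnInit(), and COM is
   only torn down once the count drops back to zero. Defining VI_COM_MULTI_THREADED at
   build time switches the call to CoInitializeEx(NULL, COINIT_MULTITHREADED) instead
   of the default single-threaded apartment.
*/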
2006 |
|
2007 |
|
2008 |
|
2009 |
|
2010 |
|
2011 |
|
2012 |
|
2013 | void videoInput::setAttemptCaptureSize(int id, int w, int h,GUID mediaType){
|
2014 |
|
2015 | VDList[id]->tryWidth = w;
|
2016 | VDList[id]->tryHeight = h;
|
2017 | VDList[id]->tryDiffSize = true;
|
2018 | VDList[id]->tryVideoType = mediaType;
|
2019 |
|
2020 | }
|
2021 |
|
2022 |
|
2023 |
|
2024 |
|
2025 |
|
2026 |
|
2027 | void videoInput::setPhyCon(int id, int conn){
|
2028 |
|
2029 | switch(conn){
|
2030 |
|
2031 | case 0:
|
2032 | VDList[id]->connection = PhysConn_Video_Composite;
|
2033 | break;
|
2034 | case 1:
|
2035 | VDList[id]->connection = PhysConn_Video_SVideo;
|
2036 | break;
|
2037 | case 2:
|
2038 | VDList[id]->connection = PhysConn_Video_Tuner;
|
2039 | break;
|
2040 | case 3:
|
2041 | VDList[id]->connection = PhysConn_Video_USB;
|
2042 | break;
|
2043 | case 4:
|
2044 | VDList[id]->connection = PhysConn_Video_1394;
|
2045 | break;
|
2046 | default:
|
2047 | return;
|
2048 | break;
|
2049 | }
|
2050 |
|
2051 | VDList[id]->storeConn = conn;
|
2052 | VDList[id]->useCrossbar = true;
|
2053 | }
|
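/*
   The integer handed to setPhyCon() (and to the setupDevice() overload that takes a
   connection) selects the physical input routed through the crossbar:
   0 = Composite, 1 = S-Video, 2 = Tuner, 3 = USB, 4 = FireWire (1394).
   Sketch (the device ID and size are assumptions; VI_COMPOSITE is the videoInput.h
   name for 0, if present in your copy of the header):

       VI.setupDevice(0, 640, 480, VI_COMPOSITE);
*/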
2054 |
|
2055 |
|
2056 |
|
2057 |
|
2058 |
|
2059 |
|
2060 |
|
2061 | bool videoInput::setup(int deviceNumber){
|
2062 | devicesFound = getDeviceCount();
|
2063 |
|
2064 | if(deviceNumber>devicesFound-1)
|
2065 | {
|
2066 | if(verbose)printf("SETUP: device[%i] not found - you have %i devices available\n", deviceNumber, devicesFound);
|
2067 | if(devicesFound>0) if(verbose)printf("SETUP: this means that the last device you can use is device[%i] \n", devicesFound-1);
|
2068 | return false;
|
2069 | }
|
2070 |
|
2071 | if(VDList[deviceNumber]->readyToCapture)
|
2072 | {
|
2073 | if(verbose)printf("SETUP: can't setup, device %i is currently being used\n",VDList[deviceNumber]->myID);
|
2074 | return false;
|
2075 | }
|
2076 |
|
2077 | HRESULT hr = start(deviceNumber, VDList[deviceNumber]);
|
2078 | if(hr == S_OK)return true;
|
2079 | else return false;
|
2080 | }
|
2081 |
|
2082 |
|
2083 |
|
2084 |
|
2085 |
|
2086 |
|
2087 |
|
2088 | void videoInput::processPixels(unsigned char * src, unsigned char * dst, int width, int height, bool bRGB, bool bFlip){
|
2089 |
|
2090 | int widthInBytes = width * 3;
|
2091 | int numBytes = widthInBytes * height;
|
2092 |
|
2093 | if(!bRGB){
|
2094 |
|
2095 |
|
2096 |
|
2097 |
|
2098 | if(bFlip){
|
2099 | for(int y = 0; y < height; y++){
|
2100 | memcpy(dst + (y * widthInBytes), src + ( (height -y -1) * widthInBytes), widthInBytes);
|
2101 | }
|
2102 |
|
2103 | }else{
|
2104 | memcpy(dst, src, numBytes);
|
2105 | }
|
2106 | }else{
|
2107 | if(bFlip){
|
2108 |
|
2109 | int x = 0;
|
2110 | int y = (height - 1) * widthInBytes;
|
2111 | src += y;
|
2112 |
|
2113 | for(int i = 0; i < numBytes; i+=3){
|
2114 | if(x >= width){
|
2115 | x = 0;
|
2116 | src -= widthInBytes*2;
|
2117 | }
|
2118 |
|
2119 | *dst = *(src+2);
|
2120 | dst++;
|
2121 |
|
2122 | *dst = *(src+1);
|
2123 | dst++;
|
2124 |
|
2125 | *dst = *src;
|
2126 | dst++;
|
2127 |
|
2128 | src+=3;
|
2129 | x++;
|
2130 | }
|
2131 | }
|
2132 | else{
|
2133 | for(int i = 0; i < numBytes; i+=3){
|
2134 | *dst = *(src+2);
|
2135 | dst++;
|
2136 |
|
2137 | *dst = *(src+1);
|
2138 | dst++;
|
2139 |
|
2140 | *dst = *src;
|
2141 | dst++;
|
2142 |
|
2143 | src+=3;
|
2144 | }
|
2145 | }
|
2146 | }
|
2147 | }
|
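/*
   processPixels() copies one DirectShow RGB24 sample (typically stored bottom-up in
   BGR order) into a user buffer: when bFlip is true the row order is reversed, and
   when bRGB is true the first and third channel are swapped while copying. The public
   wrapper is getPixels(); sketch (device ID 0 and the buffer are assumptions):

       unsigned char * buf = new unsigned char[ VI.getWidth(0) * VI.getHeight(0) * 3 ];
       VI.getPixels(0, buf, true /*swap to RGB*/, true /*flip vertically*/);
*/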
2148 |
|
2149 |
|
2150 |
|
2151 | void videoInput::getMediaSubtypeAsString(GUID type, char * typeAsString){
|
2152 |
|
2153 | char tmpStr[8];
|
2154 | if( type == MEDIASUBTYPE_RGB24) sprintf(tmpStr, "RGB24");
|
2155 | else if(type == MEDIASUBTYPE_RGB32) sprintf(tmpStr, "RGB32");
|
2156 | else if(type == MEDIASUBTYPE_RGB555)sprintf(tmpStr, "RGB555");
|
2157 | else if(type == MEDIASUBTYPE_RGB565)sprintf(tmpStr, "RGB565");
|
2158 | else if(type == MEDIASUBTYPE_YUY2) sprintf(tmpStr, "YUY2");
|
2159 | else if(type == MEDIASUBTYPE_YVYU) sprintf(tmpStr, "YVYU");
|
2160 | else if(type == MEDIASUBTYPE_YUYV) sprintf(tmpStr, "YUYV");
|
2161 | else if(type == MEDIASUBTYPE_IYUV) sprintf(tmpStr, "IYUV");
|
2162 | else if(type == MEDIASUBTYPE_UYVY) sprintf(tmpStr, "UYVY");
|
2163 | else if(type == MEDIASUBTYPE_YV12) sprintf(tmpStr, "YV12");
|
2164 | else if(type == MEDIASUBTYPE_YVU9) sprintf(tmpStr, "YVU9");
|
2165 | else if(type == MEDIASUBTYPE_Y411) sprintf(tmpStr, "Y411");
|
2166 | else if(type == MEDIASUBTYPE_Y41P) sprintf(tmpStr, "Y41P");
|
2167 | else if(type == MEDIASUBTYPE_Y211) sprintf(tmpStr, "Y211");
|
2168 | else if(type == MEDIASUBTYPE_AYUV) sprintf(tmpStr, "AYUV");
|
2169 | else if(type == MEDIASUBTYPE_MJPG) sprintf(tmpStr, "MJPG");
|
2170 | else if(type == MEDIASUBTYPE_Y800) sprintf(tmpStr, "Y800");
|
2171 | else if(type == MEDIASUBTYPE_Y8) sprintf(tmpStr, "Y8");
|
2172 | else if(type == MEDIASUBTYPE_GREY) sprintf(tmpStr, "GREY");
|
2173 | else sprintf(tmpStr, "OTHER");
|
2174 |
|
2175 | memcpy(typeAsString, tmpStr, sizeof(char)*8);
|
2176 | }
|
2177 |
|
2178 | int videoInput::getFourccFromMediaSubtype(GUID type) {
|
2179 | return type.Data1;
|
2180 | }
|
2181 |
|
2182 | GUID *videoInput::getMediaSubtypeFromFourcc(int fourcc){
|
2183 |
|
2184 | for (int i=0;i<VI_NUM_TYPES;i++) {
|
2185 | if ( fourcc == mediaSubtypes[i].Data1 ) {
|
2186 | return &mediaSubtypes[i];
|
2187 | }
|
2188 | }
|
2189 |
|
2190 | return NULL;
|
2191 | }
|
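/*
   For FOURCC-mapped subtypes (YUY2, MJPG, ...) the FOURCC code is stored in Data1 of
   the subtype GUID, which is what the two helpers above rely on;
   getMediaSubtypeFromFourcc() returns NULL for codes not in mediaSubtypes.
   Sketch (the Windows mmioFOURCC macro is an assumption about the build environment):

       GUID * st = VI.getMediaSubtypeFromFourcc( mmioFOURCC('M','J','P','G') );
       if( st ) { int fourcc = VI.getFourccFromMediaSubtype(*st); }   // round-trips to 'MJPG'
*/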
2192 |
|
2193 |
|
2194 | enum { CV_CAP_PROP_SETTINGS = 28,
|
2195 | CV_CAP_PROP_BACKLIGHT,
|
2196 | CV_CAP_PROP_PAN,
|
2197 | CV_CAP_PROP_TILT,
|
2198 | CV_CAP_PROP_ROLL,
|
2199 | CV_CAP_PROP_ZOOM,
|
2200 | CV_CAP_PROP_IRIS,
|
2201 | CV_CAP_PROP_FOCUS
|
2202 | };
|
2203 |
|
2204 |
|
2205 |
|
2206 | void videoInput::getVideoPropertyAsString(int prop, char * propertyAsString){
|
2207 |
|
2208 | char tmpStr[16];
|
2209 |
|
2210 | if ( prop==VideoProcAmp_Brightness) sprintf(tmpStr, "Brightness");
|
2211 | else if ( prop==VideoProcAmp_Contrast) sprintf(tmpStr, "Contrast");
|
2212 | else if ( prop==VideoProcAmp_Saturation) sprintf(tmpStr, "Saturation");
|
2213 | else if ( prop==VideoProcAmp_Hue) sprintf(tmpStr, "Hue");
|
2214 | else if ( prop==VideoProcAmp_Gain) sprintf(tmpStr, "Gain");
|
2215 | else if ( prop==VideoProcAmp_Gamma) sprintf(tmpStr, "Gamma");
|
2216 | else if ( prop==VideoProcAmp_ColorEnable) sprintf(tmpStr, "ColorEnable");
|
2217 | else if ( prop==VideoProcAmp_Sharpness) sprintf(tmpStr, "Sharpness");
|
2218 | else sprintf(tmpStr, "%d",prop);
|
2219 |
|
2220 | memcpy(propertyAsString, tmpStr, sizeof(char)*16);
|
2221 | }
|
2222 |
|
2223 |
|
2224 | int videoInput::getVideoPropertyFromCV(int cv_property){
|
2225 |
|
2226 |
|
2227 | switch (cv_property) {
|
2228 | case CV_CAP_PROP_BRIGHTNESS:
|
2229 | return VideoProcAmp_Brightness;
|
2230 |
|
2231 | case CV_CAP_PROP_CONTRAST:
|
2232 | return VideoProcAmp_Contrast;
|
2233 |
|
2234 | case CV_CAP_PROP_HUE:
|
2235 | return VideoProcAmp_Hue;
|
2236 |
|
2237 | case CV_CAP_PROP_SATURATION:
|
2238 | return VideoProcAmp_Saturation;
|
2239 |
|
2240 | case CV_CAP_PROP_SHARPNESS:
|
2241 | return VideoProcAmp_Sharpness;
|
2242 |
|
2243 | case CV_CAP_PROP_GAMMA:
|
2244 | return VideoProcAmp_Gamma;
|
2245 |
|
2246 | case CV_CAP_PROP_MONOCROME:
|
2247 | return VideoProcAmp_ColorEnable;
|
2248 |
|
2249 | case CV_CAP_PROP_WHITE_BALANCE_BLUE_U:
|
2250 | return VideoProcAmp_WhiteBalance;
|
2251 |
|
2252 | case CV_CAP_PROP_BACKLIGHT:
|
2253 | return VideoProcAmp_BacklightCompensation;
|
2254 |
|
2255 | case CV_CAP_PROP_GAIN:
|
2256 | return VideoProcAmp_Gain;
|
2257 | }
|
2258 | return -1;
|
2259 | }
|
2260 |
|
2261 | int videoInput::getCameraPropertyFromCV(int cv_property){
|
2262 |
|
2263 |
|
2264 | switch (cv_property) {
|
2265 |
|
2266 | case CV_CAP_PROP_PAN:
|
2267 | return CameraControl_Pan;
|
2268 |
|
2269 | case CV_CAP_PROP_TILT:
|
2270 | return CameraControl_Tilt;
|
2271 |
|
2272 | case CV_CAP_PROP_ROLL:
|
2273 | return CameraControl_Roll;
|
2274 |
|
2275 | case CV_CAP_PROP_ZOOM:
|
2276 | return CameraControl_Zoom;
|
2277 |
|
2278 | case CV_CAP_PROP_EXPOSURE:
|
2279 | return CameraControl_Exposure;
|
2280 |
|
2281 | case CV_CAP_PROP_IRIS:
|
2282 | return CameraControl_Iris;
|
2283 |
|
2284 | case CV_CAP_PROP_FOCUS:
|
2285 | return CameraControl_Focus;
|
2286 | }
|
2287 | return -1;
|
2288 | }
|
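/*
   getVideoPropertyFromCV() and getCameraPropertyFromCV() translate the OpenCV
   CV_CAP_PROP_* ids into the two DirectShow property sets: IAMVideoProcAmp values
   (brightness, contrast, ...) and IAMCameraControl values (pan, tilt, zoom, ...).
   Both return -1 for ids they do not know, so callers should treat -1 as
   "unsupported property".
*/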
2289 |
|
2290 | void videoInput::getCameraPropertyAsString(int prop, char * propertyAsString){
|
2291 |
|
2292 | char tmpStr[16];
|
2293 |
|
2294 | if ( prop==CameraControl_Pan) sprintf(tmpStr, "Pan");
|
2295 | else if ( prop==CameraControl_Tilt) sprintf(tmpStr, "Tilt");
|
2296 | else if ( prop==CameraControl_Roll) sprintf(tmpStr, "Roll");
|
2297 | else if ( prop==CameraControl_Zoom) sprintf(tmpStr, "Zoom");
|
2298 | else if ( prop==CameraControl_Exposure) sprintf(tmpStr, "Exposure");
|
2299 | else if ( prop==CameraControl_Iris) sprintf(tmpStr, "Iris");
|
2300 | else if ( prop==CameraControl_Focus) sprintf(tmpStr, "Focus");
|
2301 | else sprintf(tmpStr, "%d",prop);
|
2302 |
|
2303 | memcpy(propertyAsString, tmpStr, sizeof(char)*16);
|
2304 | }
|
2305 |
|
2306 |
|
2307 |
|
2308 | static void findClosestSizeAndSubtype(videoDevice * VD, int widthIn, int heightIn, int &widthOut, int &heightOut, GUID & mediatypeOut){
|
2309 | HRESULT hr;
|
2310 |
|
2311 |
|
2312 | int nearW = 9999999;
|
2313 | int nearH = 9999999;
|
2314 | bool foundClosestMatch = true;
|
2315 |
|
2316 | int iCount = 0;
|
2317 | int iSize = 0;
|
2318 | hr = VD->streamConf->GetNumberOfCapabilities(&iCount, &iSize);
|
2319 |
|
2320 | if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
|
2321 | {
|
2322 |
|
2323 | for (int iFormat = 0; iFormat < iCount; iFormat++)
|
2324 | {
|
2325 | VIDEO_STREAM_CONFIG_CAPS scc;
|
2326 | AM_MEDIA_TYPE *pmtConfig;
|
2327 | hr = VD->streamConf->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
|
2328 |
|
2329 | if (SUCCEEDED(hr)){
|
2330 |
|
2331 |
|
2332 | int stepX = scc.OutputGranularityX;
|
2333 | int stepY = scc.OutputGranularityY;
|
2334 |
|
2335 | int tempW = 999999;
|
2336 | int tempH = 999999;
|
2337 |
|
2338 |
|
2339 | if(stepX < 1 || stepY < 1) continue;
|
2340 |
|
2341 |
|
2342 |
|
2343 |
|
2344 | bool exactMatch = false;
|
2345 | bool exactMatchX = false;
|
2346 | bool exactMatchY = false;
|
2347 |
|
2348 | for(int x = scc.MinOutputSize.cx; x <= scc.MaxOutputSize.cx; x+= stepX){
|
2349 |
|
2350 | if( widthIn == x ){
|
2351 | exactMatchX = true;
|
2352 | tempW = x;
|
2353 | }
|
2354 |
|
2355 | else if( abs(widthIn-x) < abs(widthIn-tempW) ){
|
2356 | tempW = x;
|
2357 | }
|
2358 | }
|
2359 |
|
2360 | for(int y = scc.MinOutputSize.cy; y <= scc.MaxOutputSize.cy; y+= stepY){
|
2361 |
|
2362 | if( heightIn == y){
|
2363 | exactMatchY = true;
|
2364 | tempH = y;
|
2365 | }
|
2366 |
|
2367 | else if( abs(heightIn-y) < abs(heightIn-tempH) ){
|
2368 | tempH = y;
|
2369 | }
|
2370 | }
|
2371 |
|
2372 |
|
2373 | if(exactMatchX && exactMatchY){
|
2374 | foundClosestMatch = false;
|
2375 | exactMatch = true;
|
2376 |
|
2377 | widthOut = widthIn;
|
2378 | heightOut = heightIn;
|
2379 | mediatypeOut = pmtConfig->subtype;
|
2380 | }
|
2381 |
|
2382 |
|
2383 |
|
2384 |
|
2385 | else if( abs(widthIn - tempW) + abs(heightIn - tempH) < abs(widthIn - nearW) + abs(heightIn - nearH) )
|
2386 | {
|
2387 | nearW = tempW;
|
2388 | nearH = tempH;
|
2389 |
|
2390 | widthOut = nearW;
|
2391 | heightOut = nearH;
|
2392 | mediatypeOut = pmtConfig->subtype;
|
2393 | }
|
2394 |
|
2395 | MyDeleteMediaType(pmtConfig);
|
2396 |
|
2397 |
|
2398 | if(exactMatch)break;
|
2399 | }
|
2400 | }
|
2401 | }
|
2402 |
|
2403 | }
|
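/*
   findClosestSizeAndSubtype() walks every capability reported by
   IAMStreamConfig::GetStreamCaps, steps through each advertised size range in
   OutputGranularityX/Y increments, and keeps the candidate whose |dW| + |dH| distance
   to the requested size is smallest; an exact width/height match wins immediately and
   also fixes the media subtype that will be requested.
*/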
2404 |
|
2405 |
|
2406 |
|
2407 | static bool setSizeAndSubtype(videoDevice * VD, int attemptWidth, int attemptHeight, GUID mediatype){
|
2408 | VIDEOINFOHEADER *pVih = reinterpret_cast<VIDEOINFOHEADER*>(VD->pAmMediaType->pbFormat);
|
2409 |
|
2410 |
|
2411 |
|
2412 |
|
2413 | AM_MEDIA_TYPE * tmpType = NULL;
|
2414 |
|
2415 | HRESULT hr = VD->streamConf->GetFormat(&tmpType);
|
2416 | if(hr != S_OK)return false;
|
2417 |
|
2418 |
|
2419 |
|
2420 | HEADER(pVih)->biWidth = attemptWidth;
|
2421 | HEADER(pVih)->biHeight = attemptHeight;
|
2422 |
|
2423 | VD->pAmMediaType->formattype = FORMAT_VideoInfo;
|
2424 | VD->pAmMediaType->majortype = MEDIATYPE_Video;
|
2425 | VD->pAmMediaType->subtype = mediatype;
|
2426 |
|
2427 |
|
2428 | VD->pAmMediaType->lSampleSize = attemptWidth*attemptHeight*3;
|
2429 |
|
2430 |
|
2431 | if( VD->requestedFrameTime != -1){
|
2432 | pVih->AvgTimePerFrame = VD->requestedFrameTime;
|
2433 | }
|
2434 |
|
2435 |
|
2436 | hr = VD->streamConf->SetFormat(VD->pAmMediaType);
|
2437 | if(hr == S_OK){
|
2438 | if( tmpType != NULL )MyDeleteMediaType(tmpType);
|
2439 | return true;
|
2440 | }else{
|
2441 | VD->streamConf->SetFormat(tmpType);
|
2442 | if( tmpType != NULL )MyDeleteMediaType(tmpType);
|
2443 | }
|
2444 |
|
2445 | return false;
|
2446 | }
|
2447 |
|
2448 |
|
2449 |
|
2450 |
|
2451 |
|
2452 |
|
2453 | int videoInput::start(int deviceID, videoDevice *VD){
|
2454 |
|
2455 | HRESULT hr = NOERROR;
|
2456 | VD->myID = deviceID;
|
2457 | VD->setupStarted = true;
|
2458 | CAPTURE_MODE = PIN_CATEGORY_CAPTURE;
|
2459 | callbackSetCount = 1;
|
2460 |
|
2461 | if(verbose)printf("SETUP: Setting up device %i\n",deviceID);
|
2462 |
|
2463 |
|
2464 |
|
2465 | hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&VD->pCaptureGraph);
|
2466 | if (FAILED(hr))
|
2467 | {
|
2468 | if(verbose)printf("ERROR - Could not create the Filter Graph Manager\n");
|
2469 | return hr;
|
2470 | }
|
2471 |
|
2472 |
|
2473 |
|
2474 | hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&VD->pGraph);
|
2475 | if (FAILED(hr))
|
2476 | {
|
2477 | if(verbose)printf("ERROR - Could not add the graph builder!\n");
|
2478 | stopDevice(deviceID);
|
2479 | return hr;
|
2480 | }
|
2481 |
|
2482 |
|
2483 | hr = VD->pCaptureGraph->SetFiltergraph(VD->pGraph);
|
2484 | if (FAILED(hr))
|
2485 | {
|
2486 | if(verbose)printf("ERROR - Could not set filtergraph\n");
|
2487 | stopDevice(deviceID);
|
2488 | return hr;
|
2489 | }
|
2490 |
|
2491 |
|
2492 |
|
2493 |
|
2494 | hr = VD->pGraph->QueryInterface(IID_IMediaControl, (void **)&VD->pControl);
|
2495 | if (FAILED(hr))
|
2496 | {
|
2497 | if(verbose)printf("ERROR - Could not create the Media Control object\n");
|
2498 | stopDevice(deviceID);
|
2499 | return hr;
|
2500 | }
|
2501 |
|
2502 |
|
2503 |
|
2504 |
|
2505 | hr = getDevice(&VD->pVideoInputFilter, deviceID, VD->wDeviceName, VD->nDeviceName);
|
2506 |
|
2507 | if (SUCCEEDED(hr)){
|
2508 | if(verbose)printf("SETUP: %s\n", VD->nDeviceName);
|
2509 | hr = VD->pGraph->AddFilter(VD->pVideoInputFilter, VD->wDeviceName);
|
2510 | }else{
|
2511 | if(verbose)printf("ERROR - Could not find specified video device\n");
|
2512 | stopDevice(deviceID);
|
2513 | return hr;
|
2514 | }
|
2515 |
|
2516 |
|
2517 | IAMStreamConfig *streamConfTest = NULL;
|
2518 | hr = VD->pCaptureGraph->FindInterface(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, VD->pVideoInputFilter, IID_IAMStreamConfig, (void **)&streamConfTest);
|
2519 | if(FAILED(hr)){
|
2520 | if(verbose)printf("SETUP: Couldn't find preview pin using SmartTee\n");
|
2521 | }else{
|
2522 | CAPTURE_MODE = PIN_CATEGORY_PREVIEW;
|
2523 | streamConfTest->Release();
|
2524 | streamConfTest = NULL;
|
2525 | }
|
2526 |
|
2527 |
|
2528 |
|
2529 |
|
2530 | if(VD->useCrossbar)
|
2531 | {
|
2532 | if(verbose)printf("SETUP: Checking crossbar\n");
|
2533 | routeCrossbar(&VD->pCaptureGraph, &VD->pVideoInputFilter, VD->connection, CAPTURE_MODE);
|
2534 | }
|
2535 |
|
2536 |
|
2537 |
|
2538 | hr = VD->pCaptureGraph->FindInterface(&CAPTURE_MODE, &MEDIATYPE_Video, VD->pVideoInputFilter, IID_IAMStreamConfig, (void **)&VD->streamConf);
|
2539 | if(FAILED(hr)){
|
2540 | if(verbose)printf("ERROR: Couldn't config the stream!\n");
|
2541 | stopDevice(deviceID);
|
2542 | return hr;
|
2543 | }
|
2544 |
|
2545 |
|
2546 | hr = VD->streamConf->GetFormat(&VD->pAmMediaType);
|
2547 | if(FAILED(hr)){
|
2548 | if(verbose)printf("ERROR: Couldn't getFormat for pAmMediaType!\n");
|
2549 | stopDevice(deviceID);
|
2550 | return hr;
|
2551 | }
|
2552 |
|
2553 | VIDEOINFOHEADER *pVih = reinterpret_cast<VIDEOINFOHEADER*>(VD->pAmMediaType->pbFormat);
|
2554 | int currentWidth = HEADER(pVih)->biWidth;
|
2555 | int currentHeight = HEADER(pVih)->biHeight;
|
2556 |
|
2557 | bool customSize = VD->tryDiffSize;
|
2558 |
|
2559 | bool foundSize = false;
|
2560 |
|
2561 | if(customSize){
|
2562 | if(verbose) printf("SETUP: Default Format is set to %i by %i \n", currentWidth, currentHeight);
|
2563 |
|
2564 | char guidStr[8];
|
2565 |
|
2566 | getMediaSubtypeAsString(VD->tryVideoType, guidStr);
|
2567 | if(verbose)printf("SETUP: trying specified format %s @ %i by %i\n", guidStr, VD->tryWidth, VD->tryHeight);
|
2568 |
|
2569 | if( setSizeAndSubtype(VD, VD->tryWidth, VD->tryHeight, VD->tryVideoType) ){
|
2570 | VD->setSize(VD->tryWidth, VD->tryHeight);
|
2571 | foundSize = true;
|
2572 | } else {
|
2573 |
|
2574 | for(int i = 0; i < VI_NUM_TYPES; i++){
|
2575 |
|
2576 | getMediaSubtypeAsString(mediaSubtypes[i], guidStr);
|
2577 |
|
2578 | if(verbose)printf("SETUP: trying format %s @ %i by %i\n", guidStr, VD->tryWidth, VD->tryHeight);
|
2579 | if( setSizeAndSubtype(VD, VD->tryWidth, VD->tryHeight, mediaSubtypes[i]) ){
|
2580 | VD->setSize(VD->tryWidth, VD->tryHeight);
|
2581 | foundSize = true;
|
2582 | break;
|
2583 | }
|
2584 | }
|
2585 | }
|
2586 |
|
2587 |
|
2588 |
|
2589 | if( foundSize == false ){
|
2590 | if( verbose )printf("SETUP: couldn't find requested size - searching for closest matching size\n");
|
2591 |
|
2592 | int closestWidth = -1;
|
2593 | int closestHeight = -1;
|
2594 | GUID newMediaSubtype;
|
2595 |
|
2596 | findClosestSizeAndSubtype(VD, VD->tryWidth, VD->tryHeight, closestWidth, closestHeight, newMediaSubtype);
|
2597 |
|
2598 | if( closestWidth != -1 && closestHeight != -1){
|
2599 | getMediaSubtypeAsString(newMediaSubtype, guidStr);
|
2600 |
|
2601 | if(verbose)printf("SETUP: closest supported size is %s @ %i %i\n", guidStr, closestWidth, closestHeight);
|
2602 | if( setSizeAndSubtype(VD, closestWidth, closestHeight, newMediaSubtype) ){
|
2603 | VD->setSize(closestWidth, closestHeight);
|
2604 | foundSize = true;
|
2605 | }
|
2606 | }
|
2607 | }
|
2608 | }
|
2609 |
|
2610 |
|
2611 | if(customSize == false || foundSize == false){
|
2612 | if( VD->requestedFrameTime != -1 ){
|
2613 | pVih->AvgTimePerFrame = VD->requestedFrameTime;
|
2614 | hr = VD->streamConf->SetFormat(VD->pAmMediaType);
|
2615 | }
|
2616 | VD->setSize(currentWidth, currentHeight);
|
2617 | }
|
2618 |
|
2619 |
|
2620 |
|
2621 | hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,IID_IBaseFilter, (void**)&VD->pGrabberF);
|
2622 | if (FAILED(hr)){
|
2623 | if(verbose)printf("Could not Create Sample Grabber - CoCreateInstance()\n");
|
2624 | stopDevice(deviceID);
|
2625 | return hr;
|
2626 | }
|
2627 |
|
2628 | hr = VD->pGraph->AddFilter(VD->pGrabberF, L"Sample Grabber");
|
2629 | if (FAILED(hr)){
|
2630 | if(verbose)printf("Could not add Sample Grabber - AddFilter()\n");
|
2631 | stopDevice(deviceID);
|
2632 | return hr;
|
2633 | }
|
2634 |
|
2635 | hr = VD->pGrabberF->QueryInterface(IID_ISampleGrabber, (void**)&VD->pGrabber);
|
2636 | if (FAILED(hr)){
|
2637 | if(verbose)printf("ERROR: Could not query SampleGrabber\n");
|
2638 | stopDevice(deviceID);
|
2639 | return hr;
|
2640 | }
|
2641 |
|
2642 |
|
2643 |
|
2644 | hr = VD->pGrabber->SetOneShot(FALSE);
|
2645 | if(bCallback){
|
2646 | hr = VD->pGrabber->SetBufferSamples(FALSE);
|
2647 | }else{
|
2648 | hr = VD->pGrabber->SetBufferSamples(TRUE);
|
2649 | }
|
2650 |
|
2651 | if(bCallback){
|
2652 |
|
2653 |
|
2654 | hr = VD->pGrabber->SetCallback(VD->sgCallback, 0);
|
2655 | if (FAILED(hr)){
|
2656 | if(verbose)printf("ERROR: problem setting callback\n");
|
2657 | stopDevice(deviceID);
|
2658 | return hr;
|
2659 | }else{
|
2660 | if(verbose)printf("SETUP: Capture callback set\n");
|
2661 | }
|
2662 | }
|
2663 |
|
2664 |
|
2665 |
|
2666 |
|
2667 | AM_MEDIA_TYPE mt;
|
2668 | ZeroMemory(&mt,sizeof(AM_MEDIA_TYPE));
|
2669 |
|
2670 | mt.majortype = MEDIATYPE_Video;
|
2671 | mt.subtype = MEDIASUBTYPE_RGB24;
|
2672 | mt.formattype = FORMAT_VideoInfo;
|
2673 |
|
2674 |
|
2675 | hr = VD->pGrabber->SetMediaType(&mt);
|
2676 |
|
2677 |
|
2678 |
|
2679 | if(VD->streamConf){
|
2680 | VD->streamConf->Release();
|
2681 | VD->streamConf = NULL;
|
2682 | }else{
|
2683 | if(verbose)printf("ERROR: connecting device - perhaps it is already being used?\n");
|
2684 | stopDevice(deviceID);
|
2685 | return S_FALSE;
|
2686 | }
|
2687 |
|
2688 |
|
2689 |
|
2690 |
|
2691 | hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)(&VD->pDestFilter));
|
2692 | if (FAILED(hr)){
|
2693 | if(verbose)printf("ERROR: Could not create filter - NullRenderer\n");
|
2694 | stopDevice(deviceID);
|
2695 | return hr;
|
2696 | }
|
2697 |
|
2698 | hr = VD->pGraph->AddFilter(VD->pDestFilter, L"NullRenderer");
|
2699 | if (FAILED(hr)){
|
2700 | if(verbose)printf("ERROR: Could not add filter - NullRenderer\n");
|
2701 | stopDevice(deviceID);
|
2702 | return hr;
|
2703 | }
|
2704 |
|
2705 |
|
2706 |
|
2707 | hr = VD->pCaptureGraph->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, VD->pVideoInputFilter, VD->pGrabberF, VD->pDestFilter);
|
2708 |
|
2709 | if (FAILED(hr)){
|
2710 | if(verbose)printf("ERROR: Could not connect pins - RenderStream()\n");
|
2711 | stopDevice(deviceID);
|
2712 | return hr;
|
2713 | }
|
2714 |
|
2715 |
|
2716 |
|
2717 | {
|
2718 | IMediaFilter *pMediaFilter = 0;
|
2719 | hr = VD->pGraph->QueryInterface(IID_IMediaFilter, (void**)&pMediaFilter);
|
2720 | if (FAILED(hr)){
|
2721 | if(verbose)printf("ERROR: Could not get IID_IMediaFilter interface\n");
|
2722 | }else{
|
2723 | pMediaFilter->SetSyncSource(NULL);
|
2724 | pMediaFilter->Release();
|
2725 | }
|
2726 | }
|
2727 |
|
2728 |
|
2729 |
|
2730 | hr = VD->pControl->Run();
|
2731 |
|
2732 | if (FAILED(hr)){
|
2733 | if(verbose)printf("ERROR: Could not start graph\n");
|
2734 | stopDevice(deviceID);
|
2735 | return hr;
|
2736 | }
|
2737 |
|
2738 |
|
2739 |
|
2740 | if(!bCallback){
|
2741 |
|
2742 | long bufferSize = VD->videoSize;
|
2743 |
|
2744 | while( hr != S_OK){
|
2745 | hr = VD->pGrabber->GetCurrentBuffer(&bufferSize, (long *)VD->pBuffer);
|
2746 | Sleep(10);
|
2747 | }
|
2748 |
|
2749 | }
|
2750 |
|
2751 | if(verbose)printf("SETUP: Device is setup and ready to capture.\n\n");
|
2752 | VD->readyToCapture = true;
|
2753 |
|
2754 |
|
2755 |
|
2756 |
|
2757 |
|
2758 |
|
2759 |
|
2760 | VD->pVideoInputFilter->Release();
|
2761 | VD->pVideoInputFilter = NULL;
|
2762 |
|
2763 | VD->pGrabberF->Release();
|
2764 | VD->pGrabberF = NULL;
|
2765 |
|
2766 | VD->pDestFilter->Release();
|
2767 | VD->pDestFilter = NULL;
|
2768 |
|
2769 | return S_OK;
|
2770 | }
|
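/*
   Summary of the graph built by start():

       capture device  ->  Sample Grabber (forced to MEDIASUBTYPE_RGB24)  ->  Null Renderer

   connected with ICaptureGraphBuilder2::RenderStream on the preview pin. The reference
   clock is removed with IMediaFilter::SetSyncSource(NULL) so samples are delivered as
   fast as the device produces them, and in buffer mode (no callback) the code spins on
   GetCurrentBuffer() until the first frame arrives before flagging readyToCapture.
*/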
2771 |
|
2772 |
|
2773 |
|
2774 |
|
2775 |
|
2776 |
|
2777 |
|
2778 | int videoInput::getDeviceCount(){
|
2779 |
|
2780 |
|
2781 | ICreateDevEnum *pDevEnum = NULL;
|
2782 | IEnumMoniker *pEnum = NULL;
|
2783 | int deviceCounter = 0;
|
2784 |
|
2785 | HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
|
2786 | CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
|
2787 | reinterpret_cast<void**>(&pDevEnum));
|
2788 |
|
2789 |
|
2790 | if (SUCCEEDED(hr))
|
2791 | {
|
2792 |
|
2793 | hr = pDevEnum->CreateClassEnumerator(
|
2794 | CLSID_VideoInputDeviceCategory,
|
2795 | &pEnum, 0);
|
2796 |
|
2797 | if(hr == S_OK){
|
2798 | IMoniker *pMoniker = NULL;
|
2799 | while (pEnum->Next(1, &pMoniker, NULL) == S_OK){
|
2800 |
|
2801 | IPropertyBag *pPropBag;
|
2802 | hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
|
2803 | (void**)(&pPropBag));
|
2804 |
|
2805 | if (FAILED(hr)){
|
2806 | pMoniker->Release();
|
2807 | continue;
|
2808 | }
|
2809 |
|
2810 | pPropBag->Release();
|
2811 | pPropBag = NULL;
|
2812 |
|
2813 | pMoniker->Release();
|
2814 | pMoniker = NULL;
|
2815 |
|
2816 | deviceCounter++;
|
2817 | }
|
2818 |
|
2819 | pEnum->Release();
|
2820 | pEnum = NULL;
|
2821 | }
|
2822 |
|
2823 | pDevEnum->Release();
|
2824 | pDevEnum = NULL;
|
2825 | }
|
2826 | return deviceCounter;
|
2827 | }
|
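/*
   Sketch: getDeviceCount() can be called before any device is set up, e.g. to validate
   a user-supplied index (requestedIndex below is an assumption; the check mirrors the
   one at the top of videoInput::setup()):

       videoInput VI;
       int n = VI.getDeviceCount();
       if( requestedIndex >= 0 && requestedIndex < n )
           VI.setupDevice(requestedIndex);
*/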
2828 |
|
2829 |
|
2830 |
|
2831 |
|
2832 |
|
2833 |
|
2834 |
|
2835 |
|
2836 |
|
2837 | HRESULT videoInput::getDevice(IBaseFilter** gottaFilter, int deviceId, WCHAR * wDeviceName, char * nDeviceName){
|
2838 | BOOL done = false;
|
2839 | int deviceCounter = 0;
|
2840 |
|
2841 |
|
2842 | ICreateDevEnum *pSysDevEnum = NULL;
|
2843 | HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum);
|
2844 | if (FAILED(hr))
|
2845 | {
|
2846 | return hr;
|
2847 | }
|
2848 |
|
2849 |
|
2850 | IEnumMoniker *pEnumCat = NULL;
|
2851 | hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0);
|
2852 |
|
2853 | if (hr == S_OK)
|
2854 | {
|
2855 |
|
2856 | IMoniker *pMoniker = NULL;
|
2857 | ULONG cFetched;
|
2858 | while ((pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) && (!done))
|
2859 | {
|
2860 | if(deviceCounter == deviceId)
|
2861 | {
|
2862 |
|
2863 | IPropertyBag *pPropBag;
|
2864 | hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
|
2865 | if (SUCCEEDED(hr))
|
2866 | {
|
2867 |
|
2868 | VARIANT varName;
|
2869 | VariantInit(&varName);
|
2870 | hr = pPropBag->Read(L"FriendlyName", &varName, 0);
|
2871 | if (SUCCEEDED(hr))
|
2872 | {
|
2873 |
|
2874 |
|
2875 | int count = 0;
|
2876 | while( varName.bstrVal[count] != 0x00 ) {
|
2877 | wDeviceName[count] = varName.bstrVal[count];
|
2878 | nDeviceName[count] = (char)varName.bstrVal[count];
|
2879 | count++;
|
2880 | }
|
2881 |
|
2882 |
|
2883 | hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)gottaFilter);
|
2884 | done = true;
|
2885 | }
|
2886 | VariantClear(&varName);
|
2887 | pPropBag->Release();
|
2888 | pPropBag = NULL;
|
2889 | pMoniker->Release();
|
2890 | pMoniker = NULL;
|
2891 | }
|
2892 | }
|
2893 | deviceCounter++;
|
2894 | }
|
2895 | pEnumCat->Release();
|
2896 | pEnumCat = NULL;
|
2897 | }
|
2898 | pSysDevEnum->Release();
|
2899 | pSysDevEnum = NULL;
|
2900 |
|
2901 | if (done) {
|
2902 | return hr;
|
2903 | } else {
|
2904 | return VFW_E_NOT_FOUND;
|
2905 | }
|
2906 | }
|
2907 |
|
2908 |
|
2909 |
|
2910 |
|
2911 |
|
2912 |
|
2913 |
|
2914 | HRESULT videoInput::ShowFilterPropertyPages(IBaseFilter *pFilter){
|
2915 |
|
2916 | ISpecifyPropertyPages *pProp;
|
2917 |
|
2918 | HRESULT hr = pFilter->QueryInterface(IID_ISpecifyPropertyPages, (void **)&pProp);
|
2919 | if (SUCCEEDED(hr))
|
2920 | {
|
2921 |
|
2922 | FILTER_INFO FilterInfo;
|
2923 | hr = pFilter->QueryFilterInfo(&FilterInfo);
|
2924 | IUnknown *pFilterUnk;
|
2925 | pFilter->QueryInterface(IID_IUnknown, (void **)&pFilterUnk);
|
2926 |
|
2927 |
|
2928 | CAUUID caGUID;
|
2929 | pProp->GetPages(&caGUID);
|
2930 | pProp->Release();
|
2931 | OleCreatePropertyFrame(
|
2932 | NULL,
|
2933 | 0, 0,
|
2934 | FilterInfo.achName,
|
2935 | 1,
|
2936 | &pFilterUnk,
|
2937 | caGUID.cElems,
|
2938 | caGUID.pElems,
|
2939 | 0,
|
2940 | 0, NULL
|
2941 | );
|
2942 |
|
2943 |
|
2944 | if(pFilterUnk)pFilterUnk->Release();
|
2945 | if(FilterInfo.pGraph)FilterInfo.pGraph->Release();
|
2946 | CoTaskMemFree(caGUID.pElems);
|
2947 | }
|
2948 | return hr;
|
2949 | }
|
2950 |
|
2951 | HRESULT videoInput::ShowStreamPropertyPages(IAMStreamConfig *pStream){
|
2952 |
|
2953 | HRESULT hr = NOERROR;
|
2954 | return hr;
|
2955 | }
|
2956 |
|
2957 |
|
2958 |
|
2959 |
|
2960 |
|
2961 |
|
2962 | HRESULT videoInput::SaveGraphFile(IGraphBuilder *pGraph, WCHAR *wszPath) {
|
2963 | const WCHAR wszStreamName[] = L"ActiveMovieGraph";
|
2964 | HRESULT hr;
|
2965 | IStorage *pStorage = NULL;
|
2966 |
|
2967 |
|
2968 | hr = StgCreateDocfile(
|
2969 | wszPath,
|
2970 | STGM_CREATE | STGM_TRANSACTED | STGM_READWRITE | STGM_SHARE_EXCLUSIVE,
|
2971 | 0, &pStorage);
|
2972 | if(FAILED(hr))
|
2973 | {
|
2974 | return hr;
|
2975 | }
|
2976 |
|
2977 |
|
2978 | IStream *pStream;
|
2979 | hr = pStorage->CreateStream(
|
2980 | wszStreamName,
|
2981 | STGM_WRITE | STGM_CREATE | STGM_SHARE_EXCLUSIVE,
|
2982 | 0, 0, &pStream);
|
2983 | if (FAILED(hr))
|
2984 | {
|
2985 | pStorage->Release();
|
2986 | return hr;
|
2987 | }
|
2988 |
|
2989 |
|
2990 | IPersistStream *pPersist = NULL;
|
2991 | pGraph->QueryInterface(IID_IPersistStream, reinterpret_cast<void**>(&pPersist));
|
2992 | hr = pPersist->Save(pStream, TRUE);
|
2993 | pStream->Release();
|
2994 | pPersist->Release();
|
2995 | if (SUCCEEDED(hr))
|
2996 | {
|
2997 | hr = pStorage->Commit(STGC_DEFAULT);
|
2998 | }
|
2999 | pStorage->Release();
|
3000 | return hr;
|
3001 | }
|
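/*
   SaveGraphFile() is a debugging aid: it serialises the running filter graph into a
   structured-storage .GRF file that can be opened in GraphEdit/GraphStudio. Sketch
   (the path is an assumption, and pGraph must be a built IGraphBuilder):

       SaveGraphFile(VD->pGraph, L"C:\\capture_graph.grf");
*/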
3002 |
|
3003 |
|
3004 |
|
3005 |
|
3006 |
|
3007 |
|
3008 |
|
3009 | HRESULT videoInput::routeCrossbar(ICaptureGraphBuilder2 **ppBuild, IBaseFilter **pVidInFilter, int conType, GUID captureMode){
|
3010 |
|
3011 |
|
3012 | ICaptureGraphBuilder2 *pBuild = NULL;
|
3013 | pBuild = *ppBuild;
|
3014 |
|
3015 |
|
3016 | IBaseFilter *pVidFilter = NULL;
|
3017 | pVidFilter = * pVidInFilter;
|
3018 |
|
3019 |
|
3020 | IAMCrossbar *pXBar1 = NULL;
|
3021 | HRESULT hr = pBuild->FindInterface(&LOOK_UPSTREAM_ONLY, NULL, pVidFilter,
|
3022 | IID_IAMCrossbar, (void**)&pXBar1);
|
3023 | if (SUCCEEDED(hr))
|
3024 | {
|
3025 |
|
3026 | bool foundDevice = false;
|
3027 |
|
3028 | if(verbose)printf("SETUP: You are not a webcam! Setting Crossbar\n");
|
3029 | pXBar1->Release();
|
3030 |
|
3031 | IAMCrossbar *Crossbar;
|
3032 | hr = pBuild->FindInterface(&captureMode, &MEDIATYPE_Interleaved, pVidFilter, IID_IAMCrossbar, (void **)&Crossbar);
|
3033 |
|
3034 | if(hr != NOERROR){
|
3035 | hr = pBuild->FindInterface(&captureMode, &MEDIATYPE_Video, pVidFilter, IID_IAMCrossbar, (void **)&Crossbar);
|
3036 | }
|
3037 |
|
3038 | LONG lInpin, lOutpin;
|
3039 | hr = Crossbar->get_PinCounts(&lOutpin , &lInpin);
|
3040 |
|
3041 | BOOL IPin=TRUE; LONG pIndex=0 , pRIndex=0 , pType=0;
|
3042 |
|
3043 | while( pIndex < lInpin)
|
3044 | {
|
3045 | hr = Crossbar->get_CrossbarPinInfo( IPin , pIndex , &pRIndex , &pType);
|
3046 |
|
3047 | if( pType == conType){
|
3048 | if(verbose)printf("SETUP: Found Physical Interface");
|
3049 |
|
3050 | switch(conType){
|
3051 |
|
3052 | case PhysConn_Video_Composite:
|
3053 | if(verbose)printf(" - Composite\n");
|
3054 | break;
|
3055 | case PhysConn_Video_SVideo:
|
3056 | if(verbose)printf(" - S-Video\n");
|
3057 | break;
|
3058 | case PhysConn_Video_Tuner:
|
3059 | if(verbose)printf(" - Tuner\n");
|
3060 | break;
|
3061 | case PhysConn_Video_USB:
|
3062 | if(verbose)printf(" - USB\n");
|
3063 | break;
|
3064 | case PhysConn_Video_1394:
|
3065 | if(verbose)printf(" - Firewire\n");
|
3066 | break;
|
3067 | }
|
3068 |
|
3069 | foundDevice = true;
|
3070 | break;
|
3071 | }
|
3072 | pIndex++;
|
3073 |
|
3074 | }
|
3075 |
|
3076 | if(foundDevice){
|
3077 | BOOL OPin=FALSE; LONG pOIndex=0 , pORIndex=0 , pOType=0;
|
3078 | while( pOIndex < lOutpin)
|
3079 | {
|
3080 | hr = Crossbar->get_CrossbarPinInfo( OPin , pOIndex , &pORIndex , &pOType);
|
3081 | if( pOType == PhysConn_Video_VideoDecoder)
|
3082 | break;
|
pOIndex++;
|
3083 | }
|
3084 | Crossbar->Route(pOIndex,pIndex);
|
3085 | }else{
|
3086 | if(verbose)printf("SETUP: Didn't find specified Physical Connection type. Using Default.\n");
|
3087 | }
|
3088 |
|
3089 |
|
3090 |
|
3091 |
|
3092 |
|
3093 |
|
3094 | if(pXBar1)pXBar1->Release();
|
3095 | if(pXBar1)pXBar1 = NULL;
|
3096 |
|
3097 | }else{
|
3098 | if(verbose)printf("SETUP: You are a webcam or snazzy firewire cam! No Crossbar needed\n");
|
3099 | return hr;
|
3100 | }
|
3101 |
|
3102 | return hr;
|
3103 | }
|
3104 |
|
3105 |
|
3106 |
|
3107 | class CvCaptureCAM_DShow : public CvCapture
|
3108 | {
|
3109 | public:
|
3110 | CvCaptureCAM_DShow();
|
3111 | virtual ~CvCaptureCAM_DShow() { close(); }
|
3112 |
|
3113 | virtual bool open( int index );
|
3114 | virtual void close();
|
3115 | virtual double getProperty(int);
|
3116 | virtual bool setProperty(int, double);
|
3117 | virtual bool grabFrame();
|
3118 | virtual IplImage* retrieveFrame(int);
|
3119 | virtual int getCaptureDomain() { return CV_CAP_DSHOW; }
|
3120 |
|
3121 |
|
3122 |
|
3123 | protected:
|
3124 | void init();
|
3125 |
|
3126 | int index, width, height,fourcc;
|
3127 | IplImage* frame;
|
3128 | static videoInput VI;
|
3129 | };
|
3130 |
|
3131 |
|
3132 | struct SuppressVideoInputMessages
|
3133 | {
|
3134 | SuppressVideoInputMessages() { videoInput::setVerbose(false); }
|
3135 | };
|
3136 |
|
3137 | static SuppressVideoInputMessages do_it;
|
3138 | videoInput CvCaptureCAM_DShow::VI;
|
3139 |
|
3140 |
|
3141 |
|
3142 | CvCaptureCAM_DShow::CvCaptureCAM_DShow()
|
3143 | {
|
3144 | index = -1;
|
3145 | frame = 0;
|
3146 | width = height = fourcc = -1;
|
3147 |
|
3148 | }
|
3149 |
|
3150 | void CvCaptureCAM_DShow::close()
|
3151 | {
|
3152 | if( index >= 0 )
|
3153 | {
|
3154 | VI.stopDevice(index);
|
3155 | index = -1;
|
3156 | cvReleaseImage(&frame);
|
3157 | }
|
3158 | width = height = -1;
|
3159 | }
|
3160 |
|
3161 |
|
3162 | bool CvCaptureCAM_DShow::open( int _index )
|
3163 | {
|
3164 | int try_index = _index;
|
3165 | int devices = 0;
|
3166 |
|
3167 | close();
|
3168 | devices = VI.listDevices(true);
|
3169 | if (devices == 0)
|
3170 | return false;
|
3171 | try_index = try_index < 0 ? 0 : (try_index > devices-1 ? devices-1 : try_index);
|
3172 | VI.setupDevice(try_index);
|
3173 | if( !VI.isDeviceSetup(try_index) )
|
3174 | return false;
|
3175 | index = try_index;
|
3176 | return true;
|
3177 | }
|
3178 |
|
3179 | bool CvCaptureCAM_DShow::grabFrame()
|
3180 | {
|
3181 | return true;
|
3182 | }
|
3183 |
|
3184 |
|
3185 | IplImage* CvCaptureCAM_DShow::retrieveFrame(int)
|
3186 | {
|
3187 | if( !frame || VI.getWidth(index) != frame->width || VI.getHeight(index) != frame->height )
|
3188 | {
|
3189 | if (frame)
|
3190 | cvReleaseImage( &frame );
|
3191 | int w = VI.getWidth(index), h = VI.getHeight(index);
|
3192 | frame = cvCreateImage( cvSize(w,h), 8, 3 );
|
3193 | }
|
3194 |
|
3195 | VI.getPixels( index, (uchar*)frame->imageData, false, true );
|
3196 | return frame;
|
3197 | }
|
3198 |
|
3199 | double CvCaptureCAM_DShow::getProperty( int property_id )
|
3200 | {
|
3201 |
|
3202 | long min_value,max_value,stepping_delta,current_value,flags,defaultValue;
|
3203 |
|
3204 |
|
3205 | switch( property_id )
|
3206 | {
|
3207 | case CV_CAP_PROP_FRAME_WIDTH:
|
3208 | return VI.getWidth(index);
|
3209 |
|
3210 | case CV_CAP_PROP_FRAME_HEIGHT:
|
3211 | return VI.getHeight(index);
|
3212 |
|
3213 | case CV_CAP_PROP_FOURCC:
|
3214 | return VI.getFourcc(index);
|
3215 |
|
3216 | case CV_CAP_PROP_FPS:
|
3217 | return VI.getFPS(index);
|
3218 | }
|
3219 |
|
3220 |
|
3221 | switch( property_id )
|
3222 | {
|
3223 | case CV_CAP_PROP_BRIGHTNESS:
|
3224 | if ( VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_BRIGHTNESS),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3225 |
|
3226 | case CV_CAP_PROP_CONTRAST:
|
3227 | if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_CONTRAST),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3228 |
|
3229 | case CV_CAP_PROP_HUE:
|
3230 | if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_HUE),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3231 |
|
3232 | case CV_CAP_PROP_SATURATION:
|
3233 | if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_SATURATION),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3234 |
|
3235 | case CV_CAP_PROP_SHARPNESS:
|
3236 | if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_SHARPNESS),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3237 |
|
3238 | case CV_CAP_PROP_GAMMA:
|
3239 | if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_GAMMA),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3240 |
|
3241 | case CV_CAP_PROP_MONOCROME:
|
3242 | if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_MONOCROME),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3243 |
|
3244 | case CV_CAP_PROP_WHITE_BALANCE_BLUE_U:
|
3245 | if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_WHITE_BALANCE_BLUE_U),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3246 |
|
3247 | case CV_CAP_PROP_BACKLIGHT:
|
3248 | if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_BACKLIGHT),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3249 |
|
3250 | case CV_CAP_PROP_GAIN:
|
3251 | if (VI.getVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_GAIN),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3252 | }
|
3253 |
|
3254 |
|
3255 | switch( property_id )
|
3256 | {
|
3257 |
|
3258 | case CV_CAP_PROP_BACKLIGHT:
|
3259 | if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_BACKLIGHT),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3260 |
|
3261 | case CV_CAP_PROP_PAN:
|
3262 | if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_PAN),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3263 |
|
3264 | case CV_CAP_PROP_TILT:
|
3265 | if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_TILT),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3266 |
|
3267 | case CV_CAP_PROP_ROLL:
|
3268 | if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_ROLL),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3269 |
|
3270 | case CV_CAP_PROP_ZOOM:
|
3271 | if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_ZOOM),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3272 |
|
3273 | case CV_CAP_PROP_IRIS:
|
3274 | if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_IRIS),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3275 |
|
3276 | case CV_CAP_PROP_FOCUS:
|
3277 | if (VI.getVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_FOCUS),min_value,max_value,stepping_delta,current_value,flags,defaultValue) ) return (double)current_value;
|
3278 |
|
3279 | }
|
3280 |
|
3281 |
|
3282 | return -1;
|
3283 | }
|
3284 |
|
3285 | bool CvCaptureCAM_DShow::setProperty( int property_id, double value )
|
3286 | {
|
3287 |
|
3288 | bool handled = false;
|
3289 |
|
3290 | switch( property_id )
|
3291 | {
|
3292 | case CV_CAP_PROP_FRAME_WIDTH:
|
3293 | width = cvRound(value);
|
3294 | handled = true;
|
3295 | break;
|
3296 |
|
3297 | case CV_CAP_PROP_FRAME_HEIGHT:
|
3298 | height = cvRound(value);
|
3299 | handled = true;
|
3300 | break;
|
3301 |
|
3302 | case CV_CAP_PROP_FOURCC:
|
3303 | fourcc = cvRound(value);
|
3304 | if ( fourcc < 0 ) {
|
3305 |
|
3306 |
|
3307 | }
|
3308 | handled = true;
|
3309 | break;
|
3310 |
|
3311 | case CV_CAP_PROP_FPS:
|
3312 | VI.setIdealFramerate(index,value);
|
3313 | handled = true;
|
3314 | break;
|
3315 |
|
3316 | }
|
3317 |
|
3318 | if ( handled ) {
|
3319 |
|
3320 | if( width > 0 && height > 0 )
|
3321 | {
|
3322 | if( width != VI.getWidth(index) || height != VI.getHeight(index) )
|
3323 | {
|
3324 | VI.stopDevice(index);
|
3325 | VI.setupDeviceFourcc(index, width, height,fourcc);
|
3326 | }
|
3327 | width = height = fourcc = -1;
|
3328 | return VI.isDeviceSetup(index);
|
3329 | }
|
3330 | return true;
|
3331 | }
|
3332 |
|
3333 |
|
3334 | if ( property_id == CV_CAP_PROP_SETTINGS ) {
|
3335 | VI.showSettingsWindow(index);
|
3336 | return true;
|
3337 | }
|
3338 |
|
3339 |
|
3340 | switch( property_id )
|
3341 | {
|
3342 |
|
3343 | case CV_CAP_PROP_BRIGHTNESS:
|
3344 | return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_BRIGHTNESS),(long)value);
|
3345 |
|
3346 | case CV_CAP_PROP_CONTRAST:
|
3347 | return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_CONTRAST),(long)value);
|
3348 |
|
3349 | case CV_CAP_PROP_HUE:
|
3350 | return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_HUE),(long)value);
|
3351 |
|
3352 | case CV_CAP_PROP_SATURATION:
|
3353 | return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_SATURATION),(long)value);
|
3354 |
|
3355 | case CV_CAP_PROP_SHARPNESS:
|
3356 | return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_SHARPNESS),(long)value);
|
3357 |
|
3358 | case CV_CAP_PROP_GAMMA:
|
3359 | return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_GAMMA),(long)value);
|
3360 |
|
3361 | case CV_CAP_PROP_MONOCROME:
|
3362 | return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_MONOCROME),(long)value);
|
3363 |
|
3364 | case CV_CAP_PROP_WHITE_BALANCE_BLUE_U:
|
3365 | return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_WHITE_BALANCE_BLUE_U),(long)value);
|
3366 |
|
3367 | case CV_CAP_PROP_BACKLIGHT:
|
3368 | return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_BACKLIGHT),(long)value);
|
3369 |
|
3370 | case CV_CAP_PROP_GAIN:
|
3371 | return VI.setVideoSettingFilter(index,VI.getVideoPropertyFromCV(CV_CAP_PROP_GAIN),(long)value);
|
3372 |
|
3373 | default:
|
3374 | ;
|
3375 | }
|
3376 |
|
3377 |
|
3378 | switch( property_id )
|
3379 | {
|
3380 | case CV_CAP_PROP_PAN:
|
3381 | return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_PAN),(long)value);
|
3382 |
|
3383 | case CV_CAP_PROP_TILT:
|
3384 | return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_TILT),(long)value);
|
3385 |
|
3386 | case CV_CAP_PROP_ROLL:
|
3387 | return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_ROLL),(long)value);
|
3388 |
|
3389 | case CV_CAP_PROP_ZOOM:
|
3390 | return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_ZOOM),(long)value);
|
3391 |
|
3392 | case CV_CAP_PROP_EXPOSURE:
|
3393 | return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_EXPOSURE),(long)value);
|
3394 |
|
3395 | case CV_CAP_PROP_IRIS:
|
3396 | return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_IRIS),(long)value);
|
3397 |
|
3398 | case CV_CAP_PROP_FOCUS:
|
3399 | return VI.setVideoSettingCamera(index,VI.getCameraPropertyFromCV(CV_CAP_PROP_FOCUS),(long)value);
|
3400 | }
|
3401 |
|
3402 |
|
3403 | return false;
|
3404 | }
|
3405 |
|
3406 |
|
3407 | CvCapture* cvCreateCameraCapture_DShow( int index )
|
3408 | {
|
3409 | CvCaptureCAM_DShow* capture = new CvCaptureCAM_DShow;
|
3410 |
|
3411 | if( capture->open( index ))
|
3412 | return capture;
|
3413 |
|
3414 | delete capture;
|
3415 | return 0;
|
3416 | }
|
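/*
   Usage sketch from the OpenCV side of this backend (the camera index 0 is an
   assumption; CV_CAP_PROP_SETTINGS is the value 28 from the enum above and may not be
   in the public header of every OpenCV version, in which case pass 28 directly):

       CvCapture* cap = cvCreateCameraCapture( CV_CAP_DSHOW + 0 );      // force the DirectShow backend
       if( cap ){
           cvSetCaptureProperty( cap, CV_CAP_PROP_SETTINGS, 0 );        // pop up the driver's filter property page
           IplImage* frame = cvQueryFrame( cap );                       // grab + retrieve one frame
           cvReleaseCapture( &cap );
       }
*/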
3417 |
|
3418 | #endif
|