1 | #if !defined(ANDROID_r2_2_0) && !defined(ANDROID_r2_3_3) && !defined(ANDROID_r3_0_1) && \
|
2 | !defined(ANDROID_r4_0_0) && !defined(ANDROID_r4_0_3) && !defined(ANDROID_r4_1_1) && \
|
3 | !defined(ANDROID_r4_2_0) && !defined(ANDROID_r4_3_0) && !defined(ANDROID_r4_4_0)
|
4 | # error Building camera wrapper for your version of Android is not supported by OpenCV.\
|
5 | You need to modify OpenCV sources in order to compile camera wrapper for your version of Android.
|
6 | #endif
|
7 |
|
8 | #include <camera/Camera.h>
|
9 | #include <camera/CameraParameters.h>
|
10 |
|
11 | #if defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3)
|
12 | # include <system/camera.h>
|
13 | #endif
|
14 |
|
15 | #include "camera_wrapper.h"
|
16 | #include "../include/camera_properties.h"
|
17 |
|
18 | #if defined(ANDROID_r3_0_1) || defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3) || defined(ANDROID_r4_1_1)
|
19 |
|
20 | # include <gui/SurfaceTexture.h>
|
21 | # define MAGIC_OPENCV_TEXTURE_ID (0x10)
|
22 | #elif defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0)
|
23 | # include <gui/ISurface.h>
|
24 | # include <gui/BufferQueue.h>
|
25 | #elif defined(ANDROID_r4_3_0) || defined(ANDROID_r4_4_0)
|
26 | # include <gui/IGraphicBufferProducer.h>
|
27 | # include <gui/BufferQueue.h>
|
28 | #else
|
29 | # include <surfaceflinger/ISurface.h>
|
30 | #endif
|
31 |
|
32 | #include <string>
|
33 | #include <fstream>
|
34 |
|
35 |
|
36 | #ifdef LOGD
|
37 | # undef LOGD
|
38 | #endif
|
39 |
|
40 | #ifdef LOGI
|
41 | # undef LOGI
|
42 | #endif
|
43 |
|
44 | #ifdef LOGW
|
45 | # undef LOGW
|
46 | #endif
|
47 |
|
48 | #ifdef LOGE
|
49 | # undef LOGE
|
50 | #endif
|
51 |
|
52 |
|
53 | #include <android/log.h>
|
54 | #define CAMERA_LOG_TAG "OpenCV_NativeCamera"
|
55 | #define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, CAMERA_LOG_TAG, __VA_ARGS__))
|
56 | #define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, CAMERA_LOG_TAG, __VA_ARGS__))
|
57 | #define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, CAMERA_LOG_TAG, __VA_ARGS__))
|
58 | #define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, CAMERA_LOG_TAG, __VA_ARGS__))
|
59 |
|
60 | #include <dlfcn.h>
|
61 |
|
62 | using namespace android;
|
63 |
|
64 |
|
65 |
|
66 |
|
67 | #define MAGIC_TAIL 16384
|
68 |
|
69 |
|
70 | void debugShowFPS();
|
71 |
|
#if defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0)
// No-op BufferQueue consumer listener. On Android 4.1-4.3 a consumer must be
// connected to the BufferQueue used as the (dummy) preview target before the
// preview can start; this wrapper receives frames through the preview data
// callback instead, so buffer events are deliberately ignored.
class ConsumerListenerStub: public BufferQueue::ConsumerListener
{
public:
    virtual void onFrameAvailable()
    {
    }
    virtual void onBuffersReleased()
    {
    }
};
#elif defined(ANDROID_r4_4_0)
// Same no-op stub for Android 4.4, where the listener must be a binder-backed
// object (BnConsumerListener) rather than a plain ConsumerListener.
class ConsumerListenerStub: public android::BnConsumerListener
{
public:
    virtual void onFrameAvailable()
    {
    }
    virtual void onBuffersReleased()
    {
    }
    virtual ~ConsumerListenerStub()
    {
    }
};
#endif
|
98 |
|
99 |
|
// Returns the basename of the current process, read from argv[0] in
// /proc/self/cmdline (fields there are NUL-separated, hence the '\0'
// getline delimiter). Returns an empty string when the file cannot be
// read. Used to identify this client to the CameraService on Android 4.3+.
std::string getProcessName()
{
    std::string result;
    // RAII: the stream closes itself; the old explicit close() also ran on
    // streams that never opened.
    std::ifstream f("/proc/self/cmdline");

    if (f.is_open())
    {
        std::string fullPath;
        std::getline(f, fullPath, '\0');
        if (!fullPath.empty())
        {
            // Keep only what follows the last '/', replacing the old manual
            // signed-index reverse scan.
            std::string::size_type slash = fullPath.find_last_of('/');
            if (slash == std::string::npos)
                result = fullPath;
            else
                result = fullPath.substr(slash + 1);
        }
    }

    return result;
}
|
122 |
|
123 | void debugShowFPS()
|
124 | {
|
125 | static int mFrameCount = 0;
|
126 | static int mLastFrameCount = 0;
|
127 | static nsecs_t mLastFpsTime = systemTime();
|
128 | static float mFps = 0;
|
129 |
|
130 | mFrameCount++;
|
131 |
|
132 | if (( mFrameCount % 30 ) != 0)
|
133 | return;
|
134 |
|
135 | nsecs_t now = systemTime();
|
136 | nsecs_t diff = now - mLastFpsTime;
|
137 |
|
138 | if (diff==0)
|
139 | return;
|
140 |
|
141 | mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
|
142 | mLastFpsTime = now;
|
143 | mLastFrameCount = mFrameCount;
|
144 | LOGI("### Camera FPS ### [%d] Frames, %.2f FPS", mFrameCount, mFps);
|
145 | }
|
146 |
|
147 | class CameraHandler: public CameraListener
|
148 | {
|
149 | protected:
|
150 | int cameraId;
|
151 | sp<Camera> camera;
|
152 | #if defined(ANDROID_r3_0_1) || defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3)
|
153 | sp<SurfaceTexture> surface;
|
154 | #endif
|
155 | #if defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0) || defined(ANDROID_r4_4_0)
|
156 | sp<BufferQueue> queue;
|
157 | sp<ConsumerListenerStub> listener;
|
158 | #endif
|
159 | CameraParameters* params;
|
160 | CameraCallback cameraCallback;
|
161 | void* userData;
|
162 |
|
163 | int emptyCameraCallbackReported;
|
164 |
|
165 | int width;
|
166 | int height;
|
167 |
|
168 | static const char* flashModesNames[ANDROID_CAMERA_FLASH_MODES_NUM];
|
169 | static const char* focusModesNames[ANDROID_CAMERA_FOCUS_MODES_NUM];
|
170 | static const char* whiteBalanceModesNames[ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM];
|
171 | static const char* antibandingModesNames[ANDROID_CAMERA_ANTIBANDING_MODES_NUM];
|
172 |
|
173 | void doCall(void* buffer, size_t bufferSize)
|
174 | {
|
175 | if (cameraCallback == 0)
|
176 | {
|
177 | if (!emptyCameraCallbackReported)
|
178 | LOGE("CameraHandler::doCall(void*, size_t): Camera callback is empty!");
|
179 |
|
180 | emptyCameraCallbackReported++;
|
181 | }
|
182 | else
|
183 | {
|
184 | bool res = (*cameraCallback)(buffer, bufferSize, userData);
|
185 |
|
186 | if(!res)
|
187 | {
|
188 | LOGE("CameraHandler::doCall(void*, size_t): cameraCallback returns false (camera connection will be closed)");
|
189 | closeCameraConnect();
|
190 | }
|
191 | }
|
192 | }
|
193 |
|
194 | void doCall(const sp<IMemory>& dataPtr)
|
195 | {
|
196 | if (dataPtr == NULL)
|
197 | {
|
198 | LOGE("CameraHandler::doCall(const sp<IMemory>&): dataPtr==NULL (no frame to handle)");
|
199 | return;
|
200 | }
|
201 |
|
202 | size_t size = dataPtr->size();
|
203 | if (size <= 0)
|
204 | {
|
205 | LOGE("CameraHandler::doCall(const sp<IMemory>&): IMemory object is of zero size");
|
206 | return;
|
207 | }
|
208 |
|
209 | void* buffer = (void *)dataPtr->pointer();
|
210 | if (!buffer)
|
211 | {
|
212 | LOGE("CameraHandler::doCall(const sp<IMemory>&): Buffer pointer is NULL");
|
213 | return;
|
214 | }
|
215 |
|
216 | doCall(buffer, size);
|
217 | }
|
218 |
|
219 | virtual void postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr)
|
220 | {
|
221 | static uint32_t count = 0;
|
222 | count++;
|
223 |
|
224 | LOGE("Recording cb: %d %lld %%p Offset:%%d Stride:%%d\n", msgType, timestamp);
|
225 |
|
226 | if (dataPtr == NULL)
|
227 | {
|
228 | LOGE("postDataTimestamp: dataPtr IS ZERO -- returning");
|
229 | camera->releaseRecordingFrame(dataPtr);
|
230 | LOGE("postDataTimestamp: camera->releaseRecordingFrame(dataPtr) is done");
|
231 | return;
|
232 | }
|
233 |
|
234 | uint8_t *ptr = (uint8_t*) dataPtr->pointer();
|
235 | if (ptr)
|
236 | LOGE("VID_CB: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x", ptr[0], ptr[1], ptr[2], ptr[3], ptr[4], ptr[5], ptr[6], ptr[7], ptr[8], ptr[9]);
|
237 | else
|
238 | LOGE("postDataTimestamp: Ptr is zero");
|
239 |
|
240 | camera->releaseRecordingFrame(dataPtr);
|
241 | }
|
242 |
|
243 |
|
244 | static int split_float(const char *str, float* out, char delim, int max_elem_num,
|
245 | char **endptr = NULL)
|
246 | {
|
247 |
|
248 | char *end = const_cast<char*>(str);
|
249 | int elem_num = 0;
|
250 | for(; elem_num < max_elem_num; elem_num++ ){
|
251 | char* curr_end;
|
252 | out[elem_num] = (float)strtof(end, &curr_end);
|
253 |
|
254 | if(end == curr_end){
|
255 | break;
|
256 | }
|
257 | if (*curr_end != delim) {
|
258 |
|
259 | if (*curr_end == 0){
|
260 | elem_num++;
|
261 | break;
|
262 | }
|
263 | else {
|
264 | LOGE("Cannot find delimeter (%c) in str=%s", delim, str);
|
265 | return -1;
|
266 | }
|
267 | }
|
268 |
|
269 | end = curr_end + 1;
|
270 | }
|
271 | if (endptr)
|
272 | *endptr = end;
|
273 | return elem_num;
|
274 | }
|
275 |
|
276 | int is_supported(const char* supp_modes_key, const char* mode)
|
277 | {
|
278 | const char* supported_modes = params->get(supp_modes_key);
|
279 | return (supported_modes && mode && (strstr(supported_modes, mode) > 0));
|
280 | }
|
281 |
|
282 | float getFocusDistance(int focus_distance_type)
|
283 | {
|
284 | #if !defined(ANDROID_r2_2_0)
|
285 | if (focus_distance_type >= 0 && focus_distance_type < 3)
|
286 | {
|
287 | float focus_distances[3];
|
288 | const char* output = params->get(CameraParameters::KEY_FOCUS_DISTANCES);
|
289 | int val_num = CameraHandler::split_float(output, focus_distances, ',', 3);
|
290 | if(val_num == 3)
|
291 | {
|
292 | return focus_distances[focus_distance_type];
|
293 | }
|
294 | else
|
295 | {
|
296 | LOGE("Invalid focus distances.");
|
297 | }
|
298 | }
|
299 | #endif
|
300 | return -1;
|
301 | }
|
302 |
|
303 | static int getModeNum(const char** modes, const int modes_num, const char* mode_name)
|
304 | {
|
305 | for (int i = 0; i < modes_num; i++){
|
306 | if(!strcmp(modes[i],mode_name))
|
307 | return i;
|
308 | }
|
309 | return -1;
|
310 | }
|
311 |
|
312 | public:
|
313 | CameraHandler(CameraCallback callback = 0, void* _userData = 0):
|
314 | cameraId(0),
|
315 | cameraCallback(callback),
|
316 | userData(_userData),
|
317 | emptyCameraCallbackReported(0)
|
318 | {
|
319 | LOGD("Instantiated new CameraHandler (%p, %p)", callback, _userData);
|
320 | void* params_buffer = operator new(sizeof(CameraParameters) + MAGIC_TAIL);
|
321 | params = new(params_buffer) CameraParameters();
|
322 | }
|
323 |
|
324 | virtual ~CameraHandler()
|
325 | {
|
326 | if (params)
|
327 | params->~CameraParameters();
|
328 | operator delete(params);
|
329 | LOGD("CameraHandler destructor is called");
|
330 | }
|
331 |
|
332 | virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2)
|
333 | {
|
334 | LOGE("CameraHandler::Notify: msgType=%d ext1=%d ext2=%d\n", msgType, ext1, ext2);
|
335 | |
336 | if ( msgType & CAMERA_MSG_FOCUS )
|
337 | LOGE("CameraHandler::Notify AutoFocus %s in %llu us\n", (ext1) ? "OK" : "FAIL", timevalDelay(&autofocus_start));
|
338 |
|
339 | if ( msgType & CAMERA_MSG_SHUTTER )
|
340 | LOGE("CameraHandler::Notify Shutter done in %llu us\n", timeval_delay(&picture_start));
|
341 | #endif
|
342 | }
|
343 |
|
344 | virtual void postData(int32_t msgType, const sp<IMemory>& dataPtr
|
345 | #if defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3) || defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) \
|
346 | || defined(ANDROID_r4_3_0) || defined(ANDROID_r4_4_0)
|
347 | ,camera_frame_metadata_t*
|
348 | #endif
|
349 | )
|
350 | {
|
351 | debugShowFPS();
|
352 |
|
353 | if ( msgType & CAMERA_MSG_PREVIEW_FRAME )
|
354 | {
|
355 | doCall(dataPtr);
|
356 | return;
|
357 | }
|
358 |
|
359 |
|
360 |
|
361 |
|
362 | if ( msgType & CAMERA_MSG_RAW_IMAGE )
|
363 | LOGE("CameraHandler::postData Unexpected data format: RAW\n");
|
364 |
|
365 | if (msgType & CAMERA_MSG_POSTVIEW_FRAME)
|
366 | LOGE("CameraHandler::postData Unexpected data format: Postview frame\n");
|
367 |
|
368 | if (msgType & CAMERA_MSG_COMPRESSED_IMAGE )
|
369 | LOGE("CameraHandler::postData Unexpected data format: JPEG");
|
370 | }
|
371 |
|
372 | static CameraHandler* initCameraConnect(const CameraCallback& callback, int cameraId, void* userData, CameraParameters* prevCameraParameters);
|
373 | void closeCameraConnect();
|
374 | double getProperty(int propIdx);
|
375 | void setProperty(int propIdx, double value);
|
376 | static void applyProperties(CameraHandler** ppcameraHandler);
|
377 |
|
378 | std::string cameraPropertySupportedPreviewSizesString;
|
379 | std::string cameraPropertyPreviewFormatString;
|
380 | };
|
381 |
|
// Mode-name lookup tables. Each table is indexed by the corresponding
// ANDROID_CAMERA_* property value and yields the CameraParameters string
// constant that the HAL understands, so the index layout must stay stable
// across Android versions (hence the FOCUS_MODE_AUTO placeholders below).
const char* CameraHandler::flashModesNames[ANDROID_CAMERA_FLASH_MODES_NUM] =
{
    CameraParameters::FLASH_MODE_AUTO,
    CameraParameters::FLASH_MODE_OFF,
    CameraParameters::FLASH_MODE_ON,
    CameraParameters::FLASH_MODE_RED_EYE,
    CameraParameters::FLASH_MODE_TORCH
};

const char* CameraHandler::focusModesNames[ANDROID_CAMERA_FOCUS_MODES_NUM] =
{
    CameraParameters::FOCUS_MODE_AUTO,
#if !defined(ANDROID_r2_2_0)
    CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO,
#else
    // Continuous video focus is unavailable on 2.2; keep the slot occupied.
    CameraParameters::FOCUS_MODE_AUTO,
#endif
    CameraParameters::FOCUS_MODE_EDOF,
    CameraParameters::FOCUS_MODE_FIXED,
    CameraParameters::FOCUS_MODE_INFINITY,
    CameraParameters::FOCUS_MODE_MACRO,
#if !defined(ANDROID_r2_2_0) && !defined(ANDROID_r2_3_3) && !defined(ANDROID_r3_0_1)
    CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE
#else
    // Continuous picture focus appeared in 4.0; placeholder for older trees.
    CameraParameters::FOCUS_MODE_AUTO
#endif
};

const char* CameraHandler::whiteBalanceModesNames[ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM] =
{
    CameraParameters::WHITE_BALANCE_AUTO,
    CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT,
    CameraParameters::WHITE_BALANCE_DAYLIGHT,
    CameraParameters::WHITE_BALANCE_FLUORESCENT,
    CameraParameters::WHITE_BALANCE_INCANDESCENT,
    CameraParameters::WHITE_BALANCE_SHADE,
    CameraParameters::WHITE_BALANCE_TWILIGHT
};

const char* CameraHandler::antibandingModesNames[ANDROID_CAMERA_ANTIBANDING_MODES_NUM] =
{
    CameraParameters::ANTIBANDING_50HZ,
    CameraParameters::ANTIBANDING_60HZ,
    CameraParameters::ANTIBANDING_AUTO
};
|
427 |
|
428 |
|
// Connects to the Android CameraService and starts the preview stream.
//
// Because Camera::connect() changed its signature across Android releases,
// the wrapper does not link against it directly: it dlopen()s
// libcamera_client.so and probes the known mangled symbol names, calling
// whichever variant the device actually exports.
//
// cameraId may be a real index, ANY_CAMERA_INDEX (-1, first camera), or the
// magic BACK/FRONT_CAMERA_INDEX values which are resolved via getCameraInfo.
// prevCameraParameters, when non-NULL, re-applies the settings of a previous
// handler (used when reconnecting after a property change).
// Returns a heap-allocated handler, or NULL on any failure.
CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback, int cameraId, void* userData, CameraParameters* prevCameraParameters)
{

    // The four Camera::connect() ABIs seen across Android 2.2 - 4.4.
    typedef sp<Camera> (*Android22ConnectFuncType)();
    typedef sp<Camera> (*Android23ConnectFuncType)(int);
    typedef sp<Camera> (*Android3DConnectFuncType)(int, int);
    typedef sp<Camera> (*Android43ConnectFuncType)(int, const String16&, int);

    // Magic index values understood by this wrapper (see camera_wrapper.h users).
    const int ANY_CAMERA_INDEX = -1;
    const int BACK_CAMERA_INDEX = 99;
    const int FRONT_CAMERA_INDEX = 98;

    // Mode flags for the 3D-capable connect() variant.
    enum {
        CAMERA_SUPPORT_MODE_2D = 0x01,
        CAMERA_SUPPORT_MODE_3D = 0x02,
        CAMERA_SUPPORT_MODE_NONZSL = 0x04,
        CAMERA_SUPPORT_MODE_ZSL = 0x08
    };

    // Android 4.3 connect() clientUid sentinel: use the caller's UID.
    enum {
        USE_CALLING_UID = -1
    };

    // Itanium-mangled names of the connect() overloads above.
    const char Android22ConnectName[] = "_ZN7android6Camera7connectEv";
    const char Android23ConnectName[] = "_ZN7android6Camera7connectEi";
    const char Android3DConnectName[] = "_ZN7android6Camera7connectEii";
    const char Android43ConnectName[] = "_ZN7android6Camera7connectEiRKNS_8String16Ei";

    int localCameraIndex = cameraId;

    if (cameraId == ANY_CAMERA_INDEX)
    {
        localCameraIndex = 0;
    }
#if !defined(ANDROID_r2_2_0)
    // Resolve the BACK/FRONT magic indexes to a real camera via getCameraInfo
    // (the multi-camera API does not exist on 2.2).
    else if (cameraId == BACK_CAMERA_INDEX)
    {
        LOGD("Back camera selected");
        for (int i = 0; i < Camera::getNumberOfCameras(); i++)
        {
            CameraInfo info;
            Camera::getCameraInfo(i, &info);
            if (info.facing == CAMERA_FACING_BACK)
            {
                localCameraIndex = i;
                break;
            }
        }
    }
    else if (cameraId == FRONT_CAMERA_INDEX)
    {
        LOGD("Front camera selected");
        for (int i = 0; i < Camera::getNumberOfCameras(); i++)
        {
            CameraInfo info;
            Camera::getCameraInfo(i, &info);
            if (info.facing == CAMERA_FACING_FRONT)
            {
                localCameraIndex = i;
                break;
            }
        }
    }

    // If the magic value survived the loops above, no matching camera exists.
    if (localCameraIndex == BACK_CAMERA_INDEX)
    {
        LOGE("Back camera not found!");
        return NULL;
    }
    else if (localCameraIndex == FRONT_CAMERA_INDEX)
    {
        LOGE("Front camera not found!");
        return NULL;
    }
#endif

    LOGD("Re-compiled Library - Check");
    LOGD("CameraHandler::initCameraConnect(%p, %d, %p, %p)", callback, localCameraIndex, userData, prevCameraParameters);

    sp<Camera> camera = 0;

    // Probe the camera client library for whichever connect() ABI it exports.
    void* CameraHALHandle = dlopen("libcamera_client.so", RTLD_LAZY);

    if (!CameraHALHandle)
    {
        LOGE("Cannot link to \"libcamera_client.so\"");
        return NULL;
    }

    // Clear any stale dlerror state before the dlsym probes.
    dlerror();

    if (Android22ConnectFuncType Android22Connect = (Android22ConnectFuncType)dlsym(CameraHALHandle, Android22ConnectName))
    {
        LOGD("Connecting to CameraService v 2.2");
        camera = Android22Connect();
    }
    else if (Android23ConnectFuncType Android23Connect = (Android23ConnectFuncType)dlsym(CameraHALHandle, Android23ConnectName))
    {
        LOGD("Connecting to CameraService v 2.3");
        camera = Android23Connect(localCameraIndex);
    }
    else if (Android3DConnectFuncType Android3DConnect = (Android3DConnectFuncType)dlsym(CameraHALHandle, Android3DConnectName))
    {
        LOGD("Connecting to CameraService v 3D");
        camera = Android3DConnect(localCameraIndex, CAMERA_SUPPORT_MODE_2D);
    }
    else if (Android43ConnectFuncType Android43Connect = (Android43ConnectFuncType)dlsym(CameraHALHandle, Android43ConnectName))
    {
        // 4.3+ requires a client package name for permission checks.
        std::string currentProcName = getProcessName();
        LOGD("Current process name for camera init: %s", currentProcName.c_str());
        camera = Android43Connect(localCameraIndex, String16(currentProcName.c_str()), USE_CALLING_UID);
    }
    else
    {
        dlclose(CameraHALHandle);
        LOGE("Cannot connect to CameraService. Connect method was not found!");
        return NULL;
    }

    // Safe to close: the library stays mapped while the process holds other
    // references to it (the wrapper itself links against it).
    dlclose(CameraHALHandle);

    if ( 0 == camera.get() )
    {
        LOGE("initCameraConnect: Unable to connect to CameraService\n");
        return 0;
    }

    CameraHandler* handler = new CameraHandler(callback, userData);
    camera->setListener(handler);

    handler->camera = camera;
    handler->cameraId = localCameraIndex;

    if (prevCameraParameters != NULL)
    {
        // Reconnect path: push the previous settings to the device and mirror
        // them into this handler's local copy.
        LOGI("initCameraConnect: Setting paramers from previous camera handler");
        camera->setParameters(prevCameraParameters->flatten());
        handler->params->unflatten(prevCameraParameters->flatten());
    }
    else
    {
        // Fresh connection: read the device's current parameter set.
        android::String8 params_str = camera->getParameters();
        LOGI("initCameraConnect: [%s]", params_str.string());

        handler->params->unflatten(params_str);

        LOGD("Supported Cameras: %s", handler->params->get("camera-indexes"));
        LOGD("Supported Picture Sizes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES));
        LOGD("Supported Picture Formats: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS));
        LOGD("Supported Preview Sizes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
        LOGD("Supported Preview Formats: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS));
        LOGD("Supported Preview Frame Rates: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES));
        LOGD("Supported Thumbnail Sizes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES));
        LOGD("Supported Whitebalance Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE));
        LOGD("Supported Effects: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_EFFECTS));
        LOGD("Supported Scene Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_SCENE_MODES));
        LOGD("Supported Focus Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
        LOGD("Supported Antibanding Options: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_ANTIBANDING));
        LOGD("Supported Flash Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_FLASH_MODES));

#if !defined(ANDROID_r2_2_0)
        // Prefer continuous video autofocus when the device offers it.
        const char* available_focus_modes = handler->params->get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
        if (available_focus_modes != 0)
        {
            if (strstr(available_focus_modes, "continuous-video") != NULL)
            {
                handler->params->set(CameraParameters::KEY_FOCUS_MODE, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO);

                status_t resParams = handler->camera->setParameters(handler->params->flatten());

                if (resParams != 0)
                {
                    LOGE("initCameraConnect: failed to set autofocus mode to \"continuous-video\"");
                }
                else
                {
                    LOGD("initCameraConnect: autofocus is set to mode \"continuous-video\"");
                }
            }
        }
#endif

        // Scan the comma-separated preview-format list: take yuv420sp when
        // available (it terminates the scan), otherwise remember yvu420sp.
        const char* available_formats = handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
        if (available_formats != 0)
        {
            const char* format_to_set = 0;
            const char* pos = available_formats;
            const char* ptr = pos;
            while(true)
            {
                while(*ptr != 0 && *ptr != ',') ++ptr;
                if (ptr != pos)
                {
                    if (0 == strncmp(pos, "yuv420sp", ptr - pos))
                    {
                        format_to_set = "yuv420sp";
                        break;
                    }
                    if (0 == strncmp(pos, "yvu420sp", ptr - pos))
                        format_to_set = "yvu420sp";
                }
                if (*ptr == 0)
                    break;
                pos = ++ptr;
            }

            if (0 != format_to_set)
            {
                handler->params->setPreviewFormat(format_to_set);

                status_t resParams = handler->camera->setParameters(handler->params->flatten());

                if (resParams != 0)
                    LOGE("initCameraConnect: failed to set preview format to %s", format_to_set);
                else
                    LOGD("initCameraConnect: preview format is set to %s", format_to_set);
            }
        }

        // Default preview resolution.
        handler->params->setPreviewSize(640, 360);
        status_t resParams = handler->camera->setParameters(handler->params->flatten());
        if (resParams != 0)
            LOGE("initCameraConnect: failed to set preview resolution to 640x360");
        else
            LOGD("initCameraConnect: preview format is set to 640x360");
        LOGD("DEBUG: Step0");
    }

    LOGD("DEBUG: Step1");
    // Every Android version requires *some* preview target before
    // startPreview() succeeds; attach a version-appropriate dummy one.
    status_t bufferStatus;
#if defined(ANDROID_r2_2_0)
    bufferStatus = camera->setPreviewDisplay(sp<ISurface>(0 ));
    if (bufferStatus != 0)
        LOGE("initCameraConnect: failed setPreviewDisplay(0) call (status %d); camera might not work correctly on some devices", bufferStatus);
#elif defined(ANDROID_r2_3_3)
    // 2.3 needs no preview target at all.
#elif defined(ANDROID_r3_0_1) || defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3)
    // Dummy SurfaceTexture bound to a fixed GL texture id.
    void* surface_texture_obj = operator new(sizeof(SurfaceTexture) + MAGIC_TAIL);
    handler->surface = new(surface_texture_obj) SurfaceTexture(MAGIC_OPENCV_TEXTURE_ID);
    bufferStatus = camera->setPreviewTexture(handler->surface);
    if (bufferStatus != 0)
        LOGE("initCameraConnect: failed setPreviewTexture call (status %d); camera might not work correctly", bufferStatus);
#elif defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0)
    // BufferQueue with a no-op consumer listener.
    void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
    handler->queue = new(buffer_queue_obj) BufferQueue();
    void* consumer_listener_obj = operator new(sizeof(ConsumerListenerStub) + MAGIC_TAIL);
    handler->listener = new(consumer_listener_obj) ConsumerListenerStub();
    handler->queue->consumerConnect(handler->listener);
    bufferStatus = camera->setPreviewTexture(handler->queue);
    if (bufferStatus != 0)
        LOGE("initCameraConnect: failed setPreviewTexture call; camera might not work correctly");
# elif defined(ANDROID_r4_4_0)
    LOGD("DEBUG: Step2");
    void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
    LOGD("DEBUG: Step3");
    handler->queue = new(buffer_queue_obj) BufferQueue();
    LOGD("DEBUG: Step4");
    void* consumer_listener_obj = operator new(sizeof(ConsumerListenerStub) + MAGIC_TAIL);
    LOGD("DEBUG: Step5");
    handler->listener = new(consumer_listener_obj) ConsumerListenerStub();
    LOGD("DEBUG: Step6");
    handler->queue->consumerConnect(handler->listener, true);
    LOGD("DEBUG: Step7");
    bufferStatus = handler->camera->setPreviewTarget(handler->queue);
    LOGD("DEBUG: Step8");
    if (bufferStatus != 0)
        LOGE("applyProperties: failed setPreviewTexture call; camera might not work correctly");
# endif
    LOGD("DEBUG: Step9");

    // Enable the preview-frame callback; COPY_OUT asks the service to copy
    // frame data into our process. Flag names changed after 3.0.
#if (defined(ANDROID_r2_2_0) || defined(ANDROID_r2_3_3) || defined(ANDROID_r3_0_1))
# if 1
    camera->setPreviewCallbackFlags( FRAME_CALLBACK_FLAG_ENABLE_MASK | FRAME_CALLBACK_FLAG_COPY_OUT_MASK);
# else
    camera->setPreviewCallbackFlags( FRAME_CALLBACK_FLAG_ENABLE_MASK );
# endif
#else
    camera->setPreviewCallbackFlags( CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK | CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK);
#endif

    LOGD("Starting preview");
    status_t previewStatus = camera->startPreview();

    if (previewStatus != 0)
    {
        LOGE("initCameraConnect: startPreview() fails. Closing camera connection...");
        handler->closeCameraConnect();
        handler = 0;
    }
    else
    {
        LOGD("Preview started successfully");
    }

    return handler;
}
|
731 |
|
732 | void CameraHandler::closeCameraConnect()
|
733 | {
|
734 | if (camera == NULL)
|
735 | {
|
736 | LOGI("... camera is already NULL");
|
737 | return;
|
738 | }
|
739 |
|
740 | camera->stopPreview();
|
741 | #if defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3) || defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) \
|
742 | || defined(ANDROID_r4_3_0) || defined(ANDROID_r4_4_0)
|
743 | camera->setPreviewCallbackFlags(CAMERA_FRAME_CALLBACK_FLAG_NOOP);
|
744 | #endif
|
745 | camera->disconnect();
|
746 | camera.clear();
|
747 | camera=NULL;
|
748 |
|
749 |
|
750 |
|
751 |
|
752 |
|
753 |
|
754 |
|
755 |
|
756 |
|
757 |
|
758 |
|
759 |
|
760 |
|
761 |
|
762 |
|
763 |
|
764 |
|
765 |
|
766 |
|
767 |
|
768 | }
|
769 |
|
770 | double CameraHandler::getProperty(int propIdx)
|
771 | {
|
772 | LOGD("CameraHandler::getProperty(%d)", propIdx);
|
773 |
|
774 | switch (propIdx)
|
775 | {
|
776 | case ANDROID_CAMERA_PROPERTY_FRAMEWIDTH:
|
777 | {
|
778 | int w,h;
|
779 | params->getPreviewSize(&w, &h);
|
780 | return w;
|
781 | }
|
782 | case ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT:
|
783 | {
|
784 | int w,h;
|
785 | params->getPreviewSize(&w, &h);
|
786 | return h;
|
787 | }
|
788 | case ANDROID_CAMERA_PROPERTY_SUPPORTED_PREVIEW_SIZES_STRING:
|
789 | {
|
790 | cameraPropertySupportedPreviewSizesString = params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES);
|
791 | union {const char* str;double res;} u;
|
792 | memset(&u.res, 0, sizeof(u.res));
|
793 | u.str = cameraPropertySupportedPreviewSizesString.c_str();
|
794 | return u.res;
|
795 | }
|
796 | case ANDROID_CAMERA_PROPERTY_PREVIEW_FORMAT_STRING:
|
797 | {
|
798 | const char* fmt = params->get(CameraParameters::KEY_PREVIEW_FORMAT);
|
799 | if (fmt == CameraParameters::PIXEL_FORMAT_YUV422SP)
|
800 | fmt = "yuv422sp";
|
801 | else if (fmt == CameraParameters::PIXEL_FORMAT_YUV420SP)
|
802 | fmt = "yuv420sp";
|
803 | else if (fmt == CameraParameters::PIXEL_FORMAT_YUV422I)
|
804 | fmt = "yuv422i";
|
805 | else if (fmt == CameraParameters::PIXEL_FORMAT_RGB565)
|
806 | fmt = "rgb565";
|
807 | else if (fmt == CameraParameters::PIXEL_FORMAT_JPEG)
|
808 | fmt = "jpeg";
|
809 | cameraPropertyPreviewFormatString = fmt;
|
810 |
|
811 | union {const char* str;double res;} u;
|
812 | memset(&u.res, 0, sizeof(u.res));
|
813 | u.str = cameraPropertyPreviewFormatString.c_str();
|
814 | return u.res;
|
815 | }
|
816 | case ANDROID_CAMERA_PROPERTY_EXPOSURE:
|
817 | {
|
818 | int exposure = params->getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
|
819 | return exposure;
|
820 | }
|
821 | case ANDROID_CAMERA_PROPERTY_FPS:
|
822 | {
|
823 | return params->getPreviewFrameRate();
|
824 | }
|
825 | case ANDROID_CAMERA_PROPERTY_FLASH_MODE:
|
826 | {
|
827 | int flash_mode = getModeNum(CameraHandler::flashModesNames,
|
828 | ANDROID_CAMERA_FLASH_MODES_NUM,
|
829 | params->get(CameraParameters::KEY_FLASH_MODE));
|
830 | return flash_mode;
|
831 | }
|
832 | case ANDROID_CAMERA_PROPERTY_FOCUS_MODE:
|
833 | {
|
834 | int focus_mode = getModeNum(CameraHandler::focusModesNames,
|
835 | ANDROID_CAMERA_FOCUS_MODES_NUM,
|
836 | params->get(CameraParameters::KEY_FOCUS_MODE));
|
837 | return focus_mode;
|
838 | }
|
839 | case ANDROID_CAMERA_PROPERTY_WHITE_BALANCE:
|
840 | {
|
841 | int white_balance = getModeNum(CameraHandler::whiteBalanceModesNames,
|
842 | ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM,
|
843 | params->get(CameraParameters::KEY_WHITE_BALANCE));
|
844 | return white_balance;
|
845 | }
|
846 | case ANDROID_CAMERA_PROPERTY_ANTIBANDING:
|
847 | {
|
848 | int antibanding = getModeNum(CameraHandler::antibandingModesNames,
|
849 | ANDROID_CAMERA_ANTIBANDING_MODES_NUM,
|
850 | params->get(CameraParameters::KEY_ANTIBANDING));
|
851 | return antibanding;
|
852 | }
|
853 | case ANDROID_CAMERA_PROPERTY_FOCAL_LENGTH:
|
854 | {
|
855 | float focal_length = params->getFloat(CameraParameters::KEY_FOCAL_LENGTH);
|
856 | return focal_length;
|
857 | }
|
858 | case ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_NEAR:
|
859 | {
|
860 | return getFocusDistance(ANDROID_CAMERA_FOCUS_DISTANCE_NEAR_INDEX);
|
861 | }
|
862 | case ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_OPTIMAL:
|
863 | {
|
864 | return getFocusDistance(ANDROID_CAMERA_FOCUS_DISTANCE_OPTIMAL_INDEX);
|
865 | }
|
866 | case ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_FAR:
|
867 | {
|
868 | return getFocusDistance(ANDROID_CAMERA_FOCUS_DISTANCE_FAR_INDEX);
|
869 | }
|
870 | #if !defined(ANDROID_r2_2_0) && !defined(ANDROID_r2_3_3) && !defined(ANDROID_r3_0_1)
|
871 | case ANDROID_CAMERA_PROPERTY_WHITEBALANCE_LOCK:
|
872 | {
|
873 | const char* status = params->get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
|
874 | if (status == CameraParameters::TRUE)
|
875 | return 1.;
|
876 | else
|
877 | return 0.;
|
878 | }
|
879 | case ANDROID_CAMERA_PROPERTY_EXPOSE_LOCK:
|
880 | {
|
881 | const char* status = params->get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
|
882 | if (status == CameraParameters::TRUE)
|
883 | return 1.;
|
884 | else
|
885 | return 0.;
|
886 | }
|
887 | #endif
|
888 | default:
|
889 | LOGW("CameraHandler::getProperty - Unsupported property.");
|
890 | };
|
891 | return -1;
|
892 | }
|
893 |
|
894 | void CameraHandler::setProperty(int propIdx, double value)
|
895 | {
|
896 | LOGD("CameraHandler::setProperty(%d, %f)", propIdx, value);
|
897 |
|
898 | android::String8 params_str;
|
899 | params_str = camera->getParameters();
|
900 | LOGI("Params before set: [%s]", params_str.string());
|
901 |
|
902 | switch (propIdx)
|
903 | {
|
904 | case ANDROID_CAMERA_PROPERTY_FRAMEWIDTH:
|
905 | {
|
906 | int w,h;
|
907 | params->getPreviewSize(&w, &h);
|
908 | width = (int)value;
|
909 | }
|
910 | break;
|
911 | case ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT:
|
912 | {
|
913 | int w,h;
|
914 | params->getPreviewSize(&w, &h);
|
915 | height = (int)value;
|
916 | }
|
917 | break;
|
918 | case ANDROID_CAMERA_PROPERTY_EXPOSURE:
|
919 | {
|
920 | int max_exposure = params->getInt("max-exposure-compensation");
|
921 | int min_exposure = params->getInt("min-exposure-compensation");
|
922 | if(max_exposure && min_exposure)
|
923 | {
|
924 | int exposure = (int)value;
|
925 | if(exposure >= min_exposure && exposure <= max_exposure)
|
926 | params->set("exposure-compensation", exposure);
|
927 | else
|
928 | LOGE("Exposure compensation not in valid range (%i,%i).", min_exposure, max_exposure);
|
929 | } else
|
930 | LOGE("Exposure compensation adjust is not supported.");
|
931 |
|
932 | camera->setParameters(params->flatten());
|
933 | }
|
934 | break;
|
935 | case ANDROID_CAMERA_PROPERTY_FLASH_MODE:
|
936 | {
|
937 | int new_val = (int)value;
|
938 | if(new_val >= 0 && new_val < ANDROID_CAMERA_FLASH_MODES_NUM)
|
939 | {
|
940 | const char* mode_name = flashModesNames[new_val];
|
941 | if(is_supported(CameraParameters::KEY_SUPPORTED_FLASH_MODES, mode_name))
|
942 | params->set(CameraParameters::KEY_FLASH_MODE, mode_name);
|
943 | else
|
944 | LOGE("Flash mode %s is not supported.", mode_name);
|
945 | }
|
946 | else
|
947 | LOGE("Flash mode value not in valid range.");
|
948 |
|
949 | camera->setParameters(params->flatten());
|
950 | }
|
951 | break;
|
952 | case ANDROID_CAMERA_PROPERTY_FOCUS_MODE:
|
953 | {
|
954 | int new_val = (int)value;
|
955 | if(new_val >= 0 && new_val < ANDROID_CAMERA_FOCUS_MODES_NUM)
|
956 | {
|
957 | const char* mode_name = focusModesNames[new_val];
|
958 | if(is_supported(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mode_name))
|
959 | params->set(CameraParameters::KEY_FOCUS_MODE, mode_name);
|
960 | else
|
961 | LOGE("Focus mode %s is not supported.", mode_name);
|
962 | }
|
963 | else
|
964 | LOGE("Focus mode value not in valid range.");
|
965 |
|
966 | camera->setParameters(params->flatten());
|
967 | }
|
968 | break;
|
969 | case ANDROID_CAMERA_PROPERTY_WHITE_BALANCE:
|
970 | {
|
971 | int new_val = (int)value;
|
972 | if(new_val >= 0 && new_val < ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM)
|
973 | {
|
974 | const char* mode_name = whiteBalanceModesNames[new_val];
|
975 | if(is_supported(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mode_name))
|
976 | params->set(CameraParameters::KEY_WHITE_BALANCE, mode_name);
|
977 | else
|
978 | LOGE("White balance mode %s is not supported.", mode_name);
|
979 | }
|
980 | else
|
981 | LOGE("White balance mode value not in valid range.");
|
982 |
|
983 | camera->setParameters(params->flatten());
|
984 | }
|
985 | break;
|
986 | case ANDROID_CAMERA_PROPERTY_ANTIBANDING:
|
987 | {
|
988 | int new_val = (int)value;
|
989 | if(new_val >= 0 && new_val < ANDROID_CAMERA_ANTIBANDING_MODES_NUM)
|
990 | {
|
991 | const char* mode_name = antibandingModesNames[new_val];
|
992 | if(is_supported(CameraParameters::KEY_SUPPORTED_ANTIBANDING, mode_name))
|
993 | params->set(CameraParameters::KEY_ANTIBANDING, mode_name);
|
994 | else
|
995 | LOGE("Antibanding mode %s is not supported.", mode_name);
|
996 | }
|
997 | else
|
998 | LOGE("Antibanding mode value not in valid range.");
|
999 |
|
1000 | camera->setParameters(params->flatten());
|
1001 | }
|
1002 | break;
|
1003 | #if !defined(ANDROID_r2_2_0) && !defined(ANDROID_r2_3_3) && !defined(ANDROID_r3_0_1)
|
1004 | case ANDROID_CAMERA_PROPERTY_EXPOSE_LOCK:
|
1005 | {
|
1006 | if (is_supported(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "true"))
|
1007 | {
|
1008 | if (value != 0)
|
1009 | params->set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, CameraParameters::TRUE);
|
1010 | else
|
1011 | params->set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, CameraParameters::FALSE);
|
1012 | LOGE("Expose lock is set");
|
1013 | }
|
1014 | else
|
1015 | LOGE("Expose lock is not supported");
|
1016 |
|
1017 | camera->setParameters(params->flatten());
|
1018 | }
|
1019 | break;
|
1020 | case ANDROID_CAMERA_PROPERTY_WHITEBALANCE_LOCK:
|
1021 | {
|
1022 | if (is_supported(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "true"))
|
1023 | {
|
1024 | if (value != 0)
|
1025 | params->set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, CameraParameters::TRUE);
|
1026 | else
|
1027 | params->set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, CameraParameters::FALSE);
|
1028 | LOGE("White balance lock is set");
|
1029 | }
|
1030 | else
|
1031 | LOGE("White balance lock is not supported");
|
1032 |
|
1033 | camera->setParameters(params->flatten());
|
1034 | }
|
1035 | break;
|
1036 | #endif
|
1037 | default:
|
1038 | LOGW("CameraHandler::setProperty - Unsupported property.");
|
1039 | };
|
1040 |
|
1041 | params_str = camera->getParameters();
|
1042 | LOGI("Params after set: [%s]", params_str.string());
|
1043 | }
|
1044 |
|
// Re-applies the properties cached in *ppcameraHandler (currently the preview
// size stored by setProperty) to the running camera.
//
// Two strategies, selected at compile time:
//  - Android >= 4.0: keep the same CameraHandler; stop the preview, reconnect,
//    push the new parameters, re-attach a preview consumer and restart the
//    preview in place.
//  - older releases: tear the whole connection down and build a fresh handler
//    with initCameraConnect(), replacing *ppcameraHandler.
void CameraHandler::applyProperties(CameraHandler** ppcameraHandler)
{
    LOGD("CameraHandler::applyProperties()");

    if (ppcameraHandler == 0)
    {
        LOGE("applyProperties: Passed NULL ppcameraHandler");
        return;
    }

    if (*ppcameraHandler == 0)
    {
        LOGE("applyProperties: Passed NULL *ppcameraHandler");
        return;
    }



    // Only apply a preview size once both dimensions have been set (non-zero)
    // through setProperty().
    if (((*ppcameraHandler)->width != 0) && ((*ppcameraHandler)->height != 0))
        (*ppcameraHandler)->params->setPreviewSize((*ppcameraHandler)->width, (*ppcameraHandler)->height);

#if defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3) || defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) \
    || defined(ANDROID_r4_3_0) || defined(ANDROID_r4_4_0)
    CameraHandler* handler=*ppcameraHandler;

    // Stop the preview and mute frame callbacks while reconfiguring, then
    // re-acquire the camera for this client before pushing parameters.
    handler->camera->stopPreview();
    handler->camera->setPreviewCallbackFlags(CAMERA_FRAME_CALLBACK_FLAG_NOOP);

    status_t reconnectStatus = handler->camera->reconnect();
    if (reconnectStatus != 0)
    {
        LOGE("applyProperties: failed to reconnect camera (status %d)", reconnectStatus);
        return;
    }

    handler->camera->setParameters((*ppcameraHandler)->params->flatten());

    // Attach the version-specific preview consumer again; frames themselves
    // are delivered through the data callback (COPY_OUT below), the consumer
    // only keeps the preview pipeline satisfied.
    status_t bufferStatus;
# if defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3)
    // Placement-new with MAGIC_TAIL slack bytes — presumably headroom for
    // firmware builds whose SurfaceTexture layout is larger than the one we
    // compiled against; TODO confirm at the MAGIC_TAIL definition.
    void* surface_texture_obj = operator new(sizeof(SurfaceTexture) + MAGIC_TAIL);
    handler->surface = new(surface_texture_obj) SurfaceTexture(MAGIC_OPENCV_TEXTURE_ID);
    bufferStatus = handler->camera->setPreviewTexture(handler->surface);
    if (bufferStatus != 0)
        LOGE("applyProperties: failed setPreviewTexture call (status %d); camera might not work correctly", bufferStatus);
# elif defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0)
    void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
    handler->queue = new(buffer_queue_obj) BufferQueue();
    handler->queue->consumerConnect(handler->listener);
    bufferStatus = handler->camera->setPreviewTexture(handler->queue);
    if (bufferStatus != 0)
        LOGE("applyProperties: failed setPreviewTexture call; camera might not work correctly");
# elif defined(ANDROID_r4_4_0)
    // r4.4: setPreviewTarget replaces setPreviewTexture, and consumerConnect
    // takes an extra controlledByApp flag.
    void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
    handler->queue = new(buffer_queue_obj) BufferQueue();
    handler->queue->consumerConnect(handler->listener, true);
    bufferStatus = handler->camera->setPreviewTarget(handler->queue);
    if (bufferStatus != 0)
        LOGE("applyProperties: failed setPreviewTexture call; camera might not work correctly");
# endif

    // Re-enable frame delivery: copy each preview frame out to our callback.
    handler->camera->setPreviewCallbackFlags( CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK | CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK);

    LOGD("Starting preview");
    status_t previewStatus = handler->camera->startPreview();

    if (previewStatus != 0)
    {
        LOGE("initCameraConnect: startPreview() fails. Closing camera connection...");
        handler->closeCameraConnect();
        // NOTE(review): only the local pointer is cleared here —
        // *ppcameraHandler still references the just-closed handler, so the
        // caller keeps a stale handle on this failure path. Confirm whether
        // *ppcameraHandler should be reset like in the legacy branch below.
        handler = NULL;
    }
    else
    {
        LOGD("Preview started successfully");
    }
#else
    // Legacy path: recreate the connection from scratch, carrying over the
    // client callback, camera id and user data of the old handler.
    CameraHandler* previousCameraHandler=*ppcameraHandler;
    CameraCallback cameraCallback=previousCameraHandler->cameraCallback;
    void* userData=previousCameraHandler->userData;
    int cameraId=previousCameraHandler->cameraId;

    LOGD("CameraHandler::applyProperties(): before previousCameraHandler->closeCameraConnect");
    previousCameraHandler->closeCameraConnect();
    LOGD("CameraHandler::applyProperties(): after previousCameraHandler->closeCameraConnect");

    LOGD("CameraHandler::applyProperties(): before initCameraConnect");
    CameraHandler* handler=initCameraConnect(cameraCallback, cameraId, userData, (*ppcameraHandler)->params);
    LOGD("CameraHandler::applyProperties(): after initCameraConnect, handler=0x%x", (int)handler);
    if (handler == NULL) {
        // Could not re-open with the requested parameters — retry with camera
        // defaults so the client at least keeps a working connection.
        LOGE("ERROR in applyProperties --- cannot reinit camera");
        handler=initCameraConnect(cameraCallback, cameraId, userData, NULL);
        LOGD("CameraHandler::applyProperties(): repeate initCameraConnect after ERROR, handler=0x%x", (int)handler);
        if (handler == NULL) {
            LOGE("ERROR in applyProperties --- cannot reinit camera AGAIN --- cannot do anything else");
        }
    }
    (*ppcameraHandler)=handler;
#endif
}
|
1144 |
|
1145 |
|
1146 | extern "C" {
|
1147 |
|
1148 | void* initCameraConnectC(void* callback, int cameraId, void* userData)
|
1149 | {
|
1150 | return CameraHandler::initCameraConnect((CameraCallback)callback, cameraId, userData, NULL);
|
1151 | }
|
1152 |
|
1153 | void closeCameraConnectC(void** camera)
|
1154 | {
|
1155 | CameraHandler** cc = (CameraHandler**)camera;
|
1156 | (*cc)->closeCameraConnect();
|
1157 | *cc = 0;
|
1158 | }
|
1159 |
|
1160 | double getCameraPropertyC(void* camera, int propIdx)
|
1161 | {
|
1162 | return ((CameraHandler*)camera)->getProperty(propIdx);
|
1163 | }
|
1164 |
|
1165 | void setCameraPropertyC(void* camera, int propIdx, double value)
|
1166 | {
|
1167 | ((CameraHandler*)camera)->setProperty(propIdx,value);
|
1168 | }
|
1169 |
|
1170 | void applyCameraPropertiesC(void** camera)
|
1171 | {
|
1172 | CameraHandler::applyProperties((CameraHandler**)camera);
|
1173 | }
|
1174 |
|
1175 | }
|