-rwxr-xr-xAndroid.mk1
-rw-r--r--CameraHal/Android.mk369
-rw-r--r--CameraHal/Android.mk~369
-rwxr-xr-xCameraHal/AppMsgNotifier.cpp2335
-rwxr-xr-xCameraHal/CameraAdapter.cpp876
-rw-r--r--CameraHal/CameraBuffer.cpp274
-rw-r--r--CameraHal/CameraGL.h4
-rwxr-xr-xCameraHal/CameraHal.cpp1376
-rwxr-xr-xCameraHal/CameraHal.h1984
-rwxr-xr-xCameraHal/CameraHalUtil.cpp1697
-rwxr-xr-xCameraHal/CameraHalUtil.cpp~1697
-rwxr-xr-xCameraHal/CameraHal_MediaProfile.cpp852
-rw-r--r--CameraHal/CameraHal_Mem.cpp1583
-rw-r--r--CameraHal/CameraHal_Mem.h238
-rwxr-xr-xCameraHal/CameraHal_Module.cpp1532
-rwxr-xr-xCameraHal/CameraHal_Module.h49
-rwxr-xr-xCameraHal/CameraHal_Tracer.c26
-rwxr-xr-xCameraHal/CameraHal_Tracer.h57
-rwxr-xr-xCameraHal/CameraHal_board_xml_parse.cpp2515
-rw-r--r--CameraHal/CameraHal_board_xml_parse.h525
-rwxr-xr-xCameraHal/CameraIspAdapter.cpp3372
-rw-r--r--CameraHal/CameraIspAdapter.h271
-rwxr-xr-xCameraHal/CameraIspSOCAdapter.cpp424
-rw-r--r--CameraHal/CameraIspTunning.cpp775
-rw-r--r--CameraHal/CameraIspTunning.h124
-rwxr-xr-xCameraHal/CameraSocAdapter.cpp1279
-rwxr-xr-xCameraHal/CameraUSBAdapter.cpp1041
-rwxr-xr-xCameraHal/DisplayAdapter.cpp872
-rwxr-xr-xCameraHal/FaceDetector.h78
-rw-r--r--CameraHal/FakeCameraAdapter.cpp336
-rw-r--r--CameraHal/FakeCameraAdapter.h43
-rwxr-xr-xCameraHal/Jpeg_soft_encode.cpp177
-rw-r--r--CameraHal/MessageQueue.cpp247
-rwxr-xr-xCameraHal/MessageQueue.h38
-rw-r--r--CameraHal/MutliFrameDenoise.h49
-rwxr-xr-xCameraHal/Semaphore.cpp231
-rwxr-xr-xCameraHal/Semaphore.h59
-rwxr-xr-xCameraHal/SensorListener.cpp249
-rwxr-xr-xCameraHal/SensorListener.h101
-rw-r--r--CameraHal/camera_mem.cpp42
-rw-r--r--CameraHal/camera_mem.h73
-rw-r--r--CameraHal/camera_mem_gralloc.cpp532
-rw-r--r--CameraHal/common_type.h25
-rw-r--r--CameraHal/lib/arm/libMFDenoise.sobin0 -> 526048 bytes
-rw-r--r--CameraHal/lib/arm/libMFDenoise_android7.1.sobin0 -> 513720 bytes
-rw-r--r--CameraHal/lib/arm/libcameragl.sobin0 -> 108020 bytes
-rw-r--r--CameraHal/lib/arm/libcameragl_android7.1.sobin0 -> 185928 bytes
-rw-r--r--CameraHal/lib/arm/libopencv_java3.sobin0 -> 8620732 bytes
-rw-r--r--CameraHal/lib/arm/librkwdr.sobin0 -> 22144 bytes
-rw-r--r--CameraHal/lib/arm64/libMFDenoise.sobin0 -> 969072 bytes
-rw-r--r--CameraHal/lib/arm64/libMFDenoise_android7.1.sobin0 -> 911504 bytes
-rw-r--r--CameraHal/lib/arm64/libcameragl.sobin0 -> 161552 bytes
-rw-r--r--CameraHal/lib/arm64/libcameragl_android7.1.sobin0 -> 284584 bytes
-rw-r--r--CameraHal/lib/arm64/libopencv_java3.sobin0 -> 14793584 bytes
-rw-r--r--CameraHal/lib/arm64/librkwdr.sobin0 -> 26432 bytes
-rw-r--r--Config/Android.mk103
-rw-r--r--Config/cam_board_rk3288.xml1063
-rw-r--r--Config/cam_board_rk3366.xml916
-rw-r--r--Config/cam_board_rk3368.xml1056
-rw-r--r--Config/cam_board_rk3399.xml660
-rw-r--r--Config/libisp_silicomimageisp_api_5x_32bit.sobin0 -> 1163844 bytes
-rw-r--r--Config/libisp_silicomimageisp_api_5x_64bit.sobin0 -> 1624784 bytes
-rw-r--r--Config/libisp_silicomimageisp_api_6x_32bit.sobin0 -> 1289012 bytes
-rw-r--r--Config/libisp_silicomimageisp_api_6x_64bit.sobin0 -> 1902400 bytes
-rw-r--r--Config/libisp_silicomimageisp_api_7x_32bit.sobin0 -> 1241560 bytes
-rw-r--r--Config/libisp_silicomimageisp_api_7x_64bit.sobin0 -> 1689088 bytes
-rw-r--r--Config/rk32xx_camera.mk219
-rwxr-xr-xConfig/user.mk99
-rw-r--r--SiliconImage/Android.mk1
-rwxr-xr-xSiliconImage/include/adpcc/adpcc.h323
-rwxr-xr-xSiliconImage/include/adpf/adpf.h442
-rwxr-xr-xSiliconImage/include/aec/aec.h634
-rwxr-xr-xSiliconImage/include/af/af.h443
-rwxr-xr-xSiliconImage/include/avs/avs.h440
-rwxr-xr-xSiliconImage/include/awb/awb.h714
-rwxr-xr-xSiliconImage/include/bufferpool/media_buffer.h276
-rwxr-xr-xSiliconImage/include/bufferpool/media_buffer_pool.h358
-rwxr-xr-xSiliconImage/include/bufferpool/media_buffer_queue.h524
-rwxr-xr-xSiliconImage/include/bufferpool/media_buffer_queue_ex.h650
-rwxr-xr-xSiliconImage/include/bufsync_ctrl/bufsync_ctrl_api.h194
-rwxr-xr-xSiliconImage/include/bufsync_ctrl/bufsync_ctrl_common.h115
-rwxr-xr-xSiliconImage/include/cam_calibdb/cam_calibdb_api.h901
-rwxr-xr-xSiliconImage/include/cam_calibdb/cam_calibdb_common.h131
-rwxr-xr-xSiliconImage/include/cam_engine/cam_engine_aaa_api.h1018
-rwxr-xr-xSiliconImage/include/cam_engine/cam_engine_api.h601
-rwxr-xr-xSiliconImage/include/cam_engine/cam_engine_common.h370
-rwxr-xr-xSiliconImage/include/cam_engine/cam_engine_cproc_api.h287
-rwxr-xr-xSiliconImage/include/cam_engine/cam_engine_drv_api.h90
-rwxr-xr-xSiliconImage/include/cam_engine/cam_engine_imgeffects_api.h118
-rwxr-xr-xSiliconImage/include/cam_engine/cam_engine_isp_api.h907
-rwxr-xr-xSiliconImage/include/cam_engine/cam_engine_jpe_api.h89
-rwxr-xr-xSiliconImage/include/cam_engine/cam_engine_mi_api.h161
-rwxr-xr-xSiliconImage/include/cam_engine/cam_engine_simp_api.h127
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_cproc_drv_api.h431
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_drv_api.h792
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_drv_common.h147
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_dual_cropping_drv_api.h122
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_ie_drv_api.h393
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_afm_drv_api.h359
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_awb_drv_api.h308
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_bls_drv_api.h364
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_cac_drv_api.h247
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_cnr_drv_api.h189
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_degamma_drv_api.h187
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_dpcc_drv_api.h251
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_dpf_drv_api.h327
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_drv_api.h1118
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_elawb_drv_api.h312
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_exp_drv_api.h286
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_flash_drv_api.h77
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_flt_drv_api.h163
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_hist_drv_api.h325
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_is_drv_api.h379
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_lsc_drv_api.h342
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_vsm_drv_api.h285
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_isp_wdr_drv_api.h264
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_jpe_drv_api.h348
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_mi_drv_api.h467
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_mipi_drv_api.h277
-rwxr-xr-xSiliconImage/include/cameric_drv/cameric_simp_drv_api.h207
-rwxr-xr-xSiliconImage/include/cameric_drv/mainpage.h179
-rwxr-xr-xSiliconImage/include/cameric_reg_drv/cameric_reg_description.h276
-rw-r--r--SiliconImage/include/camsys_head.h276
-rwxr-xr-xSiliconImage/include/common/align.h28
-rwxr-xr-xSiliconImage/include/common/array_size.h22
-rwxr-xr-xSiliconImage/include/common/cam_types.h1170
-rwxr-xr-xSiliconImage/include/common/cea_861.h84
-rwxr-xr-xSiliconImage/include/common/hdmi_3d.h87
-rwxr-xr-xSiliconImage/include/common/list.h462
-rwxr-xr-xSiliconImage/include/common/mipi.h150
-rwxr-xr-xSiliconImage/include/common/misc.h50
-rwxr-xr-xSiliconImage/include/common/picture_buffer.h217
-rwxr-xr-xSiliconImage/include/common/return_codes.h46
-rwxr-xr-xSiliconImage/include/common/utl_fixfloat.h82
-rw-r--r--SiliconImage/include/common_head.h29
-rwxr-xr-xSiliconImage/include/dom_ctrl/dom_ctrl_api.h148
-rwxr-xr-xSiliconImage/include/dom_ctrl/dom_ctrl_common.h97
-rwxr-xr-xSiliconImage/include/ebase/builtins.h40
-rwxr-xr-xSiliconImage/include/ebase/dct_assert.h100
-rwxr-xr-xSiliconImage/include/ebase/ext_types.h10
-rwxr-xr-xSiliconImage/include/ebase/hashtable.h313
-rwxr-xr-xSiliconImage/include/ebase/linux_compat.h44
-rwxr-xr-xSiliconImage/include/ebase/list.h351
-rwxr-xr-xSiliconImage/include/ebase/mainpage.h41
-rwxr-xr-xSiliconImage/include/ebase/queue.h106
-rwxr-xr-xSiliconImage/include/ebase/return_types.h93
-rwxr-xr-xSiliconImage/include/ebase/slist.h347
-rwxr-xr-xSiliconImage/include/ebase/sort.frag.h187
-rwxr-xr-xSiliconImage/include/ebase/trace.h272
-rwxr-xr-xSiliconImage/include/ebase/types.h109
-rwxr-xr-xSiliconImage/include/exa_ctrl/exa_ctrl_api.h107
-rwxr-xr-xSiliconImage/include/exa_ctrl/exa_ctrl_common.h107
-rwxr-xr-xSiliconImage/include/hal/hal_altera_pci.h208
-rw-r--r--SiliconImage/include/hal/hal_api.h702
-rwxr-xr-xSiliconImage/include/hal/hal_cosim.h650
-rw-r--r--SiliconImage/include/hal/hal_mockup.h175
-rwxr-xr-xSiliconImage/include/i2c_drv/i2c_drv.h256
-rwxr-xr-xSiliconImage/include/ibd/ibd_api.h177
-rwxr-xr-xSiliconImage/include/ibd/ibd_common.h189
-rw-r--r--SiliconImage/include/isi/isi.h1372
-rw-r--r--SiliconImage/include/isi/isi_common.h948
-rw-r--r--SiliconImage/include/isi/isi_iss.h321
-rw-r--r--SiliconImage/include/isp_cam_api/calib_xml/calibdb.h143
-rwxr-xr-xSiliconImage/include/isp_cam_api/calib_xml/calibtreewidget.h115
-rw-r--r--SiliconImage/include/isp_cam_api/cam_api/cam_engine_interface.h710
-rwxr-xr-xSiliconImage/include/isp_cam_api/cam_api/camdevice.h113
-rwxr-xr-xSiliconImage/include/isp_cam_api/cam_api/dom_ctrl_interface.h79
-rwxr-xr-xSiliconImage/include/isp_cam_api/cam_api/dom_ctrl_vidplay_api.h97
-rwxr-xr-xSiliconImage/include/isp_cam_api/cam_api/exa_ctrl_interface.h79
-rwxr-xr-xSiliconImage/include/isp_cam_api/cam_api/halholder.h74
-rwxr-xr-xSiliconImage/include/isp_cam_api/cam_api/impexinfo.h149
-rwxr-xr-xSiliconImage/include/isp_cam_api/cam_api/mapbuffer.h281
-rwxr-xr-xSiliconImage/include/isp_cam_api/cam_api/mapcaps.h75
-rwxr-xr-xSiliconImage/include/isp_cam_api/cam_api/readpgmraw.h64
-rwxr-xr-xSiliconImage/include/isp_cam_api/cam_api/som_ctrl_interface.h84
-rwxr-xr-xSiliconImage/include/isp_cam_api/cam_api/vom_ctrl_interface.h79
-rwxr-xr-xSiliconImage/include/mim_ctrl/mim_ctrl_api.h194
-rwxr-xr-xSiliconImage/include/mim_ctrl/mim_ctrl_common.h56
-rwxr-xr-xSiliconImage/include/mipi_drv/mipi_drv_api.h145
-rwxr-xr-xSiliconImage/include/mipi_drv/mipi_drv_common.h94
-rwxr-xr-xSiliconImage/include/mom_ctrl/mom_ctrl_api.h284
-rwxr-xr-xSiliconImage/include/mom_ctrl/mom_ctrl_common.h141
-rwxr-xr-xSiliconImage/include/oslayer/oslayer.h1022
-rwxr-xr-xSiliconImage/include/oslayer/oslayer_linux.h159
-rwxr-xr-xSiliconImage/include/oslayer/oslayer_mfs.h131
-rwxr-xr-xSiliconImage/include/oslayer/oslayer_systemc.h88
-rwxr-xr-xSiliconImage/include/oslayer/oslayer_ucosii.h89
-rwxr-xr-xSiliconImage/include/oslayer/oslayer_win32.h119
-rwxr-xr-xSiliconImage/include/som_ctrl/som_ctrl_api.h94
-rwxr-xr-xSiliconImage/include/som_ctrl/som_ctrl_common.h119
-rwxr-xr-xSiliconImage/include/version/version.h20
-rwxr-xr-xSiliconImage/include/version/version_number.h3
-rwxr-xr-xSiliconImage/include/vom_ctrl/vom_ctrl_api.h98
-rwxr-xr-xSiliconImage/include/vom_ctrl/vom_ctrl_common.h82
-rw-r--r--SiliconImage/isi/Android.mk5
-rwxr-xr-xSiliconImage/isi/drv/Android.mk2
-rw-r--r--SiliconImage/isi/drv/GC0308/Android.mk38
-rw-r--r--SiliconImage/isi/drv/GC0308/include_priv/GC0308_priv.h96
-rw-r--r--SiliconImage/isi/drv/GC0308/source/GC0308_PARREL.c2987
-rwxr-xr-xSiliconImage/isi/drv/GC0308/source/GC0308_tables.c930
-rw-r--r--SiliconImage/isi/drv/GC2035/Android.mk39
-rw-r--r--SiliconImage/isi/drv/GC2035/include_priv/GC2035_priv.h93
-rw-r--r--SiliconImage/isi/drv/GC2035/source/GC2035_PARREL.c3005
-rwxr-xr-xSiliconImage/isi/drv/GC2035/source/GC2035_tables.c760
-rw-r--r--SiliconImage/isi/drv/GC2155_CIF/Android.mk39
-rw-r--r--SiliconImage/isi/drv/GC2155_CIF/include_priv/GC2155_priv.h101
-rw-r--r--SiliconImage/isi/drv/GC2155_CIF/source/GC2155_PARREL.c3059
-rw-r--r--SiliconImage/isi/drv/GC2155_CIF/source/GC2155_tables.c986
-rw-r--r--SiliconImage/isi/drv/GS8604/Android.mk39
-rw-r--r--SiliconImage/isi/drv/GS8604/calib/GS8604.xml4350
-rwxr-xr-xSiliconImage/isi/drv/GS8604/calib/GS8604_lens_40100A.xml4350
-rwxr-xr-xSiliconImage/isi/drv/GS8604/calib/GS8604_lens_9569A2.xml4344
-rw-r--r--SiliconImage/isi/drv/GS8604/include_priv/GS8604_MIPI_priv.h130
-rw-r--r--SiliconImage/isi/drv/GS8604/source/GS8604_MIPI.c4592
-rw-r--r--SiliconImage/isi/drv/GS8604/source/GS8604_tables.c526
-rw-r--r--SiliconImage/isi/drv/HM2057/Android.mk39
-rw-r--r--SiliconImage/isi/drv/HM2057/include_priv/HM2057_priv.h94
-rw-r--r--SiliconImage/isi/drv/HM2057/source/HM2057_PARREL.c2998
-rwxr-xr-xSiliconImage/isi/drv/HM2057/source/HM2057_tables.c646
-rw-r--r--SiliconImage/isi/drv/HM5040/Android.mk37
-rw-r--r--SiliconImage/isi/drv/HM5040/calib/HM5040.xml4880
-rw-r--r--SiliconImage/isi/drv/HM5040/include_priv/sensor_MIPI_priv.h125
-rw-r--r--SiliconImage/isi/drv/HM5040/source/sensor_MIPI.c4247
-rw-r--r--SiliconImage/isi/drv/HM5040/source/sensor_tables.c1082
-rw-r--r--SiliconImage/isi/drv/HM8040/Android.mk39
-rw-r--r--SiliconImage/isi/drv/HM8040/calib/HM8040_lens_MTD8040.xml4350
-rw-r--r--SiliconImage/isi/drv/HM8040/calib/HM8040_lens_MTD8040_OTP.xml4607
-rw-r--r--SiliconImage/isi/drv/HM8040/include_priv/HM8040_MIPI_priv.h101
-rw-r--r--SiliconImage/isi/drv/HM8040/source/HM8040_MIPI.c4359
-rw-r--r--SiliconImage/isi/drv/HM8040/source/HM8040_tables.c1170
-rw-r--r--SiliconImage/isi/drv/IMX179/Android.mk38
-rw-r--r--SiliconImage/isi/drv/IMX179/calib/IMX179.xml4413
-rw-r--r--SiliconImage/isi/drv/IMX179/include_priv/IMX179_MIPI_priv.h110
-rw-r--r--SiliconImage/isi/drv/IMX179/source/IMX179_MIPI.c4242
-rw-r--r--SiliconImage/isi/drv/IMX179/source/IMX179_tables.c296
-rw-r--r--SiliconImage/isi/drv/IMX214/Android.mk38
-rw-r--r--SiliconImage/isi/drv/IMX214/calib/IMX214.xml1442
-rw-r--r--SiliconImage/isi/drv/IMX214/calib/IMX214_4BAD06P1.xml4413
-rw-r--r--SiliconImage/isi/drv/IMX214/calib/IMX214_LG-50013A7.xml4413
-rw-r--r--SiliconImage/isi/drv/IMX214/calib/IMX214_lens_50013A7_OTP.xml4413
-rw-r--r--SiliconImage/isi/drv/IMX214/include_priv/IMX214_MIPI_priv.h137
-rw-r--r--SiliconImage/isi/drv/IMX214/source/IMX214_MIPI.c4455
-rw-r--r--SiliconImage/isi/drv/IMX214/source/IMX214_tables.c1012
-rw-r--r--SiliconImage/isi/drv/IMX214_B/Android.mk38
-rw-r--r--SiliconImage/isi/drv/IMX214_B/calib/IMX214_B.xml1
-rw-r--r--SiliconImage/isi/drv/IMX214_B/calib/IMX214_B_4BAD06P1.xml4413
-rw-r--r--SiliconImage/isi/drv/IMX214_B/calib/IMX214_B_LG-50013A7.xml4413
-rw-r--r--SiliconImage/isi/drv/IMX214_B/include_priv/IMX214_MIPI_priv.h133
-rw-r--r--SiliconImage/isi/drv/IMX214_B/source/IMX214_MIPI.c4428
-rw-r--r--SiliconImage/isi/drv/IMX214_B/source/IMX214_tables.c1012
-rw-r--r--SiliconImage/isi/drv/IMX278/Android.mk38
-rw-r--r--SiliconImage/isi/drv/IMX278/calib/IMX278.xml4413
-rw-r--r--SiliconImage/isi/drv/IMX278/include_priv/IMX278_MIPI_priv.h119
-rw-r--r--SiliconImage/isi/drv/IMX278/source/IMX278_MIPI.c4264
-rw-r--r--SiliconImage/isi/drv/IMX278/source/IMX278_tables.c593
-rw-r--r--SiliconImage/isi/drv/JX507/Android.mk38
-rw-r--r--SiliconImage/isi/drv/JX507/calib/JX507.xml4347
-rw-r--r--SiliconImage/isi/drv/JX507/include_priv/JX507_MIPI_priv.h105
-rw-r--r--SiliconImage/isi/drv/JX507/source/JX507_MIPI.c4002
-rw-r--r--SiliconImage/isi/drv/JX507/source/JX507_tables.c1023
-rw-r--r--SiliconImage/isi/drv/NT99252/Android.mk39
-rw-r--r--SiliconImage/isi/drv/NT99252/include_priv/NT99252_priv.h103
-rw-r--r--SiliconImage/isi/drv/NT99252/source/NT99252_PARREL.c3055
-rwxr-xr-xSiliconImage/isi/drv/NT99252/source/NT99252_tables.c558
-rw-r--r--SiliconImage/isi/drv/OV13850/Android.mk39
-rwxr-xr-xSiliconImage/isi/drv/OV13850/calib/OV13850.xml4222
-rw-r--r--SiliconImage/isi/drv/OV13850/include_priv/OV13850_MIPI_priv.h143
-rw-r--r--SiliconImage/isi/drv/OV13850/source/OV13850_MIPI.c4167
-rwxr-xr-xSiliconImage/isi/drv/OV13850/source/OV13850_tables.c1288
-rw-r--r--SiliconImage/isi/drv/OV13860/Android.mk37
-rw-r--r--SiliconImage/isi/drv/OV13860/calib/OV13860.xml4223
-rw-r--r--SiliconImage/isi/drv/OV13860/include_priv/OV13860_MIPI_priv.h132
-rw-r--r--SiliconImage/isi/drv/OV13860/source/OV13860_MIPI.c3915
-rw-r--r--SiliconImage/isi/drv/OV13860/source/OV13860_tables.c1487
-rwxr-xr-xSiliconImage/isi/drv/OV14825/CMakeLists.txt81
-rwxr-xr-xSiliconImage/isi/drv/OV14825/calib/CMakeLists.txt43
-rwxr-xr-xSiliconImage/isi/drv/OV14825/calib/OV14825/OV14825_sample_02.xml3086
-rwxr-xr-xSiliconImage/isi/drv/OV14825/calib/OV14825/OV14825_sample_08.xml3086
-rwxr-xr-xSiliconImage/isi/drv/OV14825/calib/OV14825/OV14825_sample_09.xml3086
-rwxr-xr-xSiliconImage/isi/drv/OV14825/calib/OV14825/OV14825_sample_11.xml3086
-rwxr-xr-xSiliconImage/isi/drv/OV14825/calib/OV14825/OV14825_sample_12.xml3086
-rwxr-xr-xSiliconImage/isi/drv/OV14825/calib/OV14825/OV14825_sample_13.xml3086
-rwxr-xr-xSiliconImage/isi/drv/OV14825/calib/OV14825/OV14825_sample_14.xml3086
-rwxr-xr-xSiliconImage/isi/drv/OV14825/calib/OV14825/OV14825_sample_15.xml3086
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/doxyfile.cmake1526
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/images/dct_header.html15
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/images/dct_logo.pngbin0 -> 153933 bytes
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/images/simg_header.html15
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/images/simg_logo.pngbin0 -> 9900 bytes
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/latex/dct_favicon.pngbin0 -> 3676 bytes
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/latex/dct_header.tex.in28
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/latex/dct_logo.pngbin0 -> 153933 bytes
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/latex/dct_logo_small.pngbin0 -> 58827 bytes
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/latex/dreamchip.sty425
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/latex/siliconimage.sty425
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/latex/simg_favicon.pngbin0 -> 5176 bytes
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/latex/simg_header.tex.in28
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/doxygen/latex/simg_logo.pngbin0 -> 59403 bytes
-rwxr-xr-xSiliconImage/isi/drv/OV14825/codeanalysis/pc_lint/private.lnt11
-rwxr-xr-xSiliconImage/isi/drv/OV14825/include_priv/OV14825_MIPI_a.h443
-rwxr-xr-xSiliconImage/isi/drv/OV14825/include_priv/OV14825_MIPI_d50.h441
-rwxr-xr-xSiliconImage/isi/drv/OV14825/include_priv/OV14825_MIPI_d65.h446
-rwxr-xr-xSiliconImage/isi/drv/OV14825/include_priv/OV14825_MIPI_d75.h218
-rwxr-xr-xSiliconImage/isi/drv/OV14825/include_priv/OV14825_MIPI_f11.h445
-rwxr-xr-xSiliconImage/isi/drv/OV14825/include_priv/OV14825_MIPI_f2.h445
-rw-r--r--SiliconImage/isi/drv/OV14825/include_priv/OV14825_MIPI_priv.h1074
-rw-r--r--SiliconImage/isi/drv/OV14825/source/OV14825.c4319
-rwxr-xr-xSiliconImage/isi/drv/OV14825/source/OV14825_tables.c578
-rw-r--r--SiliconImage/isi/drv/OV2659/Android.mk39
-rw-r--r--SiliconImage/isi/drv/OV2659/include_priv/OV2659_priv.h98
-rw-r--r--SiliconImage/isi/drv/OV2659/source/OV2659_PARREL.c3002
-rwxr-xr-xSiliconImage/isi/drv/OV2659/source/OV2659_tables.c673
-rw-r--r--SiliconImage/isi/drv/OV2680/Android.mk38
-rw-r--r--SiliconImage/isi/drv/OV2680/calib/OV2680.xml4900
-rw-r--r--SiliconImage/isi/drv/OV2680/include_priv/OV2680_MIPI_priv.h126
-rw-r--r--SiliconImage/isi/drv/OV2680/source/OV2680_MIPI.c4147
-rw-r--r--SiliconImage/isi/drv/OV2680/source/OV2680_tables.c349
-rw-r--r--SiliconImage/isi/drv/OV2685_MIPI_YUV/Android.mk39
-rw-r--r--SiliconImage/isi/drv/OV2685_MIPI_YUV/include_priv/OV2685_priv.h95
-rw-r--r--SiliconImage/isi/drv/OV2685_MIPI_YUV/source/OV2685_PARREL.c3064
-rwxr-xr-xSiliconImage/isi/drv/OV2685_MIPI_YUV/source/OV2685_tables.c546
-rw-r--r--SiliconImage/isi/drv/OV2686/Android.mk39
-rw-r--r--SiliconImage/isi/drv/OV2686/include_priv/OV2686_priv.h92
-rw-r--r--SiliconImage/isi/drv/OV2686/source/OV2686_PARREL.c2997
-rw-r--r--SiliconImage/isi/drv/OV2686/source/OV2686_tables.c382
-rw-r--r--SiliconImage/isi/drv/OV2715/CMakeLists.txt81
-rw-r--r--SiliconImage/isi/drv/OV2715/calib/CMakeLists.txt43
-rw-r--r--SiliconImage/isi/drv/OV2715/calib/OV2715_Cypress/OV2715_CHR8020.xml1748
-rw-r--r--SiliconImage/isi/drv/OV2715/calib/OV2715_Cypress/OV2715_EL-8020B.xml1748
-rw-r--r--SiliconImage/isi/drv/OV2715/calib/OV2715_Cypress/OV2715_OV2710_01.xml1748
-rw-r--r--SiliconImage/isi/drv/OV2715/calib/OV2715_Cypress/OV2715_OV2710_02.xml1748
-rw-r--r--SiliconImage/isi/drv/OV2715/calib/OV2715_Cypress/OV2715_pentax.xml1748
-rw-r--r--SiliconImage/isi/drv/OV2715/calib/OV2715_Cypress/OV2715_tamron.xml1748
-rw-r--r--SiliconImage/isi/drv/OV2715/codeanalysis/doxygen/doxyfile.cmake1526
-rw-r--r--SiliconImage/isi/drv/OV2715/codeanalysis/doxygen/images/dct_header.html15
-rw-r--r--SiliconImage/isi/drv/OV2715/codeanalysis/doxygen/images/simg_header.html15
-rw-r--r--SiliconImage/isi/drv/OV2715/codeanalysis/doxygen/latex/dct_header.tex.in28
-rw-r--r--SiliconImage/isi/drv/OV2715/codeanalysis/doxygen/latex/dreamchip.sty425
-rw-r--r--SiliconImage/isi/drv/OV2715/codeanalysis/doxygen/latex/siliconimage.sty425
-rw-r--r--SiliconImage/isi/drv/OV2715/codeanalysis/doxygen/latex/simg_header.tex.in28
-rw-r--r--SiliconImage/isi/drv/OV2715/codeanalysis/pc_lint/private.lnt11
-rw-r--r--SiliconImage/isi/drv/OV2715/include_priv/OV2715_a.h331
-rw-r--r--SiliconImage/isi/drv/OV2715/include_priv/OV2715_d50.h327
-rw-r--r--SiliconImage/isi/drv/OV2715/include_priv/OV2715_d65.h331
-rw-r--r--SiliconImage/isi/drv/OV2715/include_priv/OV2715_d75.h218
-rw-r--r--SiliconImage/isi/drv/OV2715/include_priv/OV2715_f11.h330
-rw-r--r--SiliconImage/isi/drv/OV2715/include_priv/OV2715_f2.h329
-rw-r--r--SiliconImage/isi/drv/OV2715/include_priv/OV2715_priv.h1276
-rw-r--r--SiliconImage/isi/drv/OV2715/source/OV2715.c4150
-rw-r--r--SiliconImage/isi/drv/OV2715/source/OV2715_tables.c591
-rw-r--r--SiliconImage/isi/drv/OV4188/Android.mk39
-rw-r--r--SiliconImage/isi/drv/OV4188/calib/OV4188.xml4131
-rw-r--r--SiliconImage/isi/drv/OV4188/include_priv/OV4188_MIPI_priv.h137
-rw-r--r--SiliconImage/isi/drv/OV4188/source/OV4188_MIPI.c4248
-rw-r--r--SiliconImage/isi/drv/OV4188/source/OV4188_tables.c2031
-rw-r--r--SiliconImage/isi/drv/OV4689/Android.mk39
-rw-r--r--SiliconImage/isi/drv/OV4689/calib/OV4689.xml4131
-rw-r--r--SiliconImage/isi/drv/OV4689/include_priv/OV4689_MIPI_priv.h137
-rw-r--r--SiliconImage/isi/drv/OV4689/source/OV4689_MIPI.c4249
-rw-r--r--SiliconImage/isi/drv/OV4689/source/OV4689_tables.c2031
-rw-r--r--SiliconImage/isi/drv/OV5630/CMakeLists.txt81
-rw-r--r--SiliconImage/isi/drv/OV5630/calib/CMakeLists.txt43
-rw-r--r--SiliconImage/isi/drv/OV5630/calib/OV5630/OV5630_sample01.xml1053
-rw-r--r--SiliconImage/isi/drv/OV5630/calib/OV5630/OV5630_sample01_list.xml1041
-rw-r--r--SiliconImage/isi/drv/OV5630/calib/OV5630/OV5630_sample01_tree.xml1004
-rw-r--r--SiliconImage/isi/drv/OV5630/codeanalysis/doxygen/doxyfile.cmake1526
-rw-r--r--SiliconImage/isi/drv/OV5630/codeanalysis/doxygen/images/dct_header.html15
-rw-r--r--SiliconImage/isi/drv/OV5630/codeanalysis/doxygen/images/simg_header.html15
-rw-r--r--SiliconImage/isi/drv/OV5630/codeanalysis/doxygen/latex/dct_header.tex.in28
-rw-r--r--SiliconImage/isi/drv/OV5630/codeanalysis/doxygen/latex/dreamchip.sty425
-rw-r--r--SiliconImage/isi/drv/OV5630/codeanalysis/doxygen/latex/siliconimage.sty425
-rw-r--r--SiliconImage/isi/drv/OV5630/codeanalysis/doxygen/latex/simg_header.tex.in28
-rw-r--r--SiliconImage/isi/drv/OV5630/codeanalysis/pc_lint/private.lnt11
-rw-r--r--SiliconImage/isi/drv/OV5630/include_priv/OV5630_a.h889
-rw-r--r--SiliconImage/isi/drv/OV5630/include_priv/OV5630_d120.h463
-rw-r--r--SiliconImage/isi/drv/OV5630/include_priv/OV5630_d50.h458
-rw-r--r--SiliconImage/isi/drv/OV5630/include_priv/OV5630_d60.h458
-rw-r--r--SiliconImage/isi/drv/OV5630/include_priv/OV5630_d65.h460
-rw-r--r--SiliconImage/isi/drv/OV5630/include_priv/OV5630_d75.h460
-rw-r--r--SiliconImage/isi/drv/OV5630/include_priv/OV5630_d80.h457
-rw-r--r--SiliconImage/isi/drv/OV5630/include_priv/OV5630_f11.h457
-rw-r--r--SiliconImage/isi/drv/OV5630/include_priv/OV5630_f12.h459
-rw-r--r--SiliconImage/isi/drv/OV5630/include_priv/OV5630_f2.h457
-rw-r--r--SiliconImage/isi/drv/OV5630/include_priv/OV5630_priv.h1171
-rw-r--r--SiliconImage/isi/drv/OV5630/source/OV5630.c4011
-rw-r--r--SiliconImage/isi/drv/OV5630/source/OV5630_tables.c565
-rw-r--r--SiliconImage/isi/drv/OV5640/Android.mk39
-rw-r--r--SiliconImage/isi/drv/OV5640/include_priv/OV5640_priv.h105
-rw-r--r--SiliconImage/isi/drv/OV5640/source/OV5640_PARREL.c3179
-rwxr-xr-xSiliconImage/isi/drv/OV5640/source/OV5640_af_firmware.c11940
-rwxr-xr-xSiliconImage/isi/drv/OV5640/source/OV5640_tables.c4636
-rw-r--r--SiliconImage/isi/drv/OV5645_MIPI_YUV/Android.mk38
-rw-r--r--SiliconImage/isi/drv/OV5645_MIPI_YUV/include_priv/OV5645_priv.h84
-rw-r--r--SiliconImage/isi/drv/OV5645_MIPI_YUV/source/OV5645_PARREL.c3062
-rw-r--r--SiliconImage/isi/drv/OV5645_MIPI_YUV/source/OV5645_tables.c474
-rwxr-xr-xSiliconImage/isi/drv/OV5647/calib/OV5647.xml2991
-rwxr-xr-xSiliconImage/isi/drv/OV5647/include_priv/OV5647_a.h331
-rwxr-xr-xSiliconImage/isi/drv/OV5647/include_priv/OV5647_d50.h327
-rwxr-xr-xSiliconImage/isi/drv/OV5647/include_priv/OV5647_d65.h331
-rwxr-xr-xSiliconImage/isi/drv/OV5647/include_priv/OV5647_d75.h331
-rwxr-xr-xSiliconImage/isi/drv/OV5647/include_priv/OV5647_f11.h330
-rwxr-xr-xSiliconImage/isi/drv/OV5647/include_priv/OV5647_f2.h329
-rw-r--r--SiliconImage/isi/drv/OV5647/include_priv/OV5647_priv.h90
-rw-r--r--SiliconImage/isi/drv/OV5647/source/OV5647_PARREL.c3544
-rwxr-xr-xSiliconImage/isi/drv/OV5647/source/OV5647_tables.c476
-rw-r--r--SiliconImage/isi/drv/OV5648/Android.mk39
-rw-r--r--SiliconImage/isi/drv/OV5648/calib/OV5648.xml4908
-rw-r--r--SiliconImage/isi/drv/OV5648/calib/OV5648_000V30-F415B-BY0A1.xml4903
-rw-r--r--SiliconImage/isi/drv/OV5648/calib/OV5648_CHT-842B-MD.xml4901
-rw-r--r--SiliconImage/isi/drv/OV5648/calib/OV5648_CMK-CW2005-FV1.xml4908
-rw-r--r--SiliconImage/isi/drv/OV5648/calib/OV5648_XY-LE001B1.xml4908
-rw-r--r--SiliconImage/isi/drv/OV5648/calib/OV5648_lens_CHT-842B-MD.xml4897
-rw-r--r--SiliconImage/isi/drv/OV5648/calib/OV5648_lens_CHT-842B-MD_OTP.xml4895
-rw-r--r--SiliconImage/isi/drv/OV5648/calib/OV5648_lens_XY-LE001B1.xml4902
-rw-r--r--SiliconImage/isi/drv/OV5648/include_priv/OV5648_MIPI_priv.h132
-rw-r--r--SiliconImage/isi/drv/OV5648/source/OV5648_MIPI.c4619
-rwxr-xr-xSiliconImage/isi/drv/OV5648/source/OV5648_tables.c1618
-rw-r--r--SiliconImage/isi/drv/OV5693/Android.mk39
-rw-r--r--SiliconImage/isi/drv/OV5693/calib/OV5693.xml4902
-rw-r--r--SiliconImage/isi/drv/OV5693/calib/OV5693_OTP.xml4902
-rw-r--r--SiliconImage/isi/drv/OV5693/include_priv/OV5693_MIPI_priv.h85
-rw-r--r--SiliconImage/isi/drv/OV5693/source/OV5693_MIPI.c4688
-rw-r--r--SiliconImage/isi/drv/OV5693/source/OV5693_tables.c504
-rw-r--r--SiliconImage/isi/drv/OV5695/Android.mk39
-rw-r--r--SiliconImage/isi/drv/OV5695/calib/OV5695.xml4897
-rw-r--r--SiliconImage/isi/drv/OV5695/calib/OV5695_lens_CHT-842B-MD.xml4897
-rw-r--r--SiliconImage/isi/drv/OV5695/calib/OV5695_lens_CHT-842B-MD_OTP.xml4895
-rw-r--r--SiliconImage/isi/drv/OV5695/calib/OV5695_lens_XY-LE001B1.xml4902
-rw-r--r--SiliconImage/isi/drv/OV5695/include_priv/OV5695_MIPI_priv.h124
-rw-r--r--SiliconImage/isi/drv/OV5695/source/OV5695_MIPI.c4313
-rw-r--r--SiliconImage/isi/drv/OV5695/source/OV5695_tables.c361
-rw-r--r--SiliconImage/isi/drv/OV8810/CMakeLists.txt81
-rw-r--r--SiliconImage/isi/drv/OV8810/calib/CMakeLists.txt43
-rw-r--r--SiliconImage/isi/drv/OV8810/codeanalysis/doxygen/doxyfile.cmake1526
-rw-r--r--SiliconImage/isi/drv/OV8810/codeanalysis/doxygen/images/dct_header.html15
-rw-r--r--SiliconImage/isi/drv/OV8810/codeanalysis/doxygen/images/simg_header.html15
-rw-r--r--SiliconImage/isi/drv/OV8810/codeanalysis/doxygen/latex/dct_header.tex.in28
-rw-r--r--SiliconImage/isi/drv/OV8810/codeanalysis/doxygen/latex/dreamchip.sty425
-rw-r--r--SiliconImage/isi/drv/OV8810/codeanalysis/doxygen/latex/siliconimage.sty425
-rw-r--r--SiliconImage/isi/drv/OV8810/codeanalysis/doxygen/latex/simg_header.tex.in28
-rw-r--r--SiliconImage/isi/drv/OV8810/codeanalysis/pc_lint/private.lnt11
-rw-r--r--SiliconImage/isi/drv/OV8810/include_priv/OV8810_a.h331
-rw-r--r--SiliconImage/isi/drv/OV8810/include_priv/OV8810_d50.h327
-rw-r--r--SiliconImage/isi/drv/OV8810/include_priv/OV8810_d65.h331
-rw-r--r--SiliconImage/isi/drv/OV8810/include_priv/OV8810_d75.h331
-rw-r--r--SiliconImage/isi/drv/OV8810/include_priv/OV8810_f11.h330
-rw-r--r--SiliconImage/isi/drv/OV8810/include_priv/OV8810_f2.h329
-rw-r--r--SiliconImage/isi/drv/OV8810/include_priv/OV8810_priv.h1234
-rw-r--r--SiliconImage/isi/drv/OV8810/source/OV8810.c3597
-rw-r--r--SiliconImage/isi/drv/OV8810/source/OV8810_tables.c563
-rw-r--r--SiliconImage/isi/drv/OV8820/Android.mk39
-rwxr-xr-xSiliconImage/isi/drv/OV8820/calib/OV8820.xml4122
-rw-r--r--SiliconImage/isi/drv/OV8820/include_priv/OV8820_MIPI_priv.h1298
-rwxr-xr-xSiliconImage/isi/drv/OV8820/include_priv/OV8820_a.h331
-rwxr-xr-xSiliconImage/isi/drv/OV8820/include_priv/OV8820_d50.h327
-rwxr-xr-xSiliconImage/isi/drv/OV8820/include_priv/OV8820_d65.h331
-rwxr-xr-xSiliconImage/isi/drv/OV8820/include_priv/OV8820_d75.h331
-rwxr-xr-xSiliconImage/isi/drv/OV8820/include_priv/OV8820_f11.h330
-rwxr-xr-xSiliconImage/isi/drv/OV8820/include_priv/OV8820_f2.h329
-rw-r--r--SiliconImage/isi/drv/OV8820/source/OV8820_MIPI.c4543
-rwxr-xr-xSiliconImage/isi/drv/OV8820/source/OV8820_tables.c866
-rw-r--r--SiliconImage/isi/drv/OV8825/Android.mk39
-rwxr-xr-xSiliconImage/isi/drv/OV8825/calib/OV8825.xml4634
-rw-r--r--SiliconImage/isi/drv/OV8825/include_priv/OV8825_MIPI_priv.h1311
-rwxr-xr-xSiliconImage/isi/drv/OV8825/include_priv/OV8825_a.h331
-rwxr-xr-xSiliconImage/isi/drv/OV8825/include_priv/OV8825_d50.h327
-rwxr-xr-xSiliconImage/isi/drv/OV8825/include_priv/OV8825_d65.h331
-rwxr-xr-xSiliconImage/isi/drv/OV8825/include_priv/OV8825_d75.h331
-rwxr-xr-xSiliconImage/isi/drv/OV8825/include_priv/OV8825_f11.h330
-rwxr-xr-xSiliconImage/isi/drv/OV8825/include_priv/OV8825_f2.h329
-rw-r--r--SiliconImage/isi/drv/OV8825/source/OV8825_MIPI.c4548
-rwxr-xr-xSiliconImage/isi/drv/OV8825/source/OV8825_tables.c866
-rw-r--r--SiliconImage/isi/drv/OV8856/Android.mk39
-rw-r--r--SiliconImage/isi/drv/OV8856/calib/OV8856_lens_GEGR160163-2.xml257
-rw-r--r--SiliconImage/isi/drv/OV8856/include_priv/OV8856_MIPI_priv.h105
-rw-r--r--SiliconImage/isi/drv/OV8856/source/OV8856_MIPI.c4558
-rw-r--r--SiliconImage/isi/drv/OV8856/source/OV8856_tables.c782
-rw-r--r--SiliconImage/isi/drv/OV8858/Android.mk39
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858.xml4747
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_000V47-F415B-BW5B0.xml4356
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_CMK-CB0407-FV3.xml4356
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_CMK-CB0692-FV1.xml4613
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_LG-40108A7.xml4356
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_LG-9569A2.xml4350
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_OIN8A32.xml4356
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_R1A_lens_LG-9569A2.xml4350
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_R1A_lens_LG-9569A2_OTP.xml4350
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_R1A_lens_SUNNY-3813A.xml4350
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_R1A_lens_SUNNY-3813A_OTP.xml4607
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_R2A_lens_LG-40108A7_OTP.xml4350
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_R2A_lens_LG-9569A2_OTP.xml4607
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_R5AV08.xml4607
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_SUNNY-3813A.xml4356
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_lens_LG-40108A7_OTP.xml4350
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_lens_LG-9569A2.xml4350
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_lens_LG-9569A2_OTP.xml4350
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_lens_R5AV08.xml4607
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_lens_SUNNY-3813A.xml4350
-rw-r--r--SiliconImage/isi/drv/OV8858/calib/OV8858_lens_SUNNY-3813A_OTP.xml4607
-rw-r--r--SiliconImage/isi/drv/OV8858/include_priv/OV8858_MIPI_priv.h187
-rw-r--r--SiliconImage/isi/drv/OV8858/source/OV8858_MIPI.c5182
-rw-r--r--SiliconImage/isi/drv/OV8858/source/OV8858_tables.c2369
-rw-r--r--SiliconImage/isi/drv/OV8865/Android.mk39
-rw-r--r--SiliconImage/isi/drv/OV8865/calib/OV8865.xml4350
-rw-r--r--SiliconImage/isi/drv/OV8865/include_priv/OV8865_MIPI_priv.h114
-rw-r--r--SiliconImage/isi/drv/OV8865/source/OV8865_MIPI.c4532
-rw-r--r--SiliconImage/isi/drv/OV8865/source/OV8865_tables.c1339
-rw-r--r--SiliconImage/isi/drv/SP2518/Android.mk39
-rw-r--r--SiliconImage/isi/drv/SP2518/include_priv/SP2518_priv.h95
-rw-r--r--SiliconImage/isi/drv/SP2518/source/SP2518_PARREL.c3004
-rwxr-xr-xSiliconImage/isi/drv/SP2518/source/SP2518_tables.c499
-rw-r--r--SiliconImage/isi/drv/TC358749XBG_MIPI_YUV/Android.mk39
-rw-r--r--SiliconImage/isi/drv/TC358749XBG_MIPI_YUV/include_priv/TC358749XBG_priv.h81
-rw-r--r--SiliconImage/isi/drv/TC358749XBG_MIPI_YUV/source/TC358749XBG_PARREL.c3478
-rw-r--r--SiliconImage/isi/drv/TC358749XBG_MIPI_YUV/source/TC358749XBG_tables.c593
-rw-r--r--SiliconImage/isi/include/isi.h1372
-rw-r--r--SiliconImage/isi/include/isi_common.h948
-rw-r--r--SiliconImage/isi/include/isi_iss.h321
-rwxr-xr-xSiliconImage/isi/include_priv/isi_priv.h234
-rwxr-xr-xSiliconImage/isi/source/Android.mk36
-rw-r--r--SiliconImage/isi/source/isi.c2559
-rwxr-xr-xSiliconImage/isi/source/isisup.c595
522 files changed, 612997 insertions, 0 deletions
diff --git a/Android.mk b/Android.mk
new file mode 100755
index 0000000..5053e7d
--- /dev/null
+++ b/Android.mk
@@ -0,0 +1 @@
+include $(call all-subdir-makefiles)
diff --git a/CameraHal/Android.mk b/CameraHal/Android.mk
new file mode 100644
index 0000000..63e45d3
--- /dev/null
+++ b/CameraHal/Android.mk
@@ -0,0 +1,369 @@
+#
+# RockChip Camera HAL
+#
+LOCAL_PATH:= $(call my-dir)
+include $(call all-subdir-makefiles)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := libMFDenoise
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE_CLASS := SHARED_LIBRARIES
+LOCAL_MODULE_STEM := $(LOCAL_MODULE)
+LOCAL_MODULE_SUFFIX := .so
+ifneq ($(strip $(TARGET_2ND_ARCH)), )
+LOCAL_MULTILIB := both
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \> 7.0)))
+LOCAL_SRC_FILES_$(TARGET_ARCH) := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)_android7.1$(LOCAL_MODULE_SUFFIX)
+LOCAL_SRC_FILES_$(TARGET_2ND_ARCH) := lib/$(TARGET_2ND_ARCH)/$(LOCAL_MODULE)_android7.1$(LOCAL_MODULE_SUFFIX)
+else
+LOCAL_SRC_FILES_$(TARGET_ARCH) := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+LOCAL_SRC_FILES_$(TARGET_2ND_ARCH) := lib/$(TARGET_2ND_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+endif
+else
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \> 7.0)))
+LOCAL_SRC_FILES := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)_android7.1$(LOCAL_MODULE_SUFFIX)
+else
+LOCAL_SRC_FILES := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+endif
+endif
+include $(BUILD_PREBUILT)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := libcameragl
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE_CLASS := SHARED_LIBRARIES
+LOCAL_MODULE_STEM := $(LOCAL_MODULE)
+LOCAL_MODULE_SUFFIX := .so
+ifneq ($(strip $(TARGET_2ND_ARCH)), )
+LOCAL_MULTILIB := both
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \> 7.0)))
+LOCAL_SRC_FILES_$(TARGET_ARCH) := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)_android7.1$(LOCAL_MODULE_SUFFIX)
+LOCAL_SRC_FILES_$(TARGET_2ND_ARCH) := lib/$(TARGET_2ND_ARCH)/$(LOCAL_MODULE)_android7.1$(LOCAL_MODULE_SUFFIX)
+else
+LOCAL_SRC_FILES_$(TARGET_ARCH) := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+LOCAL_SRC_FILES_$(TARGET_2ND_ARCH) := lib/$(TARGET_2ND_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+endif
+else
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \> 7.0)))
+LOCAL_SRC_FILES := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)_android7.1$(LOCAL_MODULE_SUFFIX)
+else
+LOCAL_SRC_FILES := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+endif
+endif
+include $(BUILD_PREBUILT)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := libopencv_java3
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE_CLASS := SHARED_LIBRARIES
+LOCAL_MODULE_STEM := $(LOCAL_MODULE)
+LOCAL_MODULE_SUFFIX := .so
+ifneq ($(strip $(TARGET_2ND_ARCH)), )
+LOCAL_MULTILIB := both
+LOCAL_SRC_FILES_$(TARGET_ARCH) := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+LOCAL_SRC_FILES_$(TARGET_2ND_ARCH) := lib/$(TARGET_2ND_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+else
+LOCAL_SRC_FILES := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+endif
+include $(BUILD_PREBUILT)
+
+include $(CLEAR_VARS)
+LOCAL_SRC_FILES:=\
+ CameraHalUtil.cpp\
+ MessageQueue.cpp\
+ Semaphore.cpp\
+ CameraHal_Module.cpp\
+ CameraHal_Mem.cpp\
+ CameraBuffer.cpp\
+ AppMsgNotifier.cpp\
+ DisplayAdapter.cpp\
+ CameraAdapter.cpp\
+ CameraSocAdapter.cpp\
+ CameraUSBAdapter.cpp\
+ CameraIspAdapter.cpp\
+ CameraIspSOCAdapter.cpp\
+ FakeCameraAdapter.cpp\
+ CameraHal.cpp\
+ CameraHal_board_xml_parse.cpp\
+ CameraHal_Tracer.c\
+ CameraIspTunning.cpp \
+ SensorListener.cpp\
+
+ifeq ($(strip $(BOARD_USE_DRM)), true)
+ifneq ($(filter rk3368 rk3399 rk3288 , $(strip $(TARGET_BOARD_PLATFORM))), )
+LOCAL_SRC_FILES += \
+ camera_mem_gralloc.cpp\
+ camera_mem.cpp
+endif
+endif
+
+ifeq ($(strip $(TARGET_BOARD_HARDWARE)),rk30board)
+LOCAL_C_INCLUDES += \
+ frameworks/base/include/ui \
+ frameworks/av/include \
+ frameworks/native/include \
+ frameworks/native/include/media/hardware \
+ frameworks/native/include/media/openmax \
+ external/libjpeg-turbo \
+ external/jpeg \
+ external/jhead \
+ hardware/rockchip/hwcomposer \
+ hardware/rockchip/libgralloc_ump/ump/include \
+ hardware/rockchip/librkvpu \
+ hardware/rockchip/libgralloc \
+ hardware/libhardware/include \
+ $(LOCAL_PATH)/../SiliconImage/include \
+ $(LOCAL_PATH)/../SiliconImage/include/isp_cam_api \
+ bionic \
+ external/tinyxml2 \
+ system/media/camera/include \
+ system/core/libion/include/ion \
+ system/core/libion/kernel-headers/linux
+
+#has no "external/stlport" from Android 6.0 on
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \< 6.0)))
+LOCAL_C_INCLUDES += \
+ external/stlport/stlport
+endif
+
+LOCAL_C_INCLUDES += \
+ external/skia/include/core \
+ external/skia/include/effects \
+ external/skia/include/images \
+ external/skia/src/ports \
+ external/skia/include/utils \
+ external/expat/lib
+
+LOCAL_SHARED_LIBRARIES:= \
+ libui \
+ libbinder \
+ libutils \
+ libcutils \
+ libcamera_client \
+ libgui\
+ libjpeg\
+ libjpeghwenc\
+ libion\
+ libvpu\
+ libdl\
+ libisp_silicomimageisp_api \
+ libexpat \
+ libskia \
+ libhardware \
+ libcameragl \
+ libopencv_java3 \
+ libMFDenoise
+#has no "external/stlport" from Android 6.0 on
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \< 6.0)))
+LOCAL_SHARED_LIBRARIES += \
+ libstlport
+endif
+
+#LOCAL_STATIC_LIBRARIES := libisp_calibdb libtinyxml2 libisp_cam_calibdb libisp_ebase \
+# libisp_oslayer libisp_common libisp_hal libisp_isi\
+# libisp_cam_engine libisp_version libisp_cameric_reg_drv \
+
+#LOCAL_PREBUILT_LIBS := libisp_silicomimageisp_api.so
+endif
+ifeq ($(strip $(TARGET_BOARD_HARDWARE)),rk2928board)
+LOCAL_C_INCLUDES += \
+ frameworks/base/include/ui \
+ external/jpeg \
+ external/jhead\
+ hardware/rockchip/hwcomposer_rga\
+ hardware/rockchip/librkvpu\
+ hardware/rockchip/libgralloc_ump/ump/include
+
+LOCAL_SHARED_LIBRARIES:= \
+ libui \
+ libbinder \
+ libutils \
+ libcutils \
+ libcamera_client \
+ libgui\
+ libjpeg\
+ libjpeghwenc\
+ libyuvtorgb\
+ libion\
+ libvpu\
+ libdl
+
+
+endif
+ifeq ($(strip $(TARGET_BOARD_HARDWARE)),rk29board)
+LOCAL_C_INCLUDES += \
+ #frameworks/base/include/ui \
+ frameworks/native/include/media/hardware \
+ frameworks/native/include/media/openmax \
+ external/jpeg \
+ external/jhead
+
+LOCAL_SHARED_LIBRARIES:= \
+ libui \
+ libbinder \
+ libutils \
+ libcutils \
+ libcamera_client \
+ libgui\
+ libjpeghwenc\
+ libjpeg\
+ libyuvtorgb
+endif
+
+ifeq ($(strip $(BOARD_USE_DRM)), true)
+LOCAL_C_INCLUDES += hardware/rockchip/librga
+LOCAL_SHARED_LIBRARIES +=librga
+LOCAL_SHARED_LIBRARIES +=libdrm
+endif
+
+LOCAL_CPPFLAGS := -fpermissive
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER
+LOCAL_CFLAGS += -DLINUX -DMIPI_USE_CAMERIC -DHAL_MOCKUP -DCAM_ENGINE_DRAW_DOM_ONLY -D_FILE_OFFSET_BITS=64 -DHAS_STDINT_H
+
+ifeq ($(strip $(GRAPHIC_MEMORY_PROVIDER)),dma_buf)
+LOCAL_CFLAGS += -DUSE_DMA_BUF
+endif
+
+ifeq ($(strip $(TARGET_BOARD_HARDWARE)),rk30board)
+LOCAL_CFLAGS += -DTARGET_RK30
+LOCAL_CFLAGS += -DHAL_MOCKUP
+endif
+
+ifeq ($(strip $(TARGET_BOARD_HARDWARE)),rk2928board)
+LOCAL_CFLAGS += -DTARGET_RK30
+endif
+
+ifeq ($(strip $(TARGET_BOARD_HARDWARE)),rk29board)
+LOCAL_CFLAGS += -DTARGET_RK29
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk3288)
+#LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+LOCAL_CFLAGS += -DTARGET_RK3288
+LOCAL_CFLAGS += -DTARGET_RK32
+LOCAL_CFLAGS += -DHAL_MOCKUP
+LOCAL_CFLAGS += -DHAVE_ARM_NEON
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM_GPU)), mali-t720)
+LOCAL_CFLAGS += -DMALI_PRODUCT_ID_T72X=1
+LOCAL_CFLAGS += -DMALI_AFBC_GRALLOC=0
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM_GPU)), mali-t760)
+LOCAL_CFLAGS += -DMALI_PRODUCT_ID_T76X=1
+# we use mali_afbc_gralloc, only if macro MALI_AFBC_GRALLOC is 1
+LOCAL_CFLAGS += -DMALI_AFBC_GRALLOC=1
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM_GPU)), mali-t860)
+LOCAL_CFLAGS += -DMALI_PRODUCT_ID_T86X=1
+LOCAL_CFLAGS += -DMALI_AFBC_GRALLOC=1
+endif
+
+ifneq ($(filter rk3366 rk3399 , $(strip $(TARGET_BOARD_PLATFORM))), )
+LOCAL_CFLAGS += -DTARGET_RK3368
+ifeq ($(strip $(TARGET_2ND_ARCH)), )
+LOCAL_CFLAGS += -DHAVE_ARM_NEON
+endif
+LOCAL_CFLAGS += -DTARGET_RK32
+LOCAL_CFLAGS += -DHAL_MOCKUP
+ifeq ($(strip $(TARGET_BOARD_PLATFORM_PRODUCT)), laptop)
+ LOCAL_CFLAGS += -DLAPTOP
+endif
+endif
+
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk3368)
+LOCAL_CFLAGS += -DTARGET_RK3368
+ifeq ($(strip $(TARGET_2ND_ARCH)), )
+LOCAL_CFLAGS += -DHAVE_ARM_NEON
+endif
+LOCAL_CFLAGS += -DTARGET_RK32
+LOCAL_CFLAGS += -DHAL_MOCKUP
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk3036)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+LOCAL_CFLAGS += -DTARGET_RK32
+LOCAL_CFLAGS += -DHAL_MOCKUP
+endif
+
+ifneq ($(filter rk322x rk312x , $(strip $(TARGET_BOARD_PLATFORM))), )
+#LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+LOCAL_CFLAGS += -DTARGET_RK312x
+LOCAL_CFLAGS += -DHAL_MOCKUP
+LOCAL_CFLAGS += -DHAVE_ARM_NEON
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk3328)
+#LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+LOCAL_CFLAGS += -DTARGET_RK32
+LOCAL_CFLAGS += -DTARGET_RK3328
+LOCAL_CFLAGS += -DHAL_MOCKUP
+endif
+
+ifneq ($(filter rk322x rk3328 , $(strip $(TARGET_BOARD_PLATFORM))), )
+LOCAL_CFLAGS += -DTARGET_RK322x
+LOCAL_SRC_FILES += Jpeg_soft_encode.cpp
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk3188)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+LOCAL_CFLAGS += -DTARGET_RK3188
+LOCAL_CFLAGS += -DHAL_MOCKUP
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk3026)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk30xx)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk319x)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+endif
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk2928)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK2928
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk30xxb)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XXB
+endif
+
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \>= 5.0)))
+LOCAL_CFLAGS += -DANDROID_5_X
+endif
+
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \>= 6.0)))
+LOCAL_CFLAGS += -DANDROID_6_X
+endif
+
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \>= 7.0)))
+LOCAL_CFLAGS += -DANDROID_7_X
+endif
+
+ifeq ($(strip $(BOARD_USE_DRM)), true)
+ifneq ($(filter rk3368 rk3399 rk3288 , $(strip $(TARGET_BOARD_PLATFORM))), )
+LOCAL_CFLAGS +=-DRK_DRM_GRALLOC=1
+endif
+endif
+
+#LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+ifneq (1,$(strip $(shell expr $(PLATFORM_VERSION) \>= 5.0)))
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+else
+ifneq ($(strip $(TARGET_2ND_ARCH)), )
+LOCAL_MULTILIB := both
+endif
+LOCAL_MODULE_RELATIVE_PATH := hw
+endif
+
+LOCAL_MODULE:=camera.rk30board
+
+LOCAL_MODULE_TAGS:= optional
+include $(BUILD_SHARED_LIBRARY)
+
+
diff --git a/CameraHal/Android.mk~ b/CameraHal/Android.mk~
new file mode 100644
index 0000000..f9bec81
--- /dev/null
+++ b/CameraHal/Android.mk~
@@ -0,0 +1,369 @@
+#
+# RockChip Camera HAL
+#
+LOCAL_PATH:= $(call my-dir)
+include $(call all-subdir-makefiles)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := libMFDenoise
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE_CLASS := SHARED_LIBRARIES
+LOCAL_MODULE_STEM := $(LOCAL_MODULE)
+LOCAL_MODULE_SUFFIX := .so
+ifneq ($(strip $(TARGET_2ND_ARCH)), )
+LOCAL_MULTILIB := both
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \> 7.0)))
+LOCAL_SRC_FILES_$(TARGET_ARCH) := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)_android7.1$(LOCAL_MODULE_SUFFIX)
+LOCAL_SRC_FILES_$(TARGET_2ND_ARCH) := lib/$(TARGET_2ND_ARCH)/$(LOCAL_MODULE)_android7.1$(LOCAL_MODULE_SUFFIX)
+else
+LOCAL_SRC_FILES_$(TARGET_ARCH) := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+LOCAL_SRC_FILES_$(TARGET_2ND_ARCH) := lib/$(TARGET_2ND_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+endif
+else
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \> 7.0)))
+LOCAL_SRC_FILES := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)_android7.1$(LOCAL_MODULE_SUFFIX)
+else
+LOCAL_SRC_FILES := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+endif
+endif
+include $(BUILD_PREBUILT)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := libcameragl
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE_CLASS := SHARED_LIBRARIES
+LOCAL_MODULE_STEM := $(LOCAL_MODULE)
+LOCAL_MODULE_SUFFIX := .so
+ifneq ($(strip $(TARGET_2ND_ARCH)), )
+LOCAL_MULTILIB := both
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \> 7.0)))
+LOCAL_SRC_FILES_$(TARGET_ARCH) := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)_android7.1$(LOCAL_MODULE_SUFFIX)
+LOCAL_SRC_FILES_$(TARGET_2ND_ARCH) := lib/$(TARGET_2ND_ARCH)/$(LOCAL_MODULE)_android7.1$(LOCAL_MODULE_SUFFIX)
+else
+LOCAL_SRC_FILES_$(TARGET_ARCH) := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+LOCAL_SRC_FILES_$(TARGET_2ND_ARCH) := lib/$(TARGET_2ND_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+endif
+else
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \> 7.0)))
+LOCAL_SRC_FILES := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)_android7.1$(LOCAL_MODULE_SUFFIX)
+else
+LOCAL_SRC_FILES := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+endif
+endif
+include $(BUILD_PREBUILT)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := libopencv_java3
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE_CLASS := SHARED_LIBRARIES
+LOCAL_MODULE_STEM := $(LOCAL_MODULE)
+LOCAL_MODULE_SUFFIX := .so
+ifneq ($(strip $(TARGET_2ND_ARCH)), )
+LOCAL_MULTILIB := both
+LOCAL_SRC_FILES_$(TARGET_ARCH) := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+LOCAL_SRC_FILES_$(TARGET_2ND_ARCH) := lib/$(TARGET_2ND_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+else
+LOCAL_SRC_FILES := lib/$(TARGET_ARCH)/$(LOCAL_MODULE)$(LOCAL_MODULE_SUFFIX)
+endif
+include $(BUILD_PREBUILT)
+
+include $(CLEAR_VARS)
+LOCAL_SRC_FILES:=\
+ CameraHalUtil.cpp\
+ MessageQueue.cpp\
+ Semaphore.cpp\
+ CameraHal_Module.cpp\
+ CameraHal_Mem.cpp\
+ CameraBuffer.cpp\
+ AppMsgNotifier.cpp\
+ DisplayAdapter.cpp\
+ CameraAdapter.cpp\
+ CameraSocAdapter.cpp\
+ CameraUSBAdapter.cpp\
+ CameraIspAdapter.cpp\
+ CameraIspSOCAdapter.cpp\
+ FakeCameraAdapter.cpp\
+ CameraHal.cpp\
+ CameraHal_board_xml_parse.cpp\
+ CameraHal_Tracer.c\
+ CameraIspTunning.cpp \
+ SensorListener.cpp\
+
+ifeq ($(strip $(BOARD_USE_DRM)), true)
+ifneq ($(filter rk3368 rk3399 rk3288 , $(strip $(TARGET_BOARD_PLATFORM))), )
+LOCAL_SRC_FILES += \
+ camera_mem_gralloc.cpp\
+ camera_mem.cpp
+endif
+endif
+
+ifeq ($(strip $(TARGET_BOARD_HARDWARE)),rk30board)
+LOCAL_C_INCLUDES += \
+ frameworks/base/include/ui \
+ frameworks/av/include \
+ frameworks/native/include \
+ frameworks/native/include/media/hardware \
+ frameworks/native/include/media/openmax \
+ external/libjpeg-turbo \
+ external/jpeg \
+ external/jhead \
+ hardware/rockchip/hwcomposer \
+ hardware/rockchip/libgralloc_ump/ump/include \
+ hardware/rockchip/librkvpu \
+ hardware/rockchip/libgralloc \
+ hardware/libhardware/include \
+ $(LOCAL_PATH)/../SiliconImage/include \
+ $(LOCAL_PATH)/../SiliconImage/include/isp_cam_api \
+ bionic \
+ external/tinyxml2 \
+ system/media/camera/include \
+ system/core/libion/include/ion \
+ system/core/libion/kernel-headers/linux
+
+#has no "external/stlport" from Android 6.0 on
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \< 6.0)))
+LOCAL_C_INCLUDES += \
+ external/stlport/stlport
+endif
+
+LOCAL_C_INCLUDES += \
+ external/skia/include/core \
+ external/skia/include/effects \
+ external/skia/include/images \
+ external/skia/src/ports \
+ external/skia/include/utils \
+ external/expat/lib
+
+LOCAL_SHARED_LIBRARIES:= \
+ libui \
+ libbinder \
+ libutils \
+ libcutils \
+ libcamera_client \
+ libgui\
+ libjpeg\
+ libjpeghwenc\
+ libion\
+ libvpu\
+ libdl\
+ libisp_silicomimageisp_api \
+ libexpat \
+ libskia \
+ libhardware \
+ libcameragl \
+ libopencv_java3 \
+ libMFDenoise
+#has no "external/stlport" from Android 6.0 on
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \< 6.0)))
+LOCAL_SHARED_LIBRARIES += \
+ libstlport
+endif
+
+#LOCAL_STATIC_LIBRARIES := libisp_calibdb libtinyxml2 libisp_cam_calibdb libisp_ebase \
+# libisp_oslayer libisp_common libisp_hal libisp_isi\
+# libisp_cam_engine libisp_version libisp_cameric_reg_drv \
+
+#LOCAL_PREBUILT_LIBS := libisp_silicomimageisp_api.so
+endif
+ifeq ($(strip $(TARGET_BOARD_HARDWARE)),rk2928board)
+LOCAL_C_INCLUDES += \
+ frameworks/base/include/ui \
+ external/jpeg \
+ external/jhead\
+ hardware/rockchip/hwcomposer_rga\
+ hardware/rockchip/librkvpu\
+ hardware/rockchip/libgralloc_ump/ump/include
+
+LOCAL_SHARED_LIBRARIES:= \
+ libui \
+ libbinder \
+ libutils \
+ libcutils \
+ libcamera_client \
+ libgui\
+ libjpeg\
+ libjpeghwenc\
+ libyuvtorgb\
+ libion\
+ libvpu\
+ libdl
+
+
+endif
+ifeq ($(strip $(TARGET_BOARD_HARDWARE)),rk29board)
+LOCAL_C_INCLUDES += \
+ #frameworks/base/include/ui \
+ frameworks/native/include/media/hardware \
+ frameworks/native/include/media/openmax \
+ external/jpeg \
+ external/jhead
+
+LOCAL_SHARED_LIBRARIES:= \
+ libui \
+ libbinder \
+ libutils \
+ libcutils \
+ libcamera_client \
+ libgui\
+ libjpeghwenc\
+ libjpeg\
+ libyuvtorgb
+endif
+
+ifeq ($(strip $(BOARD_USE_DRM)), true)
+LOCAL_C_INCLUDES += hardware/rockchip/librga
+LOCAL_SHARED_LIBRARIES +=librga
+LOCAL_SHARED_LIBRARIES +=libdrm
+endif
+
+LOCAL_CPPFLAGS := -fpermissive
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER
+LOCAL_CFLAGS += -DLINUX -DMIPI_USE_CAMERIC -DHAL_MOCKUP -DCAM_ENGINE_DRAW_DOM_ONLY -D_FILE_OFFSET_BITS=64 -DHAS_STDINT_H
+
+ifeq ($(strip $(GRAPHIC_MEMORY_PROVIDER)),dma_buf)
+LOCAL_CFLAGS += -DUSE_DMA_BUF
+endif
+
+ifeq ($(strip $(TARGET_BOARD_HARDWARE)),rk30board)
+LOCAL_CFLAGS += -DTARGET_RK30
+LOCAL_CFLAGS += -DHAL_MOCKUP
+endif
+
+ifeq ($(strip $(TARGET_BOARD_HARDWARE)),rk2928board)
+LOCAL_CFLAGS += -DTARGET_RK30
+endif
+
+ifeq ($(strip $(TARGET_BOARD_HARDWARE)),rk29board)
+LOCAL_CFLAGS += -DTARGET_RK29
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk3288)
+#LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+LOCAL_CFLAGS += -DTARGET_RK3288
+LOCAL_CFLAGS += -DTARGET_RK32
+LOCAL_CFLAGS += -DHAL_MOCKUP
+LOCAL_CFLAGS += -DHAVE_ARM_NEON
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM_GPU)), mali-t720)
+LOCAL_CFLAGS += -DMALI_PRODUCT_ID_T72X=1
+LOCAL_CFLAGS += -DMALI_AFBC_GRALLOC=0
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM_GPU)), mali-t760)
+LOCAL_CFLAGS += -DMALI_PRODUCT_ID_T76X=1
+# we use mali_afbc_gralloc, only if macro MALI_AFBC_GRALLOC is 1
+LOCAL_CFLAGS += -DMALI_AFBC_GRALLOC=1
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM_GPU)), mali-t860)
+LOCAL_CFLAGS += -DMALI_PRODUCT_ID_T86X=1
+LOCAL_CFLAGS += -DMALI_AFBC_GRALLOC=1
+endif
+
+ifneq ($(filter rk3366 rk3399 , $(strip $(TARGET_BOARD_PLATFORM))), )
+LOCAL_CFLAGS += -DTARGET_RK3368
+ifeq ($(strip $(TARGET_2ND_ARCH)), )
+LOCAL_CFLAGS += -DHAVE_ARM_NEON
+endif
+LOCAL_CFLAGS += -DTARGET_RK32
+LOCAL_CFLAGS += -DHAL_MOCKUP
+ifeq ($(strip $(TARGET_BOARD_PLATFORM_PRODUCT)), laptop)
+ LOCAL_CFLAGS += -DLAPTOP
+endif
+endif
+
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk3368)
+LOCAL_CFLAGS += -DTARGET_RK3368
+ifeq ($(strip $(TARGET_2ND_ARCH)), )
+LOCAL_CFLAGS += -DHAVE_ARM_NEON
+endif
+LOCAL_CFLAGS += -DTARGET_RK32
+LOCAL_CFLAGS += -DHAL_MOCKUP
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk3036)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+LOCAL_CFLAGS += -DTARGET_RK32
+LOCAL_CFLAGS += -DHAL_MOCKUP
+endif
+
+ifneq ($(filter rk322x rk312x , $(strip $(TARGET_BOARD_PLATFORM))), )
+#LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+LOCAL_CFLAGS += -DTARGET_RK312x
+LOCAL_CFLAGS += -DHAL_MOCKUP
+LOCAL_CFLAGS += -DHAVE_ARM_NEON
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk3328)
+#LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+LOCAL_CFLAGS += -DTARGET_RK32
+LOCAL_CFLAGS += -DTARGET_RK3328
+LOCAL_CFLAGS += -DHAL_MOCKUP
+endif
+
+ifneq ($(filter rk322x rk3328 , $(strip $(TARGET_BOARD_PLATFORM))), )
+LOCAL_CFLAGS += -DTARGET_RK322x
+LOCAL_SRC_FILES += Jpeg_soft_encode.cpp
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk3188)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+LOCAL_CFLAGS += -DTARGET_RK3188
+LOCAL_CFLAGS += -DHAL_MOCKUP
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk3026)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk30xx)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk319x)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XX
+endif
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk2928)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK2928
+endif
+
+ifeq ($(strip $(TARGET_BOARD_PLATFORM)),rk30xxb)
+LOCAL_CFLAGS += -DTARGET_BOARD_PLATFORM_RK30XXB
+endif
+
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \>= 5.0)))
+LOCAL_CFLAGS += -DANDROID_5_X
+endif
+
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \>= 6.0)))
+LOCAL_CFLAGS += -DANDROID_6_X
+endif
+
+ifeq (1,$(strip $(shell expr $(PLATFORM_VERSION) \>= 7.0)))
+LOCAL_CFLAGS += -DANDROID_7_X
+endif
+
+ifeq ($(strip $(BOARD_USE_DRM)), true)
+ifneq ($(filter rk3368 rk3399 rk3288 , $(strip $(TARGET_BOARD_PLATFORM))), )
+LOCAL_CFLAGS +=-DRK_DRM_GRALLOC=1
+endif
+endif
+
+#LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+ifneq (1,$(strip $(shell expr $(PLATFORM_VERSION) \>= 5.0)))
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+else
+ifneq ($(strip $(TARGET_2ND_ARCH)), )
+LOCAL_MULTILIB := both
+endif
+LOCAL_MODULE_RELATIVE_PATH := hw
+endif
+
+LOCAL_MODULE:=camera.rk30board
+
+LOCAL_MODULE_TAGS:= optional
+include $(BUILD_SHARED_LIBRARY)
+
+
diff --git a/CameraHal/AppMsgNotifier.cpp b/CameraHal/AppMsgNotifier.cpp
new file mode 100755
index 0000000..856b695
--- /dev/null
+++ b/CameraHal/AppMsgNotifier.cpp
@@ -0,0 +1,2335 @@
+#include "CameraHal.h"
+#include <MetadataBufferType.h>
+#include <media/hardware/HardwareAPI.h>
+
+#include <binder/IMemory.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+
+namespace android {
+
+
+#define EXIF_DEF_MAKER "rockchip"
+#define EXIF_DEF_MODEL "rk29sdk"
+
+
+static char ExifMaker[32];
+static char ExifModel[32];
+static char ExifSelfDefine[640];
+#define FACEDETECT_INIT_BIAS (-20)
+#define FACEDETECT_BIAS_INERVAL (5)
+#define FACEDETECT_FRAME_INTERVAL (1)
+/* ddl@rock-chips.com: v1.0xb.0 */
+AppMsgNotifier::AppMsgNotifier(CameraAdapter *camAdp)
+ :mCamAdp(camAdp),
+ encProcessThreadCommandQ("pictureEncThreadQ"),
+ eventThreadCommandQ("eventThreadQ")
+{
+ LOG_FUNCTION_NAME
+ if (create_vpu_memory_pool_allocator(&pool, 10, 200*200*2) < 0) {
+ LOGE("Create vpu memory pool for post process failed\n");
+ pool = NULL;
+ } else {
+ LOG1("============ create_vpu_memory_pool_allocator 10*80kB ==========");
+ }
+
+ mMsgTypeEnabled = 0;
+// mReceivePictureFrame = false;
+ mRunningState = 0;
+ mEncPictureNum = 0;
+// mRecordingRunning = 0;
+ mRecordW = 0;
+ mRecordH = 0;
+ mRawBufferProvider = NULL;
+ mJpegBufferProvider = NULL;
+ mVideoBufferProvider =NULL;
+ mFrameProvider = NULL;
+ mNotifyCb = NULL;
+ mDataCb = NULL;
+ mDataCbTimestamp = NULL;
+ mRequestMemory = NULL;
+ mCallbackCookie = NULL;
+ strcpy(mCallingProcess, getCallingProcess());
+
+ mPreviewDataW = 0;
+ mPreviewDataH = 0;
+ mPicSize =0;
+ mPicture = NULL;
+ mCurOrintation = 0;
+ mFaceDetecInit = false;
+ mFaceDetecW = 0;
+ mFaceDetectH = 0;
+ mFaceFrameNum = 0;
+ mFaceNum = 0;
+ mFaceDetectionDone = true;
+ mCurBiasAngle = FACEDETECT_INIT_BIAS;
+ mFaceContext = NULL;
+ mRecPrevCbDataEn = true;
+ mRecMetaDataEn = true;
+ mIsStoreMD = false;
+ memset(&mFaceDetectorFun,0,sizeof(struct face_detector_func_s));
+ int i ;
+ //request mVideoBufs
+ for (i=0; i<CONFIG_CAMERA_VIDEOENC_BUF_CNT; i++) {
+ mVideoBufs[i] = NULL;
+ mGrallocVideoBuf[i] = NULL;
+ }
+ mGrallocAllocDev = NULL;
+ mGrallocModule = NULL;
+ //create thread
+ mCameraAppMsgThread = new CameraAppMsgThread(this);
+ mCameraAppMsgThread->run("CamHalAppEventThread",ANDROID_PRIORITY_DISPLAY);
+ mEncProcessThread = new EncProcessThread(this);
+ mEncProcessThread->run("CamHalAppEncThread",ANDROID_PRIORITY_NORMAL);
+ mFaceDetThread = new CameraAppFaceDetThread(this);
+ mFaceDetThread->run("CamHalAppFaceThread",ANDROID_PRIORITY_NORMAL);
+ mCallbackThread = new CameraAppCallbackThread(this);
+ mCallbackThread->run("CamHalCallbckThread",ANDROID_PRIORITY_NORMAL);
+ LOG_FUNCTION_NAME_EXIT
+}
+AppMsgNotifier::~AppMsgNotifier()
+{
+ LOG_FUNCTION_NAME
+ //stop thread
+ Message_cam msg;
+ Semaphore sem,sem1;
+ if(mCameraAppMsgThread != NULL){
+ msg.command = CameraAppMsgThread::CMD_EVENT_EXIT;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ eventThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ mCameraAppMsgThread->requestExitAndWait();
+ mCameraAppMsgThread.clear();
+ }
+
+ if(mEncProcessThread != NULL){
+ msg.command = EncProcessThread::CMD_ENCPROCESS_EXIT;
+ sem1.Create();
+ msg.arg1 = (void*)(&sem1);
+ encProcessThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem1.Wait();
+ }
+ mEncProcessThread->requestExitAndWait();
+ mEncProcessThread.clear();
+ }
+ if(mFaceDetThread != NULL){
+ msg.command = CameraAppFaceDetThread::CMD_FACEDET_EXIT;
+ sem1.Create();
+ msg.arg1 = (void*)(&sem1);
+ faceDetThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem1.Wait();
+ }
+ mFaceDetThread->requestExitAndWait();
+ mFaceDetThread.clear();
+ }
+ if(mCallbackThread != NULL){
+ msg.command = CameraAppCallbackThread::CMD_CALLBACK_EXIT;
+ sem1.Create();
+ msg.arg1 = (void*)(&sem1);
+ callbackThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem1.Wait();
+ }
+ mCallbackThread->requestExitAndWait();
+ mCallbackThread.clear();
+ }
+
+    //release mVideoBufs
+    for (int i = 0; i < CONFIG_CAMERA_VIDEOENC_BUF_CNT; i++) {
+
+ if(mVideoBufs[i]!= NULL){
+ //free(mVideoBufs[i]);
+ mVideoBufs[i]->release(mVideoBufs[i]);
+ mVideoBufs[i] = NULL;
+ }
+ }
+
+ //destroy buffer
+ if(mRawBufferProvider)
+ mRawBufferProvider->freeBuffer();
+ if(mJpegBufferProvider)
+ mJpegBufferProvider->freeBuffer();
+ if(mVideoBufferProvider)
+ mVideoBufferProvider->freeBuffer();
+// if(mPicture){
+// mPicture->release(mPicture);
+// }
+ if (pool) {
+ release_vpu_memory_pool_allocator(pool);
+ pool = NULL;
+ }
+
+ deInitializeFaceDetec();
+ LOG_FUNCTION_NAME_EXIT
+}
+
+int AppMsgNotifier::startFaceDection(int width,int height, int faceNum)
+{
+ int ret = 0;
+ Mutex::Autolock lock(mFaceDecLock);
+ if(!(mRunningState & STA_RECEIVE_FACEDEC_FRAME)){
+ if((ret = initializeFaceDetec(width, height)) == 0){
+ mRunningState |= STA_RECEIVE_FACEDEC_FRAME;
+ mFaceFrameNum = 0;
+ mFaceNum = faceNum < 0 ? 1 : faceNum;
+ LOG1("start face detection !!");
+ }
+ mRecMetaDataEn = true;
+ }
+ return ret;
+}
+
+void AppMsgNotifier::stopFaceDection()
+{
+ Message_cam msg;
+ Semaphore sem;
+ LOG_FUNCTION_NAME
+
+ {
+ Mutex::Autolock lock(mFaceDecLock);
+ mRecMetaDataEn = false;
+ mRunningState &= ~STA_RECEIVE_FACEDEC_FRAME;
+ }
+    //send msg to pause face detection
+ msg.command = CameraAppFaceDetThread::CMD_FACEDET_PAUSE;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ faceDetThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ LOG1("stop face detection !!");
+
+ LOG_FUNCTION_NAME_EXIT
+}
+int AppMsgNotifier::initializeFaceDetec(int width,int height){
+ if(!mFaceDetecInit){
+ //load face detection lib
+ dlerror();
+ mFaceDetectorFun.mLibFaceDetectLibHandle = dlopen("libcam_facedetection.so", RTLD_NOW);
+ if (mFaceDetectorFun.mLibFaceDetectLibHandle == NULL) {
+ LOGE("%s(%d): open libcam_facedetection.so fail",__FUNCTION__,__LINE__);
+ const char *errmsg;
+ if ((errmsg = dlerror()) != NULL) {
+ LOGE("dlopen fail errmsg: %s", errmsg);
+ }
+ return -1;
+ } else {
+ LOGE("%s(%d): open libcam_facedetection.so success",__FUNCTION__,__LINE__);
+ mFaceDetectorFun.mFaceDectStartFunc = (FaceDetector_start_func)dlsym(mFaceDetectorFun.mLibFaceDetectLibHandle, "FaceDetector_start");
+
+ if (mFaceDetectorFun.mFaceDectStartFunc == NULL) {
+ LOGE("FaceDetector_start not found");
+ const char *errmsg;
+ if ((errmsg = dlerror()) != NULL) {
+ LOGE("dlsym FaceDetector_start fail errmsg: %s", errmsg);
+ }
+ return -1;
+ }
+
+ mFaceDetectorFun.mFaceDectStopFunc = (FaceDetector_stop_func)dlsym(mFaceDetectorFun.mLibFaceDetectLibHandle, "FaceDetector_stop");
+ mFaceDetectorFun.mFaceDectprepareFunc = (FaceDetector_prepare_func)dlsym(mFaceDetectorFun.mLibFaceDetectLibHandle, "FaceDetector_prepare");
+ if (mFaceDetectorFun.mFaceDectprepareFunc == NULL) {
+ LOGE("FaceDetector_start not found");
+ const char *errmsg;
+ if ((errmsg = dlerror()) != NULL) {
+ LOGE("dlsym FaceDetector_prepare fail errmsg: %s", errmsg);
+ }
+ return -1;
+ } else {
+ LOGE("dlsym FaceDetector_prepare success");
+ }
+
+ mFaceDetectorFun.mFaceDectFindFaceFun = (FaceDetector_findFaces_func)dlsym(mFaceDetectorFun.mLibFaceDetectLibHandle, "FaceDetector_findFaces");
+ if (mFaceDetectorFun.mFaceDectFindFaceFun == NULL) {
+ LOGE("FaceDetector_start not found");
+ const char *errmsg;
+ if ((errmsg = dlerror()) != NULL) {
+ LOGE("dlsym FaceDetector_start fail errmsg: %s", errmsg);
+ }
+ return -1;
+ } else {
+ LOGE("dlsym FaceDetector_find success");
+ }
+
+ mFaceDetectorFun.mFaceDetector_initizlize_func = (FaceDetector_initizlize_func)dlsym(mFaceDetectorFun.mLibFaceDetectLibHandle, "FaceDetector_initizlize");
+ mFaceDetectorFun.mFaceDetector_destory_func = (FaceDetector_destory_func)dlsym(mFaceDetectorFun.mLibFaceDetectLibHandle, "FaceDetector_destory");
+ mFaceContext = (*mFaceDetectorFun.mFaceDetector_initizlize_func)(DETECTOR_OPENCL, 15.0f , 1);
+ if(mFaceContext){
+ (*mFaceDetectorFun.mFaceDectStartFunc)(mFaceContext,width, height, IMAGE_YUV420SP);
+ mFaceDetecInit = true;
+ }else{
+ dlclose(mFaceDetectorFun.mLibFaceDetectLibHandle);
+ LOGE("%s(%d): open libface init fail",__FUNCTION__,__LINE__);
+ return -1;
+ }
+ }
+ }else if((mFaceDetecW != width) || (mFaceDetectH != height)){
+ (*mFaceDetectorFun.mFaceDectStopFunc)(mFaceContext);
+ (*mFaceDetectorFun.mFaceDectStartFunc)(mFaceContext,width, height, IMAGE_YUV420SP);
+ }
+ mFaceDetecW = width;
+ mFaceDetectH = height;
+ return 0;
+}
+
+void AppMsgNotifier::deInitializeFaceDetec(){
+ if(mFaceDetecInit){
+ (*mFaceDetectorFun.mFaceDectStopFunc)(mFaceContext);
+ (*mFaceDetectorFun.mFaceDetector_destory_func)(mFaceContext);
+ dlclose(mFaceDetectorFun.mLibFaceDetectLibHandle);
+ mFaceDetecInit = false;
+ }
+}
+void AppMsgNotifier::onOrientationChanged(uint32_t new_orien,int cam_orien,int face)
+{
+ Mutex::Autolock lock(mFaceDecLock);
+ int result,tmp_ori;
+
+ //correct source orientation by clockwise
+#if 1
+ //new_orien = 360 - new_orien;
+
+ if (face == CAMERA_FACING_FRONT) {
+ result = (cam_orien - new_orien + 360) % 360;
+ //result = (cam_orien + new_orien) % 360;
+ //result = (360 - result) % 360; // compensate the mirror
+ } else { // back-facing
+ new_orien = 360 - new_orien;
+ result = (cam_orien - new_orien + 360) % 360;
+ }
+ #else
+ result = (cam_orien - new_orien + 360) % 360;
+ #endif
+
+    //face detection orientation is counter-clockwise
+ tmp_ori = (4 - result / 90 ) % 4;
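+    //e.g. result = 90 (clockwise) maps to tmp_ori = 3, i.e. 270 degrees counter-clockwise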
+ if(mCurOrintation != tmp_ori){
+ mFaceFrameNum = 0;
+ mCurOrintation = tmp_ori;
+ mCurBiasAngle = FACEDETECT_INIT_BIAS;
+ }
+
+ LOG2("%s:face:%d,new_orien:%d,cam_orien %d,new orientattion : %d",__FUNCTION__,face,new_orien,cam_orien,mCurOrintation);
+}
+
+bool AppMsgNotifier::isNeedSendToFaceDetect()
+{
+ Mutex::Autolock lock(mFaceDecLock);
+ if((mRecMetaDataEn) && (mRunningState & STA_RECEIVE_FACEDEC_FRAME))
+ return true;
+ else{
+ LOG2("%s%d:needn't to send this frame to this face detection",__FUNCTION__,__LINE__);
+
+ return false;
+ }
+}
+void AppMsgNotifier::notifyNewFaceDecFrame(FramInfo_s* frame)
+{
+ if (mFaceDetectionDone) {
+ //send to app msg thread
+ Message_cam msg;
+ Mutex::Autolock lock(mFaceDecLock);
+ if((mRecMetaDataEn) && (mRunningState & STA_RECEIVE_FACEDEC_FRAME)) {
+ msg.command = CameraAppFaceDetThread::CMD_FACEDET_FACE_DETECT ;
+ msg.arg2 = (void*)(frame);
+ msg.arg3 = (void*)(frame->used_flag);
+ LOG2("%s(%d):notify new frame,index(%ld)",__FUNCTION__,__LINE__,frame->frame_index);
+ faceDetThreadCommandQ.put(&msg);
+ return;
+ }
+ }
+ mFrameProvider->returnFrame(frame->frame_index,frame->used_flag);
+}
+
+void AppMsgNotifier::setPictureRawBufProvider(BufferProvider* bufprovider)
+{
+ mRawBufferProvider = bufprovider;
+ #if (JPEG_BUFFER_DYNAMIC == 0)
+ mRawBufferProvider->createBuffer(1, 0x1500000, RAWBUFFER,mRawBufferProvider->is_cif_driver);
+ #endif
+ }
+void AppMsgNotifier::setPictureJpegBufProvider(BufferProvider* bufprovider)
+{
+ mJpegBufferProvider = bufprovider;
+ #if (JPEG_BUFFER_DYNAMIC == 0)
+ mJpegBufferProvider->createBuffer(1, 0x1600000,JPEGBUFFER,mJpegBufferProvider->is_cif_driver);
+ #endif
+}
+void AppMsgNotifier::setFrameProvider(FrameProvider * framepro)
+{
+ mFrameProvider = framepro;
+}
+void AppMsgNotifier::setVideoBufProvider(BufferProvider* bufprovider)
+{
+ mVideoBufferProvider = bufprovider;
+}
+
+
+int AppMsgNotifier::takePicture(picture_info_s picinfo)
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mPictureLock);
+ //if(mReceivePictureFrame){
+ if(mRunningState&STA_RECEIVE_PIC_FRAME){
+ LOGE("%s(%d): picture taken process is running now !",__FUNCTION__,__LINE__);
+ return -1;
+ }
+ memset(&mPictureInfo,0,sizeof(picture_info_s));
+ memcpy(&mPictureInfo,&picinfo,sizeof(picture_info_s));
+ mEncPictureNum = mPictureInfo.num;
+ //mReceivePictureFrame = true;
+ mRunningState |= STA_RECEIVE_PIC_FRAME;
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+int AppMsgNotifier::flushPicture()
+{
+ Message_cam msg;
+ Semaphore sem;
+ LOG_FUNCTION_NAME
+ {
+ int trytimes = 0;
+ while((mRunningState&STA_RECEIVE_PIC_FRAME) && (trytimes++ < 50)){
+ usleep(30*1000);//sleep 30 ms
+ }
+ if(trytimes == 50){
+ Mutex::Autolock lock(mPictureLock);
+ //mReceivePictureFrame = false;
+ mRunningState &= ~STA_RECEIVE_PIC_FRAME;
+ LOGE("%s:cancel picture maybe failed !!! ",__FUNCTION__);
+ }
+ }
+
+ //send a msg to cancel pic
+ msg.command = EncProcessThread::CMD_ENCPROCESS_PAUSE;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ encProcessThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+
+int AppMsgNotifier::pausePreviewCBFrameProcess()
+{
+ Message_cam msg;
+ Semaphore sem;
+ LOG_FUNCTION_NAME
+ if(mRunningState &STA_RECEIVE_PREVIEWCB_FRAME){
+ {
+ Mutex::Autolock lock(mDataCbLock);
+
+ mRecPrevCbDataEn = false;
+ mRunningState &= ~STA_RECEIVE_PREVIEWCB_FRAME;
+ }
+
+ //send a msg to pause event frame
+ msg.command = CameraAppMsgThread::CMD_EVENT_PAUSE;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ eventThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+
+bool AppMsgNotifier::isNeedSendToPicture()
+{
+ Mutex::Autolock lock(mPictureLock);
+ //if((mReceivePictureFrame) && (mPictureInfo.num > 0)){
+ if((mRunningState&STA_RECEIVE_PIC_FRAME) && (mPictureInfo.num > 0)){
+ LOGD("This frame need to be encode picture, mPictureInfo.num: %d",mPictureInfo.num);
+ return true;
+ }else
+ return false;
+}
+
+static struct alloc_device_t *g_mGrallocAllocDev = NULL;
+int AppMsgNotifier::grallocDevInit()
+{
+ int ret;
+
+ LOG_FUNCTION_NAME
+ ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&mGrallocModule);
+ if (ret < 0) {
+ LOGE("%s: Unable to get gralloc module (error %d)", __func__, ret);
+ }
+ else {
+ if (!g_mGrallocAllocDev) {
+ ret = gralloc_open((const struct hw_module_t*)mGrallocModule, &g_mGrallocAllocDev);
+ if (ret < 0) {
+ LOGE("%s: Unable to open gralloc alloc device (error %d)", __func__, ret);
+ }
+ }
+ mGrallocAllocDev = g_mGrallocAllocDev;
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+}
+
+int AppMsgNotifier::grallocDevDeinit()
+{
+ int ret=0;
+ LOG_FUNCTION_NAME
+ if (mGrallocAllocDev != NULL) {
+#if defined(RK_DRM_GRALLOC)
+        //DRM gralloc currently crashes when closed, so we skip closing it for now.
+        //This leaks the gralloc device every time cameraservice restarts.
+ mGrallocAllocDev = NULL;
+#else
+ ret = gralloc_close(mGrallocAllocDev);
+ if (ret < 0)
+ LOGE("%s: gralloc_close failed (error %d)", __func__, ret);
+ else {
+ mGrallocAllocDev = NULL;
+ g_mGrallocAllocDev = NULL;
+ }
+#endif
+ }
+ mGrallocModule = NULL;
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+}
+
+void AppMsgNotifier::grallocVideoBufAlloc(
+ const char *camPixFmt,
+ unsigned int width,
+ unsigned int height)
+{
+ int ret,i;
+ NATIVE_HANDLE_TYPE *buffHandle;
+ int stride;
+ rk_videobuf_info_t *pVideoBuf;
+ unsigned int grallocFlags = CAMHAL_GRALLOC_USAGE;
+ unsigned int halPixFmt;
+ void *virtAddr;
+ LOG1("grallocVideoBufAlloc: format %s %dx%d, usage 0x%08x", camPixFmt, width, height, grallocFlags);
+
+ if (!mGrallocModule) {
+ LOGE("%s: No gralloc module, cannot allocate buffer", __func__);
+ return;
+ }
+
+ if (!mGrallocAllocDev) {
+ LOGE("%s: No gralloc alloc device, cannot allocate buffer", __func__);
+ return;
+ }
+
+ halPixFmt = cameraPixFmt2HalPixFmt(camPixFmt);
+ if ((int)halPixFmt < 0) {
+ LOGE("%s: Invalid pixel format", __func__);
+ return;
+ }
+
+ LOG1("alloc %dX%d,format=0x%x,flags=0x%x",width,height,halPixFmt,grallocFlags);
+ for (i=0; i<CONFIG_CAMERA_VIDEOENC_BUF_CNT; i++){
+ virtAddr = NULL;
+ buffHandle = NULL;
+ pVideoBuf = (rk_videobuf_info_t*)malloc(sizeof(rk_videobuf_info_t));
+ if(!pVideoBuf){
+ LOGE("%s(%d): malloc video buffer structue failed!",__FUNCTION__,__LINE__);
+ goto FAIL;
+ }
+ memset(pVideoBuf, 0x0, sizeof(rk_videobuf_info_t));
+ mGrallocVideoBuf[i] = pVideoBuf;
+ ret = mGrallocAllocDev->alloc(mGrallocAllocDev,
+ width,
+ height,
+ halPixFmt,
+ grallocFlags,
+ (buffer_handle_t*)&buffHandle,
+ &stride);
+
+ if (ret < 0) {
+ LOGE("%s: gralloc buffer allocation failed (error %d)", __func__, ret);
+ goto FAIL;
+ }
+ pVideoBuf->lock = new Mutex();
+ pVideoBuf->buffer_hnd = (buffer_handle_t*)&buffHandle;
+ pVideoBuf->priv_hnd= (NATIVE_HANDLE_TYPE*)(buffHandle);
+ pVideoBuf->stride = stride;
+ pVideoBuf->phy_addr = 0x00;
+
+ ret = mGrallocModule->lock(mGrallocModule,
+ (buffer_handle_t)buffHandle,
+ grallocFlags, 0, 0,
+ width, height,
+ &virtAddr);
+ if(ret < 0){
+ LOGE("%s: gralloc buffer lock failed (error %d)", __func__, ret);
+ goto FAIL;
+ }
+ #if defined(TARGET_BOARD_PLATFORM_RK30XX) || defined(TARGET_RK29) || defined(TARGET_BOARD_PLATFORM_RK2928)
+ pVideoBuf->vir_addr = (long)buffHandle->base;
+ #elif defined(TARGET_BOARD_PLATFORM_RK30XXB) || defined(TARGET_RK3368) || defined(TARGET_RK3328) || defined(TARGET_RK312x) || defined(TARGET_RK3288)
+ pVideoBuf->vir_addr = (long)virtAddr;
+ #endif
+ //get fd
+ #if defined(RK_DRM_GRALLOC)
+ util_get_gralloc_buf_fd((buffer_handle_t)buffHandle,(int*)(&pVideoBuf->phy_addr));
+ #endif
+ LOG1("pVideoBuf->phy_addr=0x%x,pVideoBuf->vir_addr=0x%x,buffer_hnd=0x%x,priv_hnd=0x%x",
+ pVideoBuf->phy_addr,pVideoBuf->vir_addr,pVideoBuf->buffer_hnd,pVideoBuf->priv_hnd);
+ }
+ return;
+FAIL:
+ for(i=0; i<CONFIG_CAMERA_VIDEOENC_BUF_CNT; i++){
+ if(mGrallocVideoBuf[i] && mGrallocVideoBuf[i]->buffer_hnd){
+ mGrallocModule->unlock(mGrallocModule, (buffer_handle_t)(mGrallocVideoBuf[i]->priv_hnd));
+ mGrallocAllocDev->free(mGrallocAllocDev, (buffer_handle_t)(mGrallocVideoBuf[i]->priv_hnd));
+ delete mGrallocVideoBuf[i]->lock;
+ free(mGrallocVideoBuf[i]);
+ mGrallocVideoBuf[i] = NULL;
+ }
+ }
+}
+
+void AppMsgNotifier::grallocVideoBufFree()
+{
+ int ret,i;
+
+ LOG_FUNCTION_NAME
+ if (mGrallocAllocDev == NULL)
+ LOGE("%s: No gralloc alloc device, cannot free buffer", __func__);
+ else {
+ for (i = 0; i < CONFIG_CAMERA_VIDEOENC_BUF_CNT; i++){
+ if(mGrallocVideoBuf[i] && mGrallocVideoBuf[i]->buffer_hnd){
+ mGrallocModule->unlock(mGrallocModule, (buffer_handle_t)(mGrallocVideoBuf[i]->priv_hnd));
+ ret = mGrallocAllocDev->free(mGrallocAllocDev, (buffer_handle_t)(mGrallocVideoBuf[i]->priv_hnd));
+ if (ret != 0)
+ LOGE("%s: gralloc free buffer failed (error %d)", __func__, ret);
+ delete mGrallocVideoBuf[i]->lock;
+ free(mGrallocVideoBuf[i]);
+ mGrallocVideoBuf[i] = NULL;
+ }
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT
+}
+
+void AppMsgNotifier::grallocVideoBufLock()
+{
+}
+
+void AppMsgNotifier::grallocVideoBufUnlock()
+{
+}
+
+int AppMsgNotifier::grallocVideoBufGetAvailable()
+{
+ int i=-1;
+
+ for (i=0; i<CONFIG_CAMERA_VIDEOENC_BUF_CNT; i++)
+ {
+ if(mGrallocVideoBuf[i]->buf_state == 0)
+ break;
+ }
+
+ return i;
+}
+int AppMsgNotifier::startRecording(int w,int h)
+{
+ LOG_FUNCTION_NAME
+ int i = 0,frame_size = 0;
+ long *addr;
+ struct bufferinfo_s videoencbuf;
+
+ Mutex::Autolock lock(mRecordingLock);
+ //create video buffer
+ //video enc just support yuv420 format
+ //w,h align up to 16
+ frame_size = PAGE_ALIGN(((w+15) & (~15))*((h+15) & (~15))*3/2);
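+    //e.g. 1920x1080: aligned up to 1920x1088, so 1920*1088*3/2 = 3133440 bytes,
+    //which PAGE_ALIGN keeps as-is since it is already a multiple of the (typically 4KB) page size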
+ if(mIsStoreMD == false){
+ //release video buffer
+ mVideoBufferProvider->freeBuffer();
+ mVideoBufferProvider->createBuffer(CONFIG_CAMERA_VIDEOENC_BUF_CNT, frame_size, VIDEOENCBUFFER,mVideoBufferProvider->is_cif_driver);
+
+ for (int i=0; i < CONFIG_CAMERA_VIDEOENC_BUF_CNT; i++) {
+ if(!mVideoBufs[i])
+ mVideoBufs[i] = mRequestMemory(-1, sizeof(long), 1, NULL);
+ if( (NULL == mVideoBufs[i]) || ( NULL == mVideoBufs[i]->data)) {
+ mVideoBufs[i] = NULL;
+ LOGE("%s(%d): video buffer %d create failed",__FUNCTION__,__LINE__,i);
+ }
+ if (mVideoBufs[i]) {
+ addr = (long*)mVideoBufs[i]->data;
+ *addr = (long)mVideoBufferProvider->getBufPhyAddr(i);
+ }
+ }
+ }else{
+ if(!mGrallocAllocDev){
+ if(grallocDevInit() < 0)
+ {
+ grallocDevDeinit();
+ }
+ grallocVideoBufAlloc(CAMERA_DISPLAY_FORMAT_YUV420SP, w, h);
+ }
+ #if 0
+ for (int i=0; i < CONFIG_CAMERA_VIDEOENC_BUF_CNT; i++) {
+ if(!mVideoBufs[i])
+ mVideoBufs[i] = mRequestMemory(-1, sizeof(long)*2, 1, NULL);
+ if( (NULL == mVideoBufs[i]) || ( NULL == mVideoBufs[i]->data)) {
+ mVideoBufs[i] = NULL;
+ LOGE("%s(%d): video buffer %d create failed",__FUNCTION__,__LINE__,i);
+ }
+
+ addr = (long*)mVideoBufs[i]->data;
+ addr[0] = kMetadataBufferTypeGrallocSource;
+ addr[1] = (/*buffer_handle_t*/long)(mGrallocVideoBuf[i]->priv_hnd);
+ }
+
+ #else
+ for (int i=0; i < CONFIG_CAMERA_VIDEOENC_BUF_CNT; i++) {
+ if(!mVideoBufs[i]) {
+ #if defined(ANDROID_7_X)
+ mVideoBufs[i] = mRequestMemory(-1, sizeof(VideoNativeHandleMetadata), 1, NULL);
+ #endif
+ }
+ if( (NULL == mVideoBufs[i]) || ( NULL == mVideoBufs[i]->data)) {
+ mVideoBufs[i] = NULL;
+ LOGE("%s(%d): video buffer %d create failed",__FUNCTION__,__LINE__,i);
+ }
+
+ addr = (long*)mVideoBufs[i]->data;
+ #if defined(ANDROID_7_X)
+ addr[0] = kMetadataBufferTypeNativeHandleSource;
+ addr[1] = (long)&mGrallocVideoBuf[i]->priv_hnd->base;
+ #endif
+ }
+ #endif
+ }
+
+ mRecordW = w;
+ mRecordH = h;
+// mRecordingRunning = true;
+ mRunningState |= STA_RECORD_RUNNING;
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return 0;
+}
+
+int AppMsgNotifier::storeMetadataInBuffer(bool meta)
+{
+ #if defined(ANDROID_7_X)
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mRecordingLock);
+ mIsStoreMD = meta;
+ LOG_FUNCTION_NAME_EXIT
+ return NO_ERROR;
+ #else
+ return INVALID_OPERATION;
+ #endif
+}
+
+int AppMsgNotifier::stopRecording()
+{
+ Message_cam msg;
+ Semaphore sem;
+ LOG_FUNCTION_NAME
+
+ {
+ Mutex::Autolock lock(mRecordingLock);
+ //mRecordingRunning = false;
+ mRunningState &= ~STA_RECORD_RUNNING;
+ }
+ //send msg to stop recording
+ msg.command = CameraAppMsgThread::CMD_EVENT_PAUSE;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ eventThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ mIsStoreMD = false;
+ grallocVideoBufFree();
+ grallocDevDeinit();
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+bool AppMsgNotifier::isNeedSendToVideo()
+{
+ // LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mRecordingLock);
+ // if(mRecordingRunning == false)
+ if(!(mRunningState & STA_RECORD_RUNNING))
+ return false;
+ else{
+ LOG2("%s%d:need to encode video this frame",__FUNCTION__,__LINE__);
+
+ return true;
+ }
+}
+void AppMsgNotifier::releaseRecordingFrame(const void *opaque)
+{
+ ssize_t offset;
+ size_t size;
+ int index = -1,i;
+ LOG_FUNCTION_NAME
+ if(mIsStoreMD == false){
+ for(i=0; i<mVideoBufferProvider->getBufCount(); i++) {
+ if (mVideoBufs[i]->data == opaque) {
+ index = i;
+ break;
+ }
+ }
+ if (index == -1) {
+ LOGE("%s(%d): this video buffer is invaildate",__FUNCTION__,__LINE__);
+ LOG_FUNCTION_NAME_EXIT
+ return;
+ }
+ mVideoBufferProvider->setBufferStatus(index, 0, 0);
+ }else{
+ for(i=0; i<CONFIG_CAMERA_VIDEOENC_BUF_CNT; i++) {
+ if (mVideoBufs[i]->data == opaque) {
+ index = i;
+ break;
+ }
+ }
+ if (index == -1) {
+ LOGE("%s(%d): this video buffer is invaildate",__FUNCTION__,__LINE__);
+ LOG_FUNCTION_NAME_EXIT
+ return;
+ }
+ mGrallocVideoBuf[index]->buf_state = 0;
+ }
+ LOG_FUNCTION_NAME_EXIT
+
+}
+//must be called while the PREVIEW DATACB msg is disabled, and before enableMsgType
+int AppMsgNotifier::setPreviewDataCbRes(int w,int h, const char *fmt)
+{
+ LOG_FUNCTION_NAME
+ memset(mPreviewDataFmt,0,sizeof(mPreviewDataFmt));
+ strcpy(mPreviewDataFmt,fmt);
+ mPreviewDataW = w;
+ mPreviewDataH = h;
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+
+}
+int AppMsgNotifier::enableMsgType(int32_t msgtype)
+{
+ LOG_FUNCTION_NAME
+ if(msgtype & (CAMERA_MSG_PREVIEW_FRAME)){
+ Mutex::Autolock lock(mDataCbLock);
+ mMsgTypeEnabled |= msgtype;
+ mRunningState |= STA_RECEIVE_PREVIEWCB_FRAME;
+ LOG1("enable msg CAMERA_MSG_PREVIEW_FRAME");
+ }else
+ mMsgTypeEnabled |= msgtype;
+ LOG_FUNCTION_NAME_EXIT
+
+ return 0;
+}
+int AppMsgNotifier::msgEnabled(int32_t msg_type)
+{
+ return (mMsgTypeEnabled & msg_type);
+}
+int AppMsgNotifier::disableMsgType(int32_t msgtype)
+{
+ LOG_FUNCTION_NAME
+
+ if(msgtype & (CAMERA_MSG_POSTVIEW_FRAME | CAMERA_MSG_RAW_IMAGE|CAMERA_MSG_COMPRESSED_IMAGE | CAMERA_MSG_SHUTTER)){
+// if((mEncPictureNum <= 0) || (mReceivePictureFrame == false))
+ if((mEncPictureNum <= 0) || ((mRunningState&STA_RECEIVE_PIC_FRAME) == 0x0)){
+ mMsgTypeEnabled &= ~msgtype;
+ LOG1("%s%d:disable picure msgtype suc!!",__FUNCTION__,__LINE__);
+ }else
+ LOGD("%s%d:needn't to disable picure msgtype.",__FUNCTION__,__LINE__);
+
+ }else if(msgtype & (CAMERA_MSG_PREVIEW_FRAME)){
+
+ {
+ LOG1("%s%d: get mDataCbLock",__FUNCTION__,__LINE__);
+ Mutex::Autolock lock(mDataCbLock);
+ mMsgTypeEnabled &= ~msgtype;
+ mRunningState &= ~STA_RECEIVE_PREVIEWCB_FRAME;
+
+ LOG1("%s%d: release mDataCbLock",__FUNCTION__,__LINE__);
+ }
+ }else if(msgtype & (CAMERA_MSG_PREVIEW_METADATA)){
+ Mutex::Autolock lock(mFaceDecLock);
+ mMsgTypeEnabled &= ~msgtype;
+ mRunningState &= ~STA_RECEIVE_FACEDEC_FRAME;
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+void AppMsgNotifier::setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user,
+ Mutex *mainthread_lock)
+{
+ LOG_FUNCTION_NAME
+ mNotifyCb = notify_cb;
+ mDataCb = data_cb;
+ mDataCbTimestamp = data_cb_timestamp;
+ mRequestMemory = get_memory;
+ mCallbackCookie = user;
+ mMainThreadLockRef = mainthread_lock;
+ LOG_FUNCTION_NAME_EXIT
+}
+void AppMsgNotifier::notifyCbMsg(int msg,int ret)
+{
+ if(mMsgTypeEnabled & msg)
+ mNotifyCb(msg, ret, 0, mCallbackCookie);
+}
+// need sync with enable/disable msg ?
+bool AppMsgNotifier::isNeedSendToDataCB()
+{
+ Mutex::Autolock lock(mDataCbLock);
+ if(mRunningState & STA_RECEIVE_PREVIEWCB_FRAME){
+ return ((mRecPrevCbDataEn) && (mMsgTypeEnabled & CAMERA_MSG_PREVIEW_FRAME) &&(mDataCb));
+ }else
+ return false;
+}
+
+void AppMsgNotifier::notifyNewPicFrame(FramInfo_s* frame)
+{
+    //send this frame to the enc thread via encProcessThreadCommandQ
+ Message_cam msg;
+ Mutex::Autolock lock(mPictureLock);
+ mPictureInfo.num--;
+ msg.command = EncProcessThread::CMD_ENCPROCESS_SNAPSHOT;
+ msg.arg2 = (void*)(frame);
+ msg.arg3 = (void*)(frame->used_flag);
+ encProcessThreadCommandQ.put(&msg);
+}
+
+void AppMsgNotifier::callback_notify_shutter()
+{
+ //send to callbackthread
+ Message_cam msg;
+ msg.command = CameraAppCallbackThread::CMD_MSG_SHUTTER;
+ msg.arg2 = NULL;
+ msg.arg3 = NULL;
+ callbackThreadCommandQ.put(&msg);
+}
+
+void AppMsgNotifier::callback_raw_image(camera_memory_t* frame)
+{
+ //send to callbackthread
+ Message_cam msg;
+ msg.command = CameraAppCallbackThread::CMD_MSG_RAW_IMAGE;
+ msg.arg2 = (void *)frame;
+ msg.arg3 = NULL;
+ callbackThreadCommandQ.put(&msg);
+}
+
+void AppMsgNotifier::callback_notify_raw_image()
+{
+ //send to callbackthread
+ Message_cam msg;
+ msg.command = CameraAppCallbackThread::CMD_MSG_RAW_IMAGE_NOTIFY;
+ msg.arg2 = NULL;
+ msg.arg3 = NULL;
+ callbackThreadCommandQ.put(&msg);
+}
+
+void AppMsgNotifier::callback_compressed_image(camera_memory_t* frame)
+{
+ //send to callbackthread
+ Message_cam msg;
+ msg.command = CameraAppCallbackThread::CMD_MSG_COMPRESSED_IMAGE;
+ msg.arg2 = (void*)frame;
+ msg.arg3 = NULL;
+ callbackThreadCommandQ.put(&msg);
+}
+
+void AppMsgNotifier::callback_notify_error()
+{
+ //send to callbackthread
+ Message_cam msg;
+ msg.command = CameraAppCallbackThread::CMD_MSG_ERROR;
+ msg.arg2 = NULL;
+ msg.arg3 = NULL;
+ callbackThreadCommandQ.put(&msg);
+}
+
+void AppMsgNotifier::callback_preview_metadata(camera_memory_t* datacbFrameMem, camera_frame_metadata_t *facedata, struct RectFace *faces)
+{
+ //send to callbackthread
+ Message_cam msg;
+ msg.command = CameraAppCallbackThread::CMD_MSG_PREVIEW_METADATA;
+ msg.arg2 = (void *)facedata;
+ msg.arg3 = (void *)faces;
+ msg.arg4 = (void*)(datacbFrameMem);
+ callbackThreadCommandQ.put(&msg);
+}
+
+void AppMsgNotifier::callback_video_frame(camera_memory_t* video_frame)
+{
+ //send to callbackthread
+ Message_cam msg;
+ msg.command = CameraAppCallbackThread::CMD_MSG_VIDEO_FRAME;
+ msg.arg2 = (void *)video_frame;
+ msg.arg3 = NULL;
+ callbackThreadCommandQ.put(&msg);
+}
+
+void AppMsgNotifier::callback_preview_frame(camera_memory_t* datacbFrameMem)
+{
+ //send to callbackthread
+ Message_cam msg;
+ msg.command = CameraAppCallbackThread::CMD_MSG_PREVIEW_FRAME;
+ msg.arg2 = (void*)(datacbFrameMem);
+ msg.arg3 = NULL;
+ callbackThreadCommandQ.put(&msg);
+}
+
+void AppMsgNotifier::notifyNewPreviewCbFrame(FramInfo_s* frame)
+{
+ //send to app msg thread
+ Message_cam msg;
+ Mutex::Autolock lock(mDataCbLock);
+ if((mRecPrevCbDataEn) &&(mRunningState & STA_RECEIVE_PREVIEWCB_FRAME)){
+ msg.command = CameraAppMsgThread::CMD_EVENT_PREVIEW_DATA_CB;
+ msg.arg2 = (void*)(frame);
+ msg.arg3 = (void*)(frame->used_flag);
+ eventThreadCommandQ.put(&msg);
+ }else
+ mFrameProvider->returnFrame(frame->frame_index,frame->used_flag);
+
+}
+void AppMsgNotifier::notifyNewVideoFrame(FramInfo_s* frame)
+{
+ //send to app msg thread
+ Message_cam msg;
+ Mutex::Autolock lock(mRecordingLock);
+ if(mRunningState & STA_RECORD_RUNNING){
+ msg.command = CameraAppMsgThread::CMD_EVENT_VIDEO_ENCING ;
+ msg.arg2 = (void*)(frame);
+ msg.arg3 = (void*)(frame->used_flag);
+ LOG2("%s(%d):notify new frame,index(%ld)",__FUNCTION__,__LINE__,frame->frame_index);
+ eventThreadCommandQ.put(&msg);
+ }else
+ mFrameProvider->returnFrame(frame->frame_index,frame->used_flag);
+}
+
+static BufferProvider* g_rawbufProvider = NULL;
+static BufferProvider* g_jpegbufProvider = NULL;
+
+extern "C" int jpegEncFlushBufferCb(int buf_type, int offset, int len)
+{
+ int ret = 0;
+
+ /* ddl@rock-chips.com notes:
+ * 0 : input buffer index for jpeg encoder
+ * 1 : output buffer index for jpeg encoder
+ */
+
+ if (buf_type == 0) {
+ g_rawbufProvider->flushBuffer(0);
+ } else if (buf_type == 1) {
+ g_jpegbufProvider->flushBuffer(0);
+ }
+
+ return ret;
+}
+
+
+int AppMsgNotifier::copyAndSendRawImage(void *raw_image, int size)
+{
+ camera_memory_t* picture = NULL;
+ void *dest = NULL, *src = NULL;
+
+ if(mMsgTypeEnabled & CAMERA_MSG_RAW_IMAGE) {
+
+ mPicture = mRequestMemory(-1, size, 1, NULL);
+ mPicSize = size;
+ picture = mPicture;
+ if (NULL != picture) {
+ dest = picture->data;
+ if (NULL != dest) {
+ memcpy(dest, raw_image, size);
+ callback_raw_image(picture);
+ }
+ } else if (mMsgTypeEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY) {
+ callback_notify_raw_image();
+ }
+ } else if (mMsgTypeEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY) {
+ callback_notify_raw_image();
+ }
+ return 0;
+}
+
+int AppMsgNotifier::copyAndSendCompressedImage(void *compressed_image, int size)
+{
+ camera_memory_t* picture = NULL;
+ void *dest = NULL, *src = NULL;
+
+ if(mMsgTypeEnabled & CAMERA_MSG_COMPRESSED_IMAGE) {
+ mPicture = mRequestMemory(-1, size, 1, NULL);
+ mPicSize = size;
+ picture = mPicture;
+ if (NULL != picture) {
+ dest = picture->data;
+ if (NULL != dest) {
+ memcpy(dest, compressed_image, size);
+ callback_compressed_image(picture);
+ }
+ }
+ }
+ return 0;
+}
+
+int AppMsgNotifier::Jpegfillexifinfo(RkExifInfo *exifInfo,picture_info_s &params)
+{
+ char property[PROPERTY_VALUE_MAX];
+
+ if(exifInfo==NULL){
+ LOGE( "..%s..%d..argument error ! ",__FUNCTION__,__LINE__);
+ return 0;
+ }
+
+ /*fill in jpeg exif tag*/
+ property_get("ro.product.brand", property, EXIF_DEF_MAKER);
+ strncpy((char *)ExifMaker, property,sizeof(ExifMaker) - 1);
+ ExifMaker[sizeof(ExifMaker) - 1] = '\0';
+ exifInfo->maker = ExifMaker;
+ exifInfo->makerchars = strlen(ExifMaker)+1;
+
+ property_get("ro.product.model", property, EXIF_DEF_MODEL);
+ strncpy((char *)ExifModel, property,sizeof(ExifModel) - 1);
+ ExifModel[sizeof(ExifModel) - 1] = '\0';
+ exifInfo->modelstr = ExifModel;
+ exifInfo->modelchars = strlen(ExifModel)+1;
+
+ //degree 0:1
+ //degree 90(clockwise):6
+ //degree 180:3
+ //degree 270:8
+ switch(params.rotation){
+ case 0:
+ exifInfo->Orientation = 1;
+ break;
+ case 90:
+ exifInfo->Orientation = 1;
+ break;
+ case 180:
+ exifInfo->Orientation = 1;
+ break;
+ case 270:
+ exifInfo->Orientation = 1;
+ break;
+ default:
+ exifInfo->Orientation = 1;
+ break;
+ }
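+    //note: Orientation is left at 1 (top-left) for every rotation; the rotation itself
+    //is applied in captureEncProcessPicture via JpegInInfo.rotateDegree / YuvData_Mirror_Flip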
+
+ // Date time
+ time_t rawtime;
+ struct tm *timeinfo;
+ time(&rawtime);
+ timeinfo = localtime(&rawtime);
+ strftime((char *)exifInfo->DateTime, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
+
+ exifInfo->ExposureTime.num = (int)(params.cameraparam.ExposureTime*10000);
+ exifInfo->ExposureTime.denom = 10000;
+ exifInfo->ApertureFNumber.num = 0x118;
+ exifInfo->ApertureFNumber.denom = 0x64;
+ exifInfo->ISOSpeedRatings = ((int)(params.cameraparam.ISOSpeedRatings))*100;
+ exifInfo->CompressedBitsPerPixel.num = 0x4;
+ exifInfo->CompressedBitsPerPixel.denom = 0x1;
+ exifInfo->ShutterSpeedValue.num = 0x452;
+ exifInfo->ShutterSpeedValue.denom = 0x100;
+ exifInfo->ApertureValue.num = 0x2f8;
+ exifInfo->ApertureValue.denom = 0x100;
+ exifInfo->ExposureBiasValue.num = 0;
+ exifInfo->ExposureBiasValue.denom = 0x100;
+ exifInfo->MaxApertureValue.num = 0x02f8;
+ exifInfo->MaxApertureValue.denom = 0x100;
+ exifInfo->MeteringMode = 02;
+
+ exifInfo->Flash = params.flash;
+ exifInfo->FocalLength.num = (uint32_t)params.focalen;
+ exifInfo->FocalLength.denom = 0x1;
+
+ exifInfo->FocalPlaneXResolution.num = 0x8383;
+ exifInfo->FocalPlaneXResolution.denom = 0x67;
+ exifInfo->FocalPlaneYResolution.num = 0x7878;
+ exifInfo->FocalPlaneYResolution.denom = 0x76;
+ exifInfo->SensingMethod = 2;
+ exifInfo->FileSource = 3;
+ exifInfo->CustomRendered = 1;
+ exifInfo->ExposureMode = 0;
+
+ exifInfo->WhiteBalance = params.whiteBalance;
+ exifInfo->DigitalZoomRatio.num = params.w;
+ exifInfo->DigitalZoomRatio.denom = params.w;
+ exifInfo->SceneCaptureType = 0x01;
+
+ snprintf(ExifSelfDefine,sizeof(ExifSelfDefine)-1,"XMLVersion=%s Rg_Proj=%0.5f s=%0.5f s_max1=%0.5f s_max2=%0.5f Bg1=%0.5f Rg1=%0.5f Bg2=%0.5f Rg2=%0.5f "
+ "colortemperature=%s ExpPriorIn=%0.2f ExpPriorOut=%0.2f region=%d ",params.cameraparam.XMLVersion,params.cameraparam.f_RgProj,\
+ params.cameraparam.f_s,params.cameraparam.f_s_Max1,params.cameraparam.f_s_Max2,params.cameraparam.f_Bg1,params.cameraparam.f_Rg1,params.cameraparam.f_Bg2,\
+ params.cameraparam.f_Rg2,params.cameraparam.illuName[params.cameraparam.illuIdx],params.cameraparam.expPriorIn,params.cameraparam.expPriorOut,\
+ params.cameraparam.region);
+
+ int i;
+ char str[64];
+ for(i=0; i<params.cameraparam.count; i++)
+ {
+ snprintf(str,sizeof(str)-1, "illuName[%d]=%s ",i,params.cameraparam.illuName[i]);
+ strncat(ExifSelfDefine, str, sizeof(ExifSelfDefine)-strlen(ExifSelfDefine)-1);
+ snprintf(str,sizeof(str)-1, "likehood[%d]=%0.2f ",i,params.cameraparam.likehood[i]);
+ strncat(ExifSelfDefine, str, sizeof(ExifSelfDefine)-strlen(ExifSelfDefine)-1);
+ snprintf(str,sizeof(str)-1, "wight[%d]=%0.2f ",i,params.cameraparam.wight[i]);
+ strncat(ExifSelfDefine, str, sizeof(ExifSelfDefine)-strlen(ExifSelfDefine)-1);
+ }
+ exifInfo->makernote = ExifSelfDefine;
+ exifInfo->makernotechars = strlen(ExifSelfDefine)+1;
+
+ return 0;
+}
+
+
+/*fill in jpeg gps information*/
+int AppMsgNotifier::Jpegfillgpsinfo(RkGPSInfo *gpsInfo,picture_info_s &params)
+{
+ char* gpsprocessmethod = NULL;
+ double latitude,longtitude,altitude;
+ double deg,min,sec;
+ double fract;
+ long timestamp,num;
+ int year,month,day,hour_t,min_t,sec_t;
+ char date[12];
+
+ if(gpsInfo==NULL) {
+ LOGE( "%s(%d): gpsInfo is NULL ",__FUNCTION__,__LINE__);
+ return 0;
+ }
+
+ altitude = params.altitude;
+ latitude = params.latitude;
+ longtitude = params.longtitude;
+ timestamp = params.timestamp;
+// gpsprocessmethod = (char*)params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD);
+
+ if(latitude >= 0){
+ gpsInfo->GPSLatitudeRef[0] = 'N';
+ gpsInfo->GPSLatitudeRef[1] = '\0';
+ }else if((latitude <0)&&(latitude!=-1)){
+ gpsInfo->GPSLatitudeRef[0] = 'S';
+ gpsInfo->GPSLatitudeRef[1] = '\0';
+ }else{
+ gpsInfo->GPSLatitudeRef[0] = '\0';
+ gpsInfo->GPSLatitudeRef[1] = '\0';
+ }
+
+ if(latitude!= -1)
+ {
+ latitude = fabs(latitude);
+ fract = modf(latitude,&deg);
+ fract = modf(fract*60,&min);
+ fract = modf(fract*60,&sec);
+ if(fract >= 0.5)sec+=1;
+
+ //LOGD("latitude: deg = %f;min = %f;sec =%f",deg,min,sec);
+
+ gpsInfo->GPSLatitude[0].num = (uint32_t)deg;
+ gpsInfo->GPSLatitude[0].denom = 1;
+ gpsInfo->GPSLatitude[1].num = (uint32_t)min;
+ gpsInfo->GPSLatitude[1].denom = 1;
+ gpsInfo->GPSLatitude[2].num = (uint32_t)sec;
+ gpsInfo->GPSLatitude[2].denom = 1;
+ }
+
+ if(longtitude >= 0){
+ gpsInfo->GPSLongitudeRef[0] = 'E';
+ gpsInfo->GPSLongitudeRef[1] = '\0';
+ }else if((longtitude < 0)&&(longtitude!=-1)){
+ gpsInfo->GPSLongitudeRef[0] = 'W';
+ gpsInfo->GPSLongitudeRef[1] = '\0';
+ }else{
+ gpsInfo->GPSLongitudeRef[0] = '\0';
+ gpsInfo->GPSLongitudeRef[1] = '\0';
+ }
+
+ if(longtitude!=-1)
+ {
+ longtitude = fabs(longtitude);
+ fract = modf(longtitude,&deg);
+ fract = modf(fract*60,&min);
+ modf(fract*60,&sec);
+
+ //LOGD("longtitude: deg = %f;min = %f;sec =%f",deg,min,sec);
+ gpsInfo->GPSLongitude[0].num = (uint32_t)deg;
+ gpsInfo->GPSLongitude[0].denom = 1;
+ gpsInfo->GPSLongitude[1].num = (uint32_t)min;
+ gpsInfo->GPSLongitude[1].denom = 1;
+ gpsInfo->GPSLongitude[2].num = (uint32_t)sec;
+ gpsInfo->GPSLongitude[2].denom = 1;
+ }
+
+ if(altitude >= 0){
+ gpsInfo->GPSAltitudeRef = 0;
+ }else if((altitude <0 )&&(altitude!=-1)) {
+ gpsInfo->GPSAltitudeRef = 1;
+ }
+
+ if(altitude!=-1)
+ {
+ altitude = fabs(altitude);
+ gpsInfo->GPSAltitude.num =(uint32_t)altitude;
+ gpsInfo->GPSAltitude.denom = 0x1;
+ //LOGD("altitude =%f GPSAltitudeRef: %d",altitude,gpsInfo->GPSAltitudeRef);
+ }
+
+ if(timestamp!=-1)
+ {
+ /*timestamp,has no meaning,only for passing cts*/
+ //LOGD("timestamp =%d",timestamp);
+ gpsInfo->GpsTimeStamp[0].num =0;
+ gpsInfo->GpsTimeStamp[0].denom = 1;
+ gpsInfo->GpsTimeStamp[1].num = 0;
+ gpsInfo->GpsTimeStamp[1].denom = 1;
+ gpsInfo->GpsTimeStamp[2].num = timestamp&0x03;
+ gpsInfo->GpsTimeStamp[2].denom = 1;
+ memcpy(gpsInfo->GpsDateStamp,"2008:01:01\0",11);//"YYYY:MM:DD\0"
+ }
+ return 0;
+}
+
+int AppMsgNotifier::captureEncProcessPicture(FramInfo_s* frame){
+ int ret = 0;
+ int jpeg_w,jpeg_h,i,jpeg_buf_w,jpeg_buf_h;
+ unsigned int pictureSize;
+ int jpegSize;
+ int quality;
+ int thumbquality = 0;
+ int thumbwidth = 0;
+ int thumbheight = 0;
+ int err = 0;
+ int rotation = 0;
+ JpegEncInInfo JpegInInfo;
+ JpegEncOutInfo JpegOutInfo;
+ RkExifInfo exifInfo;
+ RkGPSInfo gpsInfo;
+ char ExifAsciiPrefix[8] = {'A', 'S', 'C', 'I', 'I', '\0', '\0', '\0'};
+ char gpsprocessmethod[45];
+ char *getMethod = NULL;
+ double latitude,longtitude,altitude;
+ long timestamp;
+ JpegEncType encodetype;
+ int picfmt;
+ long rawbuf_phy;
+ long rawbuf_vir;
+ long jpegbuf_phy;
+ long jpegbuf_vir;
+ long input_phy_addr,input_vir_addr;
+ long output_phy_addr,output_vir_addr;
+ int jpegbuf_size;
+ int bufindex;
+ bool mIs_Verifier = false;
+ char prop_value[PROPERTY_VALUE_MAX];
+ memset(&JpegInInfo,0x00,sizeof(JpegEncInInfo));
+ memset(&JpegOutInfo,0x00,sizeof(JpegEncOutInfo));
+ memset(&exifInfo,0x00,sizeof(exifInfo));
+ quality = mPictureInfo.quality;
+ /*only for passing cts yzm*/
+ property_get("sys.cts_camera.status",prop_value, "false");
+ if(!strcmp(prop_value,"true")){
+ thumbquality = mPictureInfo.thumbquality;
+ thumbwidth = mPictureInfo.thumbwidth;
+ thumbheight = mPictureInfo.thumbheight;
+ }else{
+ thumbquality = 70;
+ thumbwidth = 160;
+ thumbheight = 128;
+ }
+ rotation = mPictureInfo.rotation;
+
+ jpeg_w = mPictureInfo.w;
+ jpeg_h = mPictureInfo.h;
+ /*get gps information*/
+ altitude = mPictureInfo.altitude;
+ latitude = mPictureInfo.latitude;
+ longtitude = mPictureInfo.longtitude;
+ timestamp = mPictureInfo.timestamp;
+ getMethod = mPictureInfo.getMethod;//getMethod : len <= 32
+
+ picfmt = mPictureInfo.fmt;
+
+ if(frame->res)
+ mIs_Verifier = *((bool*)frame->res); //zyh,don't crop for cts FOV
+
+ if(picfmt ==V4L2_PIX_FMT_RGB565){
+ encodetype = HWJPEGENC_RGB565;
+ pictureSize = jpeg_w * jpeg_h *2;
+ }
+ else{
+ encodetype = JPEGENC_YUV420_SP;
+ jpeg_buf_w = jpeg_w;
+ jpeg_buf_h = jpeg_h;
+ if(jpeg_buf_w%16)
+ jpeg_buf_w += 8;
+ if(jpeg_buf_h%16)
+ jpeg_buf_h += 8;
+ pictureSize = jpeg_buf_w * jpeg_buf_h * 3/2;
+ }
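+    //round pictureSize up to the next 4KB boundary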
+ if (pictureSize & 0xfff) {
+ pictureSize = (pictureSize & 0xfffff000) + 0x1000;
+ }
+
+ jpegbuf_size = 0x1400000; //pictureSize;
+ #if (JPEG_BUFFER_DYNAMIC == 1)
+ //create raw & jpeg buffer
+ ret = mRawBufferProvider->createBuffer(1, pictureSize, RAWBUFFER,mRawBufferProvider->is_cif_driver);
+ if(ret < 0){
+ LOGE("mRawBufferProvider->createBuffer FAILED");
+ goto captureEncProcessPicture_exit;
+ }
+ ret = mJpegBufferProvider->createBuffer(1, jpegbuf_size,JPEGBUFFER,mJpegBufferProvider->is_cif_driver);
+ if(ret < 0){
+ LOGE("mJpegBufferProvider->createBuffer FAILED");
+ goto captureEncProcessPicture_exit;
+ }
+ #endif
+
+ bufindex=mRawBufferProvider->getOneAvailableBuffer(&rawbuf_phy, &rawbuf_vir);
+ if(bufindex < 0){
+ LOGE("mRawBufferProvider->getOneAvailableBuffer FAILED");
+ goto captureEncProcessPicture_exit;
+ }
+
+ #if defined(RK_DRM_GRALLOC) // should use fd
+ rawbuf_phy = mRawBufferProvider->getBufShareFd(bufindex);
+ #endif
+
+ //mRawBufferProvider->setBufferStatus(bufindex, 1);
+ bufindex=mJpegBufferProvider->getOneAvailableBuffer(&jpegbuf_phy, &jpegbuf_vir);
+ if(bufindex < 0){
+ LOGE("mJpegBufferProvider->getOneAvailableBuffer FAILED");
+ goto captureEncProcessPicture_exit;
+ }
+
+ #if defined(RK_DRM_GRALLOC) // should use fd
+ jpegbuf_phy = mJpegBufferProvider->getBufShareFd(bufindex);
+ #endif
+
+ g_rawbufProvider = mRawBufferProvider;
+ g_jpegbufProvider = mJpegBufferProvider;
+
+
+ input_phy_addr = frame->phy_addr;
+ input_vir_addr = frame->vir_addr;
+ output_phy_addr = jpegbuf_phy;
+ output_vir_addr = jpegbuf_vir;
+ LOG1("input_phy_addr:%x,rawbuf_phy:%x,rawbuf_vir:%x;jpegbuf_phy = %x,jpegbuf_vir = %x",input_phy_addr,rawbuf_phy,rawbuf_vir,jpegbuf_phy,jpegbuf_vir);
+
+ if (mMsgTypeEnabled & CAMERA_MSG_SHUTTER)
+ callback_notify_shutter();
+
+ LOGD("captureEncProcessPicture,rotation = %d,jpeg_w = %d,jpeg_h = %d",rotation,jpeg_w,jpeg_h);
+
+    //2. copy to output buffer for mirror and flip
+    /*ddl@rock-chips.com: v0.4.7*/
+   // bool rotat_180 = false; //used by ipp
+    //frame->phy_addr == -1 only for isp soc cameras that use iommu, so handle it here (ugly...)
+ if((frame->frame_fmt == V4L2_PIX_FMT_NV12) && ((frame->frame_width != mPictureInfo.w) || (frame->frame_height != mPictureInfo.h) || (frame->zoom_value != 100) || (long)frame->phy_addr == -1)){
+ output_phy_addr = rawbuf_phy;
+ output_vir_addr = rawbuf_vir;
+ #if 0
+ arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_NV12, (char*)(frame->vir_addr),
+ (char*)rawbuf_vir,frame->frame_width, frame->frame_height,
+ jpeg_w, jpeg_h,false,frame->zoom_value);
+ #else
+ #if defined(TARGET_RK3188)
+ rk_camera_zoom_ipp(V4L2_PIX_FMT_NV12, (int)(frame->phy_addr), frame->frame_width, frame->frame_height,(int)rawbuf_phy,frame->zoom_value);
+ #else
+ if(g_ctsV_flag)
+ {
+ mIs_Verifier = true;
+ }
+ else
+ {
+ mIs_Verifier = false;
+ }
+ #if defined(RK_DRM_GRALLOC)
+ if (frame->vir_addr_valid) {
+ err = rga_nv12_scale_crop(frame->frame_width, frame->frame_height,
+ (char*)(frame->vir_addr), (short int *)rawbuf_vir,
+ jpeg_w,jpeg_h,frame->zoom_value,false,!mIs_Verifier,false,0,frame->vir_addr_valid);
+ } else {
+ err = rga_nv12_scale_crop(frame->frame_width, frame->frame_height,
+ (char*)(frame->phy_addr), (short int *)rawbuf_phy,
+ jpeg_w,jpeg_h,frame->zoom_value,false,!mIs_Verifier,false,0,frame->vir_addr_valid);
+ }
+ if (err < 0)
+ arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_NV12, (char*)(frame->vir_addr),
+ (char*)rawbuf_vir,frame->frame_width, frame->frame_height,
+ jpeg_w, jpeg_h,false,frame->zoom_value);
+ #else
+ rga_nv12_scale_crop(frame->frame_width, frame->frame_height,
+ (char*)(frame->vir_addr), (short int *)rawbuf_vir,
+ jpeg_w,jpeg_h,frame->zoom_value,false,!mIs_Verifier,false);
+ #endif
+ #endif
+ #endif
+ input_phy_addr = output_phy_addr;
+ input_vir_addr = output_vir_addr;
+ mRawBufferProvider->flushBuffer(0);
+ }
+ /*if ((frame->frame_fmt != picfmt) || (frame->frame_width!= jpeg_w) || (frame->frame_height != jpeg_h)
+ || (frame->zoom_value != 100)) {
+
+ output_phy_addr = rawbuf_phy;
+ output_vir_addr = rawbuf_vir;
+ //do rotation,scale,zoom,fmt convert.
+ if(cameraFormatConvert(frame->frame_fmt, picfmt, NULL,
+ (char*)input_vir_addr,(char*)output_vir_addr,0,0,jpeg_w*jpeg_h*2,
+ jpeg_w, jpeg_h,jpeg_w,jpeg_w, jpeg_h,jpeg_w,false)==0)
+ // arm_yuyv_to_nv12(frame->frame_width, frame->frame_height,(char*)input_vir_addr, (char*)output_vir_addr);
+ {
+ //change input addr
+ input_phy_addr = output_phy_addr;
+ input_vir_addr = output_vir_addr;
+ mRawBufferProvider->flushBuffer(0);
+ }
+ }*/
+
+ if((mMsgTypeEnabled & (CAMERA_MSG_RAW_IMAGE))|| (mMsgTypeEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)) {
+ copyAndSendRawImage((void*)input_vir_addr, pictureSize);
+ }
+
+ output_phy_addr = jpegbuf_phy;
+ output_vir_addr = jpegbuf_vir;
+
+#if 1
+
+ //3. src data will be changed by mirror and flip algorithm
+ //use jpeg buffer as line buffer
+
+ if(rotation == 0)
+ {
+ JpegInInfo.rotateDegree = DEGREE_0;
+ }
+ else if(rotation == 180)
+ {
+ YuvData_Mirror_Flip(V4L2_PIX_FMT_NV12, (char*)input_vir_addr,
+ (char*) jpegbuf_vir, jpeg_w, jpeg_h);
+ mRawBufferProvider->flushBuffer(0);
+ JpegInInfo.rotateDegree = DEGREE_0;
+ }
+ else if(rotation == 90)
+ {
+ YuvData_Mirror_Flip(V4L2_PIX_FMT_NV12, (char*)input_vir_addr,
+ (char*) jpegbuf_vir, jpeg_w, jpeg_h);
+ mRawBufferProvider->flushBuffer(0);
+ JpegInInfo.rotateDegree = DEGREE_270;
+ }
+ else if(rotation == 270)
+ {
+ JpegInInfo.rotateDegree = DEGREE_270;
+ }
+#else
+ //set rotation in exif file
+ JpegInInfo.rotateDegree = DEGREE_0;
+#endif
+ JpegInInfo.frameHeader = 1;
+ JpegInInfo.yuvaddrfor180 = (int)NULL;
+ JpegInInfo.type = encodetype;
+ JpegInInfo.y_rgb_addr = input_phy_addr;
+ JpegInInfo.uv_addr = input_phy_addr + jpeg_w*jpeg_h;
+ //JpegInInfo.y_vir_addr = input_vir_addr;
+ //JpegInInfo.uv_vir_addr = input_vir_addr + jpeg_w*jpeg_h;
+ JpegInInfo.inputW = jpeg_w;
+ JpegInInfo.inputH = jpeg_h;
+ JpegInInfo.pool = pool;
+ JpegInInfo.qLvl = quality/10;
+ if (JpegInInfo.qLvl < 5) {
+ JpegInInfo.qLvl = 5;
+ }
+ JpegInInfo.thumbqLvl = thumbquality /10;
+ if (JpegInInfo.thumbqLvl < 5) {
+ JpegInInfo.thumbqLvl = 5;
+ }
+ if(JpegInInfo.thumbqLvl >10) {
+ JpegInInfo.thumbqLvl = 9;
+ }
+#if defined(TARGET_RK3188)
+ if(0)
+#else
+ if(thumbwidth !=0 && thumbheight !=0)
+#endif
+ {
+ JpegInInfo.doThumbNail = 1; //insert thumbnail at APP0 extension
+ JpegInInfo.thumbData = NULL; //if thumbData is NULL, do scale, the type above can not be 420_P or 422_UYVY
+ JpegInInfo.thumbDataLen = -1;
+ JpegInInfo.thumbW = thumbwidth;
+ JpegInInfo.thumbH = thumbheight;
+ JpegInInfo.y_vir_addr = (unsigned char*)input_vir_addr;
+ JpegInInfo.uv_vir_addr = (unsigned char*)input_vir_addr+jpeg_w*jpeg_h;
+ }else{
+        JpegInInfo.doThumbNail = 0;          //do not insert thumbnail
+ JpegInInfo.thumbData = NULL; //if thumbData is NULL, do scale, the type above can not be 420_P or 422_UYVY
+ JpegInInfo.thumbDataLen = -1;
+ JpegInInfo.thumbW = 0;
+ JpegInInfo.thumbH = 0;
+ JpegInInfo.y_vir_addr = 0;
+ JpegInInfo.uv_vir_addr = 0;
+ }
+
+ Jpegfillexifinfo(&exifInfo,mPictureInfo);
+ JpegInInfo.exifInfo =&exifInfo;
+
+ if((longtitude!=-1)&& (latitude!=-1)&&(timestamp!=-1)&&(getMethod!=NULL)) {
+ Jpegfillgpsinfo(&gpsInfo,mPictureInfo);
+ memset(gpsprocessmethod,0,45);
+ memcpy(gpsprocessmethod,ExifAsciiPrefix,8);
+ memcpy(gpsprocessmethod+8,getMethod,strlen(getMethod)+1);
+ gpsInfo.GpsProcessingMethodchars = strlen(getMethod)+1+8;
+ gpsInfo.GPSProcessingMethod = gpsprocessmethod;
+ LOG1("\nGpsProcessingMethodchars =%d",gpsInfo.GpsProcessingMethodchars);
+ JpegInInfo.gpsInfo = &gpsInfo;
+ } else {
+ JpegInInfo.gpsInfo = NULL;
+ }
+
+ JpegOutInfo.outBufPhyAddr = output_phy_addr;
+ JpegOutInfo.outBufVirAddr = (unsigned char*)output_vir_addr;
+ JpegOutInfo.outBuflen = jpegbuf_size;
+ JpegOutInfo.jpegFileLen = 0x00;
+ JpegOutInfo.cacheflush= /*jpegEncFlushBufferCb*/NULL;
+ LOG1("input_phy_addr %d,JpegOutInfo.outBufPhyAddr:%x,JpegOutInfo.outBufVirAddr:%p,jpegbuf_size:%d",input_phy_addr,JpegOutInfo.outBufPhyAddr,JpegOutInfo.outBufVirAddr,jpegbuf_size);
+
+#if defined(TARGET_RK322x)
+ generateJPEG((uint8_t*)input_vir_addr,jpeg_w,jpeg_h,JpegOutInfo.outBufVirAddr,&(JpegOutInfo.jpegFileLen));
+ copyAndSendCompressedImage((void*)JpegOutInfo.outBufVirAddr,JpegOutInfo.jpegFileLen);
+#else
+
+LOG2("\nJpegInInfo.y_rgb_addr=0x%x\n"
+ "JpegInInfo.uv_addr=0x%x\n"
+ "JpegInInfo.inputW=0x%x\n"
+ "JpegInInfo.inputH=0x%x\n"
+ "JpegInInfo.pool=0x%x\n"
+ "JpegInInfo.qLvl=0x%x\n"
+ "\n"
+ "JpegOutInfo.outBufPhyAddr=0x%x\n"
+ "JpegOutInfo.outBufVirAddr=0x%x\n"
+ "JpegOutInfo.outBuflen=0x%x\n",
+ JpegInInfo.y_rgb_addr,
+ JpegInInfo.uv_addr,
+ JpegInInfo.inputW,
+ JpegInInfo.inputH,
+ JpegInInfo.pool,
+ JpegInInfo.qLvl,
+ JpegOutInfo.outBufPhyAddr,
+ JpegOutInfo.outBufVirAddr,
+ JpegOutInfo.outBuflen);
+
+ err = hw_jpeg_encode(&JpegInInfo, &JpegOutInfo);
+
+ if ((err < 0) || (JpegOutInfo.jpegFileLen <=0x00)) {
+ LOGE("%s(%d): hw_jpeg_encode Failed, err: %d JpegOutInfo.jpegFileLen:0x%x\n",__FUNCTION__,__LINE__,
+ err, JpegOutInfo.jpegFileLen);
+ goto captureEncProcessPicture_exit;
+ } else {
+ copyAndSendCompressedImage((void*)JpegOutInfo.outBufVirAddr,JpegOutInfo.jpegFileLen);
+ }
+#endif
+
+captureEncProcessPicture_exit:
+ //destroy raw and jpeg buffer
+ #if (JPEG_BUFFER_DYNAMIC == 1)
+ mRawBufferProvider->freeBuffer();
+ mJpegBufferProvider->freeBuffer();
+ #endif
+ if(err < 0) {
+ LOGE("%s(%d) take picture erro!!!,",__FUNCTION__,__LINE__);
+ if (mNotifyCb && (mMsgTypeEnabled & CAMERA_MSG_ERROR)) {
+ callback_notify_error();
+ }
+ }
+
+
+return ret;
+}
+
+int AppMsgNotifier::processPreviewDataCb(FramInfo_s* frame){
+ int ret = 0;
+ int pixFmt;
+ sp<MemoryHeapBase> mHeap;
+ mDataCbLock.lock();
+ if ((mMsgTypeEnabled & CAMERA_MSG_PREVIEW_FRAME) && mDataCb) {
+ //compute request mem size
+ int tempMemSize = 0;
+ int tempMemSize_crop = 0;
+ //request bufer
+ camera_memory_t* tmpPreviewMemory = NULL;
+ camera_memory_t* tmpNV12To420pMemory = NULL;
+ bool isYUV420p = false;
+
+ if (strcmp(mPreviewDataFmt,android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ tempMemSize = mPreviewDataW*mPreviewDataH*2;
+ tempMemSize_crop = mPreviewDataW*mPreviewDataH*2;
+ } else if (strcmp(mPreviewDataFmt,android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ pixFmt =V4L2_PIX_FMT_NV21;
+ tempMemSize = mPreviewDataW*mPreviewDataH*3/2;
+ tempMemSize_crop = mPreviewDataW*mPreviewDataH*3/2;
+ } else if (strcmp(mPreviewDataFmt,android::CameraParameters::PIXEL_FORMAT_YUV422SP) == 0) {
+ tempMemSize = mPreviewDataW*mPreviewDataH*2;
+ tempMemSize_crop = mPreviewDataW*mPreviewDataH*2;
+ } else if(strcmp(mPreviewDataFmt,android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0){
+ pixFmt =V4L2_PIX_FMT_NV12;
+ tempMemSize = ((mPreviewDataW+15)&0xfffffff0)*mPreviewDataH
+ +((mPreviewDataW/2+15)&0xfffffff0)*mPreviewDataH;
+ tempMemSize_crop = mPreviewDataW*mPreviewDataH*3/2;
+ isYUV420p = true;
+ }else {
+ LOGE("%s(%d): pixel format %s is unknow!",__FUNCTION__,__LINE__,mPreviewDataFmt);
+ }
+ mDataCbLock.unlock();
+
+ #if defined(RK_DRM_GRALLOC)
+ mHeap = new MemoryHeapBase(tempMemSize_crop * 1);
+ tmpPreviewMemory = mRequestMemory(mHeap->getHeapID(), tempMemSize_crop, 1, NULL);
+ #else
+ tmpPreviewMemory = mRequestMemory(-1, tempMemSize_crop, 1, NULL);
+ #endif
+ if (tmpPreviewMemory) {
+#if 0
+ //QQ voip need NV21
+ arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_NV21, (char*)(frame->vir_addr),
+ (char*)tmpPreviewMemory->data,frame->frame_width, frame->frame_height,mPreviewDataW, mPreviewDataH,mDataCbFrontMirror,frame->zoom_value);
+#else
+ if(g_ctsV_flag &&((mPreviewDataW==176&&mPreviewDataH==144)||(mPreviewDataW==352&&mPreviewDataH==288))) {
+ arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, pixFmt, (char*)(frame->vir_addr),
+ (char*)tmpPreviewMemory->data,frame->frame_width, frame->frame_height,
+ mPreviewDataW, mPreviewDataH,false,frame->zoom_value);
+ }else{
+ #if defined(RK_DRM_GRALLOC)
+ if (!strcmp("com.tencent.mobileqq:MSF",mCallingProcess)
+ || !strcmp("com.tencent.mobileqq:peak",mCallingProcess))
+                    //workaround: fix qq self-capture short video problem.
+ arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_NV21, (char*)(frame->vir_addr),
+ (char*)tmpPreviewMemory->data,frame->frame_width, frame->frame_height,mPreviewDataW, mPreviewDataH,
+ mDataCbFrontMirror,frame->zoom_value);
+ else
+ rga_nv12_scale_crop(frame->frame_width, frame->frame_height,
+ (char*)(frame->vir_addr), (short int *)(tmpPreviewMemory->data),
+ mPreviewDataW,mPreviewDataH,frame->zoom_value,mDataCbFrontMirror,true,!isYUV420p,0,true);
+ #else
+ rga_nv12_scale_crop(frame->frame_width, frame->frame_height,
+ (char*)(frame->vir_addr), (short int *)(tmpPreviewMemory->data),
+ mPreviewDataW,mPreviewDataH,frame->zoom_value,mDataCbFrontMirror,true,!isYUV420p);
+ #endif
+ }
+#endif
+ //arm_yuyv_to_nv12(frame->frame_width, frame->frame_height,(char*)(frame->vir_addr), (char*)buf_vir);
+
+ if (strcmp(mPreviewDataFmt,android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ tmpNV12To420pMemory = mRequestMemory(-1, tempMemSize, 1, NULL);
+ if(tmpNV12To420pMemory){
+ cameraFormatConvert(V4L2_PIX_FMT_NV12,0,mPreviewDataFmt,
+ (char*)tmpPreviewMemory->data,(char*)tmpNV12To420pMemory->data,0,0,tempMemSize,
+ mPreviewDataW,mPreviewDataH,mPreviewDataW,
+ //frame->frame_width,frame->frame_height,frame->frame_width,false);
+ mPreviewDataW,mPreviewDataH,mPreviewDataW,mDataCbFrontMirror);
+ tmpPreviewMemory->release(tmpPreviewMemory);
+ tmpPreviewMemory = tmpNV12To420pMemory;
+ } else {
+ LOGE("%s(%d): mPreviewMemory create failed",__FUNCTION__,__LINE__);
+ }
+ }
+ if(mDataCbFrontFlip) {
+ LOG1("----------------need flip -------------------");
+ YuvData_Mirror_Flip(V4L2_PIX_FMT_NV12, (char*) tmpPreviewMemory->data,
+ (char*)frame->vir_addr,mPreviewDataW, mPreviewDataH);
+ }
+
+ callback_preview_frame(tmpPreviewMemory);
+ } else {
+ LOGE("%s(%d): mPreviewMemory create failed",__FUNCTION__,__LINE__);
+ }
+ } else {
+ mDataCbLock.unlock();
+ LOG1("%s(%d): needn't to send preview datacb",__FUNCTION__,__LINE__);
+ }
+ return ret;
+}
+
+int AppMsgNotifier::processVideoCb(FramInfo_s* frame){
+ int ret = 0,buf_index = -1;
+ long buf_phy = 0,buf_vir = 0;
+
+ if(mIsStoreMD == false){
+ //get one available buffer
+ if((buf_index = mVideoBufferProvider->getOneAvailableBuffer(&buf_phy,&buf_vir)) == -1){
+ ret = -1;
+ LOGE("%s(%d):no available buffer",__FUNCTION__,__LINE__);
+ return ret;
+ }
+
+ mVideoBufferProvider->setBufferStatus(buf_index, 1);
+ if((frame->frame_fmt == V4L2_PIX_FMT_NV12)){
+ #if 0
+ buf_vir = mGrallocVideoBuf[buf_index]->vir_addr;
+ arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_NV12, (char*)(frame->vir_addr),
+ (char*)buf_vir,frame->frame_width, frame->frame_height,
+ mRecordW, mRecordH,false,frame->zoom_value);
+ #else
+ if(g_ctsV_flag &&((mRecordW==176&&mRecordH==144)||(mRecordW==352&&mRecordH==288))) {
+ arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_NV12, (char*)(frame->vir_addr),
+ (char*)buf_vir,frame->frame_width, frame->frame_height,
+ mRecordW, mRecordH,false,frame->zoom_value);
+ }else{
+ #if defined(RK_DRM_GRALLOC)
+ if (frame->vir_addr_valid){
+ rga_nv12_scale_crop(frame->frame_width, frame->frame_height,
+ (char*)(frame->vir_addr), (short int *)buf_vir,
+ mRecordW,mRecordH,frame->zoom_value,false,true,false,0,frame->vir_addr_valid);
+ } else{
+ long fd = mVideoBufferProvider->getBufShareFd(buf_index);
+ LOGD(">>>ZYL>>fd:0x%x",fd);
+ rga_nv12_scale_crop(frame->frame_width, frame->frame_height,
+ (char*)(frame->phy_addr), (short int *)fd,
+ mRecordW,mRecordH,frame->zoom_value,false,true,false,0,frame->vir_addr_valid);
+ }
+ #else
+ rga_nv12_scale_crop(frame->frame_width, frame->frame_height,
+ (char*)(frame->vir_addr), (short int *)buf_vir,
+ mRecordW,mRecordH,frame->zoom_value,false,true,false);
+ #endif
+ }
+ #endif
+
+ mVideoBufferProvider->flushBuffer(buf_index);
+ callback_video_frame(mVideoBufs[buf_index]);
+ LOG1("EncPicture:V4L2_PIX_FMT_NV12,arm_camera_yuv420_scale_arm");
+ }
+ }else{
+ buf_index = grallocVideoBufGetAvailable();
+ if(buf_index >= CONFIG_CAMERA_VIDEOENC_BUF_CNT)
+ {
+ LOGE("%s(%d):no available buffer",__FUNCTION__,__LINE__);
+ ret = -1;
+ return ret;
+ }
+ mGrallocVideoBuf[buf_index]->buf_state = 1;
+
+ if((frame->frame_fmt == V4L2_PIX_FMT_NV12)){
+ #if 0
+ buf_vir = mGrallocVideoBuf[buf_index]->vir_addr;
+ arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_NV12, (char*)(frame->vir_addr),
+ (char*)buf_vir,frame->frame_width, frame->frame_height,
+ mRecordW, mRecordH,false,frame->zoom_value);
+ #else
+
+ #if defined(RK_DRM_GRALLOC)
+ if (frame->vir_addr_valid){
+ rga_nv12_scale_crop(frame->frame_width, frame->frame_height,
+ (char*)(frame->vir_addr), (short int*)(mGrallocVideoBuf[buf_index]->vir_addr),
+ mRecordW,mRecordH,frame->zoom_value,false,true,false,0,frame->vir_addr_valid);
+ } else{
+ rga_nv12_scale_crop(frame->frame_width, frame->frame_height,
+ (char*)(frame->phy_addr), (short int*)(mGrallocVideoBuf[buf_index]->phy_addr),
+ mRecordW,mRecordH,frame->zoom_value,false,true,false,0,frame->vir_addr_valid);
+ }
+ #else
+ rga_nv12_scale_crop(frame->frame_width, frame->frame_height,
+ (char*)(frame->vir_addr), (short int*)(mGrallocVideoBuf[buf_index]->vir_addr),
+ mRecordW,mRecordH,frame->zoom_value,false,true,false);
+ #endif
+ #endif
+
+ callback_video_frame(mVideoBufs[buf_index]);
+ }
+
+ }
+
+ return ret;
+}
+
+int AppMsgNotifier::processFaceDetect(FramInfo_s* frame, long frame_used_flag)
+{
+ int num = 0;
+ //Mutex::Autolock lock(mFaceDecLock);
+ if(mMsgTypeEnabled & CAMERA_MSG_PREVIEW_METADATA){
+ if((frame->frame_fmt == V4L2_PIX_FMT_NV12)){
+ struct RectFace *faces = NULL;
+ int i = 0,hasSmileFace = 0;
+ int zoom_value, frame_width, frame_height;
+ nsecs_t last = systemTime(SYSTEM_TIME_MONOTONIC);
+ if(!mFaceDetecInit)
+ return -1;
+
+ (*mFaceDetectorFun.mFaceDectprepareFunc)(mFaceContext, (void*)frame->vir_addr);
+ zoom_value = frame->zoom_value;
+ frame_width = frame->frame_width;
+ frame_height = frame->frame_height;
+ mFrameProvider->returnFrame(frame->frame_index,frame_used_flag);
+
+ (*mFaceDetectorFun.mFaceDectFindFaceFun)(mFaceContext, mCurOrintation,mCurBiasAngle,0, &hasSmileFace, &faces, &num);
+ nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
+
+ nsecs_t diff = now - last;
+ LOG2("FaceDetection mCurBiasAngle %0.0f,facenum: %d, use time: %lldms\n", mCurBiasAngle,num, ns2ms(diff));
+ camera_frame_metadata_t *pMetadata = (camera_frame_metadata_t *)malloc(sizeof(camera_frame_metadata_t));
+
+ if(num > 0){
+ num = MIN(num, mFaceNum);
+ pMetadata->number_of_faces = num;
+ camera_face_t* pFace = (camera_face_t*)malloc(sizeof(camera_face_t)*num);
+ pMetadata->faces = pFace;
+ int tmpX,tmpY,tmpW,tempH;
+ for(i = 0;i < num;i++){
+ tmpX = faces[i].x;
+ tmpY = faces[i].y;
+ tmpW = faces[i].width;
+ tempH = faces[i].height;
+
+ switch(mCurOrintation){
+ case 1://90 degree
+ tmpY += tempH;
+ faces[i].x = frame_width - tmpY;
+ faces[i].y = tmpX;
+ faces[i].width = tempH;
+ faces[i].height = tmpW;
+ break;
+ case 2://180 degree
+ tmpX += tmpW;
+ tmpY += tempH;
+ faces[i].x = frame_width - tmpX;
+ faces[i].y = frame_height - tmpY;
+ break;
+ case 3://270 degree
+ tmpX += tmpW;
+ faces[i].x = tmpY ;
+ faces[i].y = frame_height - tmpX;
+ faces[i].width = tempH;
+ faces[i].height = tmpW;
+ break;
+ default:
+ break;
+ }
+ LOG2("facerect[%d]: (%d, %d, %d, %d)\n", i, faces[i].x, faces[i].y, faces[i].width, faces[i].height);
+					//map the detected rect from frame coordinates into the Android face-rect
+					//coordinate space ([-1000,1000] at zoom 100, scaled by the current zoom value)
+ int virWidtHig = 2000 * zoom_value / 100;
+
+ pFace[i].rect[0] = faces[i].x * virWidtHig/frame_width - (virWidtHig/2);
+ pFace[i].rect[1] = faces[i].y * virWidtHig/frame_height - (virWidtHig/2);
+ pFace[i].rect[2] = pFace[i].rect[0] + (faces[i].width)*virWidtHig/frame_width;
+ pFace[i].rect[3] = pFace[i].rect[1] + (faces[i].height)*virWidtHig/frame_height;
+
+ pFace[i].score = (hasSmileFace > 0)? 100:60;
+ pFace[i].id = 0;
+
+ LOG2("pFace RECT (%d,%d,%d,%d)",pFace[i].rect[0],pFace[i].rect[1],pFace[i].rect[2],pFace[i].rect[3]);
+ }
+ {
+ Mutex::Autolock lock(mFaceDecLock);
+ if(mMsgTypeEnabled & CAMERA_MSG_PREVIEW_METADATA){
+ camera_memory_t* tmpPreviewMemory = NULL;
+ callback_preview_metadata(tmpPreviewMemory, pMetadata, faces);
+ }
+ }
+ }else{
+ pMetadata->number_of_faces = 0;
+ pMetadata->faces = NULL;
+ Mutex::Autolock lock(mFaceDecLock);
+ if(mMsgTypeEnabled & CAMERA_MSG_PREVIEW_METADATA){
+ callback_preview_metadata(NULL, pMetadata, faces);
+ }
+ }
+ }
+ }
+ return num;
+}
+
+void AppMsgNotifier::stopReceiveFrame()
+{
+ //pause event and enc thread
+ flushPicture();
+ pausePreviewCBFrameProcess();
+ stopFaceDection();
+    //disable message receiving
+}
+
+void AppMsgNotifier::startReceiveFrame()
+{
+ Mutex::Autolock lock(mDataCbLock);
+
+ mRecPrevCbDataEn = true;
+ mRunningState |= STA_RECEIVE_PREVIEWCB_FRAME;
+}
+void AppMsgNotifier::dump()
+{
+
+}
+
+void AppMsgNotifier::setDatacbFrontMirrorFlipState(bool mirror,bool Flip)
+{
+ mDataCbFrontMirror = mirror;
+ mDataCbFrontFlip = Flip;
+}
+
+picture_info_s& AppMsgNotifier::getPictureInfoRef()
+{
+ return mPictureInfo;
+}
+
+void AppMsgNotifier::encProcessThread()
+{
+ bool loop = true;
+ Message_cam msg;
+ int err = 0;
+ long frame_used_flag = -1;
+
+ LOG_FUNCTION_NAME
+ while (loop) {
+ memset(&msg,0,sizeof(msg));
+ encProcessThreadCommandQ.get(&msg);
+
+ switch (msg.command)
+ {
+ case EncProcessThread::CMD_ENCPROCESS_SNAPSHOT:
+ {
+ FramInfo_s *frame = (FramInfo_s*)msg.arg2;
+
+			LOGD("%s(%d): receive CMD_ENCPROCESS_SNAPSHOT with buffer %p,mEncPictureNum=%d",__FUNCTION__,__LINE__, (void*)frame->frame_index,mEncPictureNum);
+
+ //set picture encode info
+ {
+ Mutex::Autolock lock(mPictureLock);
+
+ if((mEncPictureNum > 0) && (mRunningState & STA_RECEIVE_PIC_FRAME)){
+ mEncPictureNum--;
+ if(!mEncPictureNum)
+ //mReceivePictureFrame = false;
+ mRunningState &= ~STA_RECEIVE_PIC_FRAME;
+ }else{
+					LOGD("%s(%d): no need to encode this frame!",__FUNCTION__,__LINE__);
+ }
+ }
+ /* zyc@rock-chips.com: v0.0x22.0 */
+ captureEncProcessPicture(frame);
+
+ //return frame
+ frame_used_flag = (long)msg.arg3;
+ mFrameProvider->returnFrame(frame->frame_index,frame_used_flag);
+
+ break;
+ }
+ case EncProcessThread::CMD_ENCPROCESS_PAUSE:
+
+ {
+ Message_cam filter_msg;
+ while(!encProcessThreadCommandQ.isEmpty()){
+ encProcessThreadCommandQ.get(&filter_msg);
+ if(filter_msg.command == EncProcessThread::CMD_ENCPROCESS_SNAPSHOT){
+						FramInfo_s *frame = (FramInfo_s*)filter_msg.arg2;	//use the drained snapshot message, not the PAUSE message itself
+ mFrameProvider->returnFrame(frame->frame_index,frame->used_flag);
+ }
+ }
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ //wake up waiter
+ break;
+ }
+ case EncProcessThread::CMD_ENCPROCESS_EXIT:
+ {
+ LOGD("%s(%d): receive CMD_ENCPROCESS_EXIT",__FUNCTION__,__LINE__);
+ loop = false;
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+
+ break;
+ }
+
+ default:
+ {
+                LOGE("%s(%d): receive unknown command(0x%x)",__FUNCTION__,__LINE__,msg.command);
+ break;
+ }
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return;
+}
+
+void AppMsgNotifier::faceDetectThread()
+{
+ bool loop = true;
+ Message_cam msg;
+ FramInfo_s *frame = NULL;
+ long frame_used_flag = -1;
+    bool face_detected = false;
+ LOG_FUNCTION_NAME
+ while (loop) {
+ memset(&msg,0,sizeof(msg));
+ faceDetThreadCommandQ.get(&msg);
+ switch (msg.command)
+ {
+ case CameraAppFaceDetThread::CMD_FACEDET_FACE_DETECT:
+ mFaceDetectionDone = false;
+ frame_used_flag = (long)msg.arg3;
+ frame = (FramInfo_s*)msg.arg2;
+			LOG2("%s(%d): got new frame, index(%ld), useflag(%ld)",__FUNCTION__,__LINE__,frame->frame_index,frame_used_flag);
+ mFaceDecLock.lock();
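+			//run the face detector only on every FACEDETECT_FRAME_INTERVAL-th frame to bound CPU load;
+			//all other frames are returned to the frame provider right away below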
+ if(mFaceFrameNum++ % FACEDETECT_FRAME_INTERVAL == 0){
+ mFaceDecLock.unlock();
+ if((processFaceDetect(frame, frame_used_flag) > 0)){
+ face_detected = true;
+ }else
+ face_detected = false;
+ mFaceDecLock.lock();
+ if(!face_detected){
+ mCurBiasAngle = (mCurBiasAngle + FACEDETECT_BIAS_INERVAL);
+ if(mCurBiasAngle - (-FACEDETECT_INIT_BIAS) > 0.0001){
+ mCurBiasAngle = FACEDETECT_INIT_BIAS;
+ }
+ }
+ mFaceDecLock.unlock();
+ mFaceDetectionDone = true;
+ break;
+ }else
+ mFaceDecLock.unlock();
+ //return frame
+ mFaceDetectionDone = true;
+ mFrameProvider->returnFrame(frame->frame_index,frame_used_flag);
+ break;
+ case CameraAppFaceDetThread::CMD_FACEDET_PAUSE:
+ {
+ Message_cam filter_msg;
+ LOG1("%s(%d),receive CameraAppFaceDetThread::CMD_FACEDET_PAUSE",__FUNCTION__,__LINE__);
+ while(!faceDetThreadCommandQ.isEmpty()){
+ if(faceDetThreadCommandQ.get(&filter_msg,10) == 0){
+
+ if((filter_msg.command != CameraAppFaceDetThread::CMD_FACEDET_EXIT)
+ && (filter_msg.command != CameraAppFaceDetThread::CMD_FACEDET_PAUSE)) {
+ FramInfo_s *frame = (FramInfo_s*)filter_msg.arg2;
+ mFrameProvider->returnFrame(frame->frame_index,frame->used_flag);
+ }
+ }else{
+					//may cause a memory leak, fix me:
+					//if the msg is CMD_EVENT_PREVIEW_DATA_CB, CMD_EVENT_VIDEO_ENCING or CMD_EVENT_FACE_DETECT
+					//and reading the msg fails, the frame is not returned correctly
+ }
+ }
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ //wake up waiter
+ break;
+ }
+ case CameraAppFaceDetThread::CMD_FACEDET_EXIT:
+ {
+ loop = false;
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ break;
+ }
+ default:
+ break;
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return;
+
+}
+
+void AppMsgNotifier::eventThread()
+{
+ bool loop = true;
+ Message_cam msg;
+ int index,err = 0;
+ FramInfo_s *frame = NULL;
+ long frame_used_flag = -1;
+ LOG_FUNCTION_NAME
+ while (loop) {
+ memset(&msg,0,sizeof(msg));
+ eventThreadCommandQ.get(&msg);
+ switch (msg.command)
+ {
+ case CameraAppMsgThread::CMD_EVENT_PREVIEW_DATA_CB:
+ frame = (FramInfo_s*)msg.arg2;
+ processPreviewDataCb(frame);
+ //return frame
+ frame_used_flag = (long)msg.arg3;
+ mFrameProvider->returnFrame(frame->frame_index,frame_used_flag);
+ break;
+ case CameraAppMsgThread::CMD_EVENT_VIDEO_ENCING:
+ frame_used_flag = (long)msg.arg3;
+ frame = (FramInfo_s*)msg.arg2;
+			LOG2("%s(%d): got new frame, index(%ld), useflag(%ld)",__FUNCTION__,__LINE__,frame->frame_index,frame_used_flag);
+
+ processVideoCb(frame);
+ //return frame
+ mFrameProvider->returnFrame(frame->frame_index,frame_used_flag);
+ break;
+ case CameraAppMsgThread::CMD_EVENT_PAUSE:
+ {
+ Message_cam filter_msg;
+ LOG1("%s(%d),receive CameraAppMsgThread::CMD_EVENT_PAUSE",__FUNCTION__,__LINE__);
+ while(!eventThreadCommandQ.isEmpty()){
+ if(eventThreadCommandQ.get(&filter_msg,10) == 0){
+
+ if((filter_msg.command != CameraAppMsgThread::CMD_EVENT_EXIT)
+ && (filter_msg.command != CameraAppMsgThread::CMD_EVENT_PAUSE)) {
+ FramInfo_s *frame = (FramInfo_s*)filter_msg.arg2;
+ mFrameProvider->returnFrame(frame->frame_index,frame->used_flag);
+ }
+ }else{
+					//may cause a memory leak, fix me:
+					//if the msg is CMD_EVENT_PREVIEW_DATA_CB, CMD_EVENT_VIDEO_ENCING or CMD_EVENT_FACE_DETECT
+					//and reading the msg fails, the frame is not returned correctly
+ }
+ }
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ //wake up waiter
+ break;
+ }
+ case CameraAppMsgThread::CMD_EVENT_EXIT:
+ {
+ loop = false;
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ break;
+ }
+ default:
+ break;
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return;
+
+ }
+
+void AppMsgNotifier::callbackThread()
+{
+ bool loop = true;
+ Message_cam msg;
+ int index,err = 0;
+ camera_memory_t *frame = NULL;
+ LOG_FUNCTION_NAME
+ while (loop) {
+ memset(&msg,0,sizeof(msg));
+ callbackThreadCommandQ.get(&msg);
+ switch (msg.command)
+ {
+ case CameraAppCallbackThread::CMD_MSG_PREVIEW_FRAME:
+ {
+ LOG2("datacb: send preview frame (CAMERA_MSG_PREVIEW_FRAME).");
+ frame = (camera_memory_t*)msg.arg2;
+ if (mMsgTypeEnabled & CAMERA_MSG_PREVIEW_FRAME)
+ mDataCb(CAMERA_MSG_PREVIEW_FRAME, frame, 0,NULL,mCallbackCookie);
+ //release buffer
+ frame->release(frame);
+ }
+ break;
+
+ case CameraAppCallbackThread::CMD_MSG_SHUTTER:
+ LOG1("Notify CAMERA_MSG_SHUTTER.");
+ if (mMsgTypeEnabled & CAMERA_MSG_SHUTTER)
+ mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie);
+ break;
+
+ case CameraAppCallbackThread::CMD_MSG_RAW_IMAGE:
+ {
+ LOG1("send raw image pic.");
+ frame = (camera_memory_t*)msg.arg2;
+ if(mMsgTypeEnabled & CAMERA_MSG_RAW_IMAGE)
+ mDataCb(CAMERA_MSG_RAW_IMAGE, frame, 0, NULL, mCallbackCookie);
+ //release buffer
+ frame->release(frame);
+ }
+ break;
+
+ case CameraAppCallbackThread::CMD_MSG_RAW_IMAGE_NOTIFY:
+ LOG1("Notify CAMERA_MSG_RAW_IMAGE_NOTIFY.");
+ if (mMsgTypeEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)
+ mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie);
+ break;
+
+ case CameraAppCallbackThread::CMD_MSG_COMPRESSED_IMAGE:
+ {
+ LOG1("send compressed jpeg image pic.");
+ frame = (camera_memory_t*)msg.arg2;
+ if(mMsgTypeEnabled & CAMERA_MSG_COMPRESSED_IMAGE)
+ mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, frame, 0, NULL, mCallbackCookie);
+ //release buffer
+ frame->release(frame);
+ }
+ break;
+
+ case CameraAppCallbackThread::CMD_MSG_ERROR:
+ LOG1("Notify CAMERA_MSG_ERROR.");
+ if(mMsgTypeEnabled & CAMERA_MSG_ERROR)
+ mNotifyCb(CAMERA_MSG_ERROR, CAMERA_ERROR_SERVER_DIED,0,mCallbackCookie);
+ break;
+
+ case CameraAppCallbackThread::CMD_MSG_PREVIEW_METADATA:
+ {
+ camera_frame_metadata_t *tempMetaData;
+
+ tempMetaData = (camera_frame_metadata_t *)msg.arg2;
+ struct RectFace *faces = (struct RectFace *)msg.arg3;
+ frame = (camera_memory_t*)msg.arg4;
+ LOG1("send facedetect data, number_of_faces=%d.",tempMetaData->number_of_faces);
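+				//tempMetaData (and its faces array when non-empty) was malloc'ed in processFaceDetect();
+				//it is freed here once the callback has been delivered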
+ if(tempMetaData->number_of_faces>0){
+ if(mMsgTypeEnabled & CAMERA_MSG_PREVIEW_METADATA){
+ mDataCb(CAMERA_MSG_PREVIEW_METADATA, frame ,0,tempMetaData,mCallbackCookie);
+ }
+ mCamAdp->faceNotify(faces, &tempMetaData->number_of_faces);
+ free(tempMetaData->faces);
+ free(tempMetaData);
+ }else{
+ if(mMsgTypeEnabled & CAMERA_MSG_PREVIEW_METADATA){
+ mDataCb(CAMERA_MSG_PREVIEW_METADATA, NULL,0,tempMetaData,mCallbackCookie);
+ }
+ mCamAdp->faceNotify(NULL, &tempMetaData->number_of_faces);
+ free(tempMetaData);
+ }
+ }
+ break;
+
+ case CameraAppCallbackThread::CMD_MSG_VIDEO_FRAME:
+ {
+ LOG1("send video frame.");
+ frame = (camera_memory_t*)msg.arg2;
+ mDataCbTimestamp(systemTime(CLOCK_MONOTONIC), CAMERA_MSG_VIDEO_FRAME, frame, 0, mCallbackCookie);
+ }
+ break;
+
+ case CameraAppCallbackThread::CMD_CALLBACK_PAUSE:
+ {
+ break;
+ }
+
+ case CameraAppCallbackThread::CMD_CALLBACK_EXIT:
+ {
+ loop = false;
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ break;
+ }
+ default:
+ break;
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return;
+
+ }
+
+}
+
diff --git a/CameraHal/CameraAdapter.cpp b/CameraHal/CameraAdapter.cpp
new file mode 100755
index 0000000..2c45cf9
--- /dev/null
+++ b/CameraHal/CameraAdapter.cpp
@@ -0,0 +1,876 @@
+#include "CameraHal.h"
+namespace android{
+
+
+CameraAdapter::CameraAdapter(int cameraId):mPreviewRunning(0),
+ mCamId(cameraId)
+{
+ LOG_FUNCTION_NAME
+ mRefDisplayAdapter = NULL;
+ mRefEventNotifier = NULL;
+ mCameraPreviewThread = NULL;
+ mPreviewRunning = 0;
+ mPictureRunning = 0;
+ mPreviewBufProvider = NULL;
+ mCamDrvWidth = 0;
+ mCamDrvHeight = 0;
+ mVideoWidth = 0;
+ mVideoHeight = 0;
+ mCamDriverStream = false;
+ camera_device_error = false;
+ mPreviewFrameIndex = 0;
+ mPreviewErrorFrameCount = 0;
+ mCamFd = -1;
+ mCamDriverPreviewFmt = 0;
+ mZoomVal = 100;
+ mLibstageLibHandle = NULL;
+
+ CameraHal_SupportFmt[0] = V4L2_PIX_FMT_NV12;
+ CameraHal_SupportFmt[1] = V4L2_PIX_FMT_NV16;
+ CameraHal_SupportFmt[2] = V4L2_PIX_FMT_YUYV;
+ CameraHal_SupportFmt[3] = V4L2_PIX_FMT_RGB565;
+ CameraHal_SupportFmt[4] = 0x00;
+ CameraHal_SupportFmt[5] = 0x00;
+ memset(mCamDriverSupportFmt, 0, sizeof(mCamDriverSupportFmt));
+ cif_driver_iommu = false;
+ LOG_FUNCTION_NAME_EXIT
+}
+
+int CameraAdapter::initialize()
+{
+ int ret = -1;
+ //create focus thread
+ LOG_FUNCTION_NAME
+
+ if((ret = cameraCreate(mCamId)) < 0)
+ return ret;
+
+ initDefaultParameters(mCamId);
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+}
+CameraAdapter::~CameraAdapter()
+{
+ LOG_FUNCTION_NAME
+
+
+ if(mAutoFocusThread != NULL){
+ mAutoFocusLock.lock();
+ mExitAutoFocusThread = true;
+ mAutoFocusLock.unlock();
+ mAutoFocusCond.signal();
+ mAutoFocusThread->requestExitAndWait();
+ mAutoFocusThread.clear();
+ mAutoFocusThread = NULL;
+ }
+
+ if(mPreviewBufProvider)
+ mPreviewBufProvider->freeBuffer();
+
+ if (mCamFd > 0) {
+ close(mCamFd);
+ mCamFd = -1;
+ }
+
+ this->cameraDestroy();
+ LOG_FUNCTION_NAME_EXIT
+}
+
+void CameraAdapter::setDisplayAdapterRef(DisplayAdapter& refDisplayAdap)
+{
+ mRefDisplayAdapter = &refDisplayAdap;
+}
+void CameraAdapter::setEventNotifierRef(AppMsgNotifier& refEventNotify)
+{
+ mRefEventNotifier = &refEventNotify;
+}
+
+int CameraAdapter::faceNotify(struct RectFace* faces, int* num)
+{
+ return 0;
+}
+void CameraAdapter::setPreviewBufProvider(BufferProvider* bufprovider)
+{
+ mPreviewBufProvider = bufprovider;
+}
+CameraParameters & CameraAdapter::getParameters()
+{
+ return mParameters;
+}
+int CameraAdapter::getCurPreviewState(int *drv_w,int *drv_h)
+{
+ *drv_w = mCamDrvWidth;
+ *drv_h = mCamDrvHeight;
+ return mPreviewRunning;
+}
+int CameraAdapter::getCurVideoSize(int *video_w, int *video_h)
+{
+ *video_w = mVideoWidth;
+ *video_h = mVideoHeight;
+ return mPreviewRunning;
+
+}
+
+bool CameraAdapter::isNeedToRestartPreview()
+{
+ int preferPreviewW=0,preferPreviewH=0;
+ int previewFrame2AppW=0,previewFrame2AppH=0;
+ bool ret = false;
+ char prop_value[PROPERTY_VALUE_MAX];
+
+ mParameters.getPreviewSize(&previewFrame2AppW, &previewFrame2AppH);
+	//case 1: setVideoSize is supported
+
+ if(mPreviewRunning && (mParameters.get(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES) != NULL)) {
+ mParameters.getVideoSize(&mVideoWidth,&mVideoHeight);
+ }else{
+ mVideoWidth = -1;
+ mVideoHeight = -1;
+ }
+
+ if(previewFrame2AppW >= mVideoWidth){
+ preferPreviewW = previewFrame2AppW;
+ preferPreviewH = previewFrame2AppH;
+ } else {
+ preferPreviewW = mVideoWidth;
+ preferPreviewH = mVideoHeight;
+ }
+	//setVideoSize is not supported
+ if(mVideoWidth == -1){
+ mVideoWidth = preferPreviewW;
+ mVideoHeight = preferPreviewH;
+ }
+
+ LOGD("mPreviewFrame2AppW (%dx%d)",previewFrame2AppW,previewFrame2AppH);
+ LOGD("mCamPreviewW (%dx%d)",mCamPreviewW,mCamPreviewH);
+ LOGD("video width (%dx%d)",mVideoWidth,mVideoHeight);
+
+ if(mPreviewRunning && ((preferPreviewW != mCamPreviewW) || (preferPreviewH != mCamPreviewH)) && (mVideoWidth != -1) && (!mIsCtsTest))
+ {
+ ret = true;
+ }
+ return ret;
+}
+
+void CameraAdapter::dump(int cameraId)
+{
+ LOG2("%s CameraAdapter dump cameraId(%d)\n", __FUNCTION__,cameraId);
+
+}
+
+void CameraAdapter::getCameraParamInfo(cameraparam_info_s &paraminfo)
+{
+
+}
+
+bool CameraAdapter::getFlashStatus()
+{
+ return false;
+}
+status_t CameraAdapter::startPreview(int preview_w,int preview_h,int w, int h, int fmt,bool is_capture)
+{
+
+ //create buffer
+ LOG_FUNCTION_NAME
+ unsigned int frame_size = 0,i;
+ struct bufferinfo_s previewbuf;
+ int ret = 0,buf_count = CONFIG_CAMERA_PREVIEW_BUF_CNT;
+ LOGD("%s%d:preview_w = %d,preview_h = %d,drv_w = %d,drv_h = %d",__FUNCTION__,__LINE__,preview_w,preview_h,w,h);
+ mPreviewFrameIndex = 0;
+ mPreviewErrorFrameCount = 0;
+ switch (mCamDriverPreviewFmt)
+ {
+ case V4L2_PIX_FMT_NV12:
+ case V4L2_PIX_FMT_YUV420:
+ frame_size = ((w+15)&(~15))*((h+15)&(~15))*3/2;
+ break;
+ case V4L2_PIX_FMT_NV16:
+ case V4L2_PIX_FMT_YUV422P:
+ default:
+ frame_size = w*h*2;
+ break;
+ }
+
+ //for test capture
+ if(is_capture){
+ buf_count = 1;
+ }
+ if(mPreviewBufProvider->createBuffer(buf_count,frame_size,PREVIEWBUFFER,mPreviewBufProvider->is_cif_driver) < 0)
+ {
+ LOGE("%s%d:create preview buffer failed",__FUNCTION__,__LINE__);
+ return -1;
+ }
+ //set size
+ if(cameraSetSize(w, h, mCamDriverPreviewFmt,is_capture)<0){
+ ret = -1;
+ goto start_preview_end;
+ }
+ mCamDrvWidth = w;
+ mCamDrvHeight = h;
+ mCamPreviewH = preview_h;
+ mCamPreviewW = preview_w;
+
+ memset(mPreviewFrameInfos,0,sizeof(mPreviewFrameInfos));
+ //camera start
+ if(cameraStart() < 0){
+ ret = -1;
+ goto start_preview_end;
+ }
+ //new preview thread
+ mCameraPreviewThread = new CameraPreviewThread(this);
+ mCameraPreviewThread->run("CameraPreviewThread",ANDROID_PRIORITY_DISPLAY);
+
+ mPreviewRunning = 1;
+ LOGD("%s(%d):OUT",__FUNCTION__,__LINE__);
+ return 0;
+start_preview_end:
+ mCamDrvWidth = 0;
+ mCamDrvHeight = 0;
+ mCamPreviewH = 0;
+ mCamPreviewW = 0;
+ return ret;
+
+}
+status_t CameraAdapter::stopPreview()
+{
+ LOGD("%s(%d):IN",__FUNCTION__,__LINE__);
+ if(mPreviewRunning == 1){
+ //camera stop
+ cameraStream(false);
+
+ //quit preview thread
+ if(mCameraPreviewThread != NULL){
+ mCameraPreviewThread->requestExitAndWait();
+ mCameraPreviewThread.clear();
+ mCameraPreviewThread = NULL;
+ }
+
+ cameraStop();
+ //destroy preview buffer
+ if(mPreviewBufProvider)
+ mPreviewBufProvider->freeBuffer();
+ mPreviewRunning = 0;
+ }
+ mCamDrvWidth = 0;
+ mCamDrvHeight = 0;
+ LOGD("%s(%d):OUT",__FUNCTION__,__LINE__);
+ return 0;
+}
+int CameraAdapter::setParameters(const CameraParameters &params_set,bool &isRestartValue)
+{
+
+ return 0;
+}
+void CameraAdapter::initDefaultParameters(int camFd)
+{
+
+}
+status_t CameraAdapter::autoFocus()
+{
+
+ mAutoFocusCond.signal();
+ return 0;
+}
+status_t CameraAdapter::cancelAutoFocus()
+{
+ return 0;
+}
+int CameraAdapter::getCameraFd()
+{
+ return mCamFd;
+}
+int CameraAdapter::flashcontrol()
+{
+ return 0;
+}
+
+//talk to driver
+//open camera
+int CameraAdapter::cameraCreate(int cameraId)
+{
+ int err = 0,iCamFd;
+ int pmem_fd,i,j,cameraCnt;
+ char cam_path[20];
+ char cam_num[3];
+ char *ptr_tmp;
+ struct v4l2_fmtdesc fmtdesc;
+ char *cameraDevicePathCur = NULL;
+ char decode_name[50];
+
+ LOG_FUNCTION_NAME
+
+ memset(decode_name,0x00,sizeof(decode_name));
+ mLibstageLibHandle = dlopen("libstagefright.so", RTLD_NOW);
+ if (mLibstageLibHandle == NULL) {
+ LOGE("%s(%d): open libstagefright.so fail",__FUNCTION__,__LINE__);
+ } else {
+ mMjpegDecoder.get = (getMjpegDecoderFun)dlsym(mLibstageLibHandle, "get_class_On2JpegDecoder");
+ }
+
+ if (mMjpegDecoder.get == NULL) {
+ if (mLibstageLibHandle != NULL)
+ dlclose(mLibstageLibHandle); /* ddl@rock-chips.com: v0.4.0x27 */
+ mLibstageLibHandle = dlopen("librk_vpuapi.so", RTLD_NOW);
+ if (mLibstageLibHandle == NULL) {
+ LOGE("%s(%d): open librk_vpuapi.so fail",__FUNCTION__,__LINE__);
+ } else {
+ mMjpegDecoder.get = (getMjpegDecoderFun)dlsym(mLibstageLibHandle, "get_class_RkJpegDecoder");
+ if (mMjpegDecoder.get == NULL) {
+ LOGE("%s(%d): dlsym get_class_RkJpegDecoder fail",__FUNCTION__,__LINE__);
+ } else {
+ strcat(decode_name,"dec_oneframe_RkJpegDecoder");
+ }
+ }
+ } else {
+ strcat(decode_name,"dec_oneframe_class_RkJpegDecoder");
+ }
+
+
+ if (mMjpegDecoder.get != NULL) {
+ mMjpegDecoder.decoder = mMjpegDecoder.get();
+ if (mMjpegDecoder.decoder==NULL) {
+ LOGE("%s(%d): get mjpeg decoder failed",__FUNCTION__,__LINE__);
+ } else {
+ mMjpegDecoder.destroy =(destroyMjpegDecoderFun)dlsym(mLibstageLibHandle, "destroy_class_RkJpegDecoder");
+ if (mMjpegDecoder.destroy == NULL)
+ LOGE("%s(%d): dlsym destroy_class_RkJpegDecoder fail",__FUNCTION__,__LINE__);
+
+ mMjpegDecoder.init = (initMjpegDecoderFun)dlsym(mLibstageLibHandle, "init_class_RkJpegDecoder");
+ if (mMjpegDecoder.init == NULL)
+ LOGE("%s(%d): dlsym init_class_RkJpegDecoder fail",__FUNCTION__,__LINE__);
+
+ mMjpegDecoder.deInit =(deInitMjpegDecoderFun)dlsym(mLibstageLibHandle, "deinit_class_RkJpegDecoder");
+ if (mMjpegDecoder.deInit == NULL)
+ LOGE("%s(%d): dlsym deinit_class_RkJpegDecoder fail",__FUNCTION__,__LINE__);
+
+ mMjpegDecoder.decode =(mjpegDecodeOneFrameFun)dlsym(mLibstageLibHandle, decode_name);
+ if (mMjpegDecoder.decode == NULL)
+ LOGE("%s(%d): dlsym %s fail",__FUNCTION__,__LINE__,decode_name);
+
+ if ((mMjpegDecoder.deInit != NULL) && (mMjpegDecoder.init != NULL) &&
+ (mMjpegDecoder.destroy != NULL) && (mMjpegDecoder.decode != NULL)) {
+ mMjpegDecoder.state = mMjpegDecoder.init(mMjpegDecoder.decoder);
+ }
+ }
+ }
+
+
+ cameraDevicePathCur = (char*)&gCamInfos[cameraId].device_path[0];
+ iCamFd = open(cameraDevicePathCur, O_RDWR);
+ if (iCamFd < 0) {
+ LOGE("%s(%d): open camera%d(%s) is failed ,err: %s",__FUNCTION__,__LINE__,cameraId,cameraDevicePathCur,strerror(errno));
+ goto exit;
+ }
+
+ memset(&mCamDriverCapability, 0, sizeof(struct v4l2_capability));
+ err = ioctl(iCamFd, VIDIOC_QUERYCAP, &mCamDriverCapability);
+ if (err < 0) {
+        LOGE("%s(%d): %s query device's capability failed , err: %s \n",__FUNCTION__,__LINE__,cameraDevicePathCur,strerror(errno));
+ goto exit1;
+ }
+
+ LOGD("Camera driver: %s Card Name:%s Driver version: %d.%d.%d CameraHal version: %d.%d.%d ",mCamDriverCapability.driver,
+ mCamDriverCapability.card,(mCamDriverCapability.version>>16) & 0xff,(mCamDriverCapability.version>>8) & 0xff,
+ mCamDriverCapability.version & 0xff,(CONFIG_CAMERAHAL_VERSION>>16) & 0xff,(CONFIG_CAMERAHAL_VERSION>>8) & 0xff,
+ CONFIG_CAMERAHAL_VERSION & 0xff);
+
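+    /* enumerate every pixel format the V4L2 driver reports and cache it in mCamDriverSupportFmt;
+     * the list is intersected with CameraHal_SupportFmt below to choose the preview format */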
+ memset(&fmtdesc, 0, sizeof(fmtdesc));
+ fmtdesc.index = 0;
+ fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ while (ioctl(iCamFd, VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
+ mCamDriverSupportFmt[fmtdesc.index] = fmtdesc.pixelformat;
+ LOGD("mCamDriverSupportFmt: fmt = %d(%s),index = %d",fmtdesc.pixelformat,fmtdesc.description,fmtdesc.index);
+
+ fmtdesc.index++;
+ }
+
+
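+    /* walk CameraHal_SupportFmt in priority order and pick the first entry the driver also supports;
+     * MJPEG is only accepted when a usable MJPEG decoder was loaded above */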
+ i = 0;
+ while (CameraHal_SupportFmt[i]) {
+ LOG1("CameraHal_SupportFmt:fmt = %d,index = %d",CameraHal_SupportFmt[i],i);
+ j = 0;
+ while (mCamDriverSupportFmt[j]) {
+ if (mCamDriverSupportFmt[j] == CameraHal_SupportFmt[i]) {
+                if ((mCamDriverSupportFmt[j] == V4L2_PIX_FMT_MJPEG) && (mMjpegDecoder.state == -1)) {
+                    j++;        /* skip MJPEG when no usable MJPEG decoder is available; without j++ this loop would never advance */
+                    continue;
+                }
+ break;
+ }
+ j++;
+ }
+ if (mCamDriverSupportFmt[j] == CameraHal_SupportFmt[i]) {
+ break;
+ }
+ i++;
+ }
+
+ if (CameraHal_SupportFmt[i] == 0x00) {
+        LOGE("%s(%d): none of the camera driver's supported formats is supported by CameraHal!!",__FUNCTION__,__LINE__);
+ j = 0;
+ while (mCamDriverSupportFmt[j]) {
+ LOG1("pixelformat = '%c%c%c%c'",
+ mCamDriverSupportFmt[j] & 0xFF, (mCamDriverSupportFmt[j] >> 8) & 0xFF,
+ (mCamDriverSupportFmt[j] >> 16) & 0xFF, (mCamDriverSupportFmt[j] >> 24) & 0xFF);
+ j++;
+ }
+ goto exit1;
+ } else {
+ mCamDriverPreviewFmt = CameraHal_SupportFmt[i];
+        LOGD("%s(%d): mCamDriverPreviewFmt(%c%c%c%c) is supported by both CameraHal and the camera driver!!",__FUNCTION__,__LINE__,
+ mCamDriverPreviewFmt & 0xFF, (mCamDriverPreviewFmt >> 8) & 0xFF,
+ (mCamDriverPreviewFmt >> 16) & 0xFF, (mCamDriverPreviewFmt >> 24) & 0xFF);
+
+ LOGD("mCamDriverPreviewFmt = %d",mCamDriverPreviewFmt);
+
+ }
+
+
+ LOGD("%s(%d): Current driver is %s, v4l2 memory is %s",__FUNCTION__,__LINE__,mCamDriverCapability.driver,
+ (mCamDriverV4l2MemType==V4L2_MEMORY_MMAP)?"V4L2_MEMORY_MMAP":"V4L2_MEMORY_OVERLAY");
+ mCamFd = iCamFd;
+
+ //create focus thread for soc or usb camera.
+ mAutoFocusThread = new AutoFocusThread(this);
+ mAutoFocusThread->run("AutoFocusThread", ANDROID_PRIORITY_URGENT_DISPLAY);
+ mExitAutoFocusThread = false;
+
+
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+
+exit1:
+ if (iCamFd > 0) {
+ close(iCamFd);
+ iCamFd = -1;
+ }
+
+exit:
+ LOGE("%s(%d): exit with error -1",__FUNCTION__,__LINE__);
+ return -1;
+}
+int CameraAdapter::cameraDestroy()
+{
+ int err,i;
+
+ LOG_FUNCTION_NAME
+ if (mLibstageLibHandle && (mMjpegDecoder.state == 0)) {
+ mMjpegDecoder.deInit(mMjpegDecoder.decoder);
+ mMjpegDecoder.destroy(mMjpegDecoder.decoder);
+ mMjpegDecoder.decoder = NULL;
+
+ dlclose(mLibstageLibHandle);
+ mLibstageLibHandle = NULL;
+    }
+    LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+
+int CameraAdapter::cameraSetSize(int w, int h, int fmt, bool is_capture)
+{
+ int err=0;
+ struct v4l2_format format;
+
+ /* Set preview format */
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ format.fmt.pix.width = w;
+ format.fmt.pix.height = h;
+ format.fmt.pix.pixelformat = fmt;
+ format.fmt.pix.field = V4L2_FIELD_NONE; /* ddl@rock-chips.com : field must be initialised for Linux kernel in 2.6.32 */
+ LOGD("%s(%d):IN, w = %d,h = %d",__FUNCTION__,__LINE__,w,h);
+ if (is_capture) { /* ddl@rock-chips.com: v0.4.1 add capture and preview check */
+ format.fmt.pix.priv = 0xfefe5a5a;
+ } else {
+ format.fmt.pix.priv = 0x5a5afefe;
+ }
+
+ err = ioctl(mCamFd, VIDIOC_S_FMT, &format);
+ if ( err < 0 ){
+ LOGE("%s(%d): VIDIOC_S_FMT failed ,err: %s",__FUNCTION__,__LINE__,strerror(errno));
+ } else {
+ LOG1("%s(%d): VIDIOC_S_FMT %dx%d '%c%c%c%c'",__FUNCTION__,__LINE__,format.fmt.pix.width, format.fmt.pix.height,
+ fmt & 0xFF, (fmt >> 8) & 0xFF,(fmt >> 16) & 0xFF, (fmt >> 24) & 0xFF);
+ }
+
+ return err;
+}
+int CameraAdapter::cameraStream(bool on)
+{
+ int err = 0;
+ int cmd ;
+ enum v4l2_buf_type type;
+ LOGD("%s(%d):on = %d",__FUNCTION__,__LINE__,on);
+ type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ cmd = (on)?VIDIOC_STREAMON:VIDIOC_STREAMOFF;
+
+ mCamDriverStreamLock.lock();
+ err = ioctl(mCamFd, cmd, &type);
+ if (err < 0) {
+ LOGE("%s(%d): %s Failed ,err: %s",__FUNCTION__,__LINE__,((on)?"VIDIOC_STREAMON":"VIDIOC_STREAMOFF"),strerror(errno));
+ goto cameraStream_end;
+ }
+ mCamDriverStream = on;
+
+cameraStream_end:
+ mCamDriverStreamLock.unlock();
+ return err;
+}
+// query buffer ,qbuf , stream on
+int CameraAdapter::cameraStart()
+{
+ int preview_size,i;
+ int err;
+ int nSizeBytes;
+ int buffer_count;
+ struct v4l2_format format;
+ enum v4l2_buf_type type;
+ struct v4l2_requestbuffers creqbuf;
+ struct v4l2_buffer buffer;
+ CameraParameters tmpparams;
+
+ LOG_FUNCTION_NAME
+
+ buffer_count = mPreviewBufProvider->getBufCount();
+ creqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ creqbuf.memory = mCamDriverV4l2MemType;
+ creqbuf.count = buffer_count;
+ if (ioctl(mCamFd, VIDIOC_REQBUFS, &creqbuf) < 0) {
+ LOGE ("%s(%d): VIDIOC_REQBUFS Failed. %s",__FUNCTION__,__LINE__, strerror(errno));
+ goto fail_reqbufs;
+ }
+ if (buffer_count == 0) {
+        LOGE("%s(%d): preview buffers haven't been allocated",__FUNCTION__,__LINE__);
+ goto fail_reqbufs;
+ }
+ memset(mCamDriverV4l2Buffer, 0x00, sizeof(mCamDriverV4l2Buffer));
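+    /* for V4L2_MEMORY_OVERLAY the physical address of the pre-allocated preview buffer is handed to
+     * the driver, for V4L2_MEMORY_MMAP the driver's own buffer is mmap'ed into the HAL; every buffer
+     * is then marked as being written by the driver and queued with VIDIOC_QBUF */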
+ for (int i = 0; i < buffer_count; i++) {
+ memset(&buffer, 0, sizeof(struct v4l2_buffer));
+ buffer.type = creqbuf.type;
+ buffer.memory = creqbuf.memory;
+ buffer.flags = 0;
+ buffer.index = i;
+
+ if (ioctl(mCamFd, VIDIOC_QUERYBUF, &buffer) < 0) {
+ LOGE("%s(%d): VIDIOC_QUERYBUF Failed ,err: %s",__FUNCTION__,__LINE__,strerror(errno));
+ goto fail_bufalloc;
+ }
+
+ if (buffer.memory == V4L2_MEMORY_OVERLAY) {
+
+ buffer.m.offset = mPreviewBufProvider->getBufPhyAddr(i);
+ mCamDriverV4l2Buffer[i] = (char*)mPreviewBufProvider->getBufVirAddr(i);
+ } else if (buffer.memory == V4L2_MEMORY_MMAP) {
+ mCamDriverV4l2Buffer[i] = (char*)mmap(0 /* start anywhere */ ,
+ buffer.length, PROT_READ, MAP_SHARED, mCamFd,
+ buffer.m.offset);
+ if (mCamDriverV4l2Buffer[i] == MAP_FAILED) {
+ LOGE("%s(%d): Unable to map buffer(length:0x%x offset:0x%x) %s(err:%d)\n",__FUNCTION__,__LINE__, buffer.length,buffer.m.offset,strerror(errno),errno);
+ goto fail_bufalloc;
+ }
+ }
+ mCamDriverV4l2BufferLen = buffer.length;
+
+ mPreviewBufProvider->setBufferStatus(i, 1,PreviewBufferProvider::CMD_PREVIEWBUF_WRITING);
+ err = ioctl(mCamFd, VIDIOC_QBUF, &buffer);
+ if (err < 0) {
+ LOGE("%s(%d): VIDIOC_QBUF Failed,err=%d[%s]\n",__FUNCTION__,__LINE__,err, strerror(errno));
+ mPreviewBufProvider->setBufferStatus(i, 0,PreviewBufferProvider::CMD_PREVIEWBUF_WRITING);
+ goto fail_bufalloc;
+ }
+ }
+
+
+ mPreviewErrorFrameCount = 0;
+ mPreviewFrameIndex = 0;
+ cameraStream(true);
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+
+fail_bufalloc:
+ mPreviewBufProvider->freeBuffer();
+fail_reqbufs:
+ LOGE("%s(%d): exit with error(%d)",__FUNCTION__,__LINE__,-1);
+ return -1;
+}
+int CameraAdapter::cameraStop()
+{
+ int i, buffer_count;
+ if (mCamDriverV4l2MemType==V4L2_MEMORY_MMAP) {
+ buffer_count = mPreviewBufProvider->getBufCount();
+ for (i=0; i<buffer_count; i++) {
+ munmap(mCamDriverV4l2Buffer[i], mCamDriverV4l2BufferLen);
+ }
+ }
+
+ return 0;
+}
+int CameraAdapter::cameraAutoFocus(bool auto_trig_only)
+{
+ return 0;
+}
+
+void CameraAdapter::debugShowFPS()
+{
+ static int mFrameCount = 0;
+ static int mLastFrameCount = 0;
+ static nsecs_t mLastFpsTime = 0;
+ static float mFps = 0;
+ mFrameCount++;
+ if (!(mFrameCount & 0x1F)) {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ LOGD("Camera %d Frames, %2.3f FPS", mFrameCount, mFps);
+ }
+ // XXX: mFPS has the value we want
+}
+
+//dqbuf
+int CameraAdapter::getFrame(FramInfo_s** tmpFrame){
+
+ struct v4l2_buffer cfilledbuffer1;
+ int ret = 0;
+
+ memset(&cfilledbuffer1, 0, sizeof(struct v4l2_buffer));
+ cfilledbuffer1.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ cfilledbuffer1.memory = mCamDriverV4l2MemType;
+ cfilledbuffer1.reserved = 0;
+
+ FILTER_FRAMES:
+    /* De-queue the next available buffer */
+ LOG2("%s(%d): get frame in",__FUNCTION__,__LINE__);
+ if (ioctl(mCamFd, VIDIOC_DQBUF, &cfilledbuffer1) < 0) {
+ LOGE("%s(%d): VIDIOC_DQBUF Failed!!! err[%s] \n",__FUNCTION__,__LINE__,strerror(errno));
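+        /* EIO is treated as a fatal device error immediately; any other DQBUF error is tolerated once,
+         * and the device is flagged as broken after two consecutive failures */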
+ if (errno == EIO) {
+ mCamDriverStreamLock.lock();
+ if (mCamDriverStream) {
+ camera_device_error = true;
+                LOGE("%s(%d): the camera driver or device may have failed, so notify CAMERA_MSG_ERROR",
+                       __FUNCTION__,__LINE__);
+ }
+ mCamDriverStreamLock.unlock();
+ } else {
+ mCamDriverStreamLock.lock();
+ if (mCamDriverStream) {
+ mPreviewErrorFrameCount++;
+ if (mPreviewErrorFrameCount >= 2) {
+ mCamDriverStreamLock.unlock();
+ camera_device_error = true;
+                    LOGE("%s(%d): mPreviewErrorFrameCount is %d; the camera driver or device may have failed, so notify CAMERA_MSG_ERROR",
+                        __FUNCTION__,__LINE__,mPreviewErrorFrameCount);
+ } else {
+ mCamDriverStreamLock.unlock();
+ }
+ } else {
+ mCamDriverStreamLock.unlock();
+ }
+ }
+ ret = -1;
+ goto getFrame_out;
+ } else {
+ mPreviewErrorFrameCount = 0;
+ }
+ LOG2("%s(%d): deque a frame %d success",__FUNCTION__,__LINE__,cfilledbuffer1.index);
+
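+    /* requeue and skip the first FILTER_FRAME_NUMBER frames after stream-on before delivering any frame */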
+ if(mPreviewFrameIndex++ < FILTER_FRAME_NUMBER)
+ {
+ LOG2("%s:filter frame %d",__FUNCTION__,mPreviewFrameIndex);
+ mCamDriverStreamLock.lock();
+ if(mCamDriverStream)
+ ioctl(mCamFd, VIDIOC_QBUF, &cfilledbuffer1);
+ mCamDriverStreamLock.unlock();
+ goto FILTER_FRAMES;
+ }
+ // fill frame info:w,h,phy,vir
+ mPreviewFrameInfos[cfilledbuffer1.index].frame_fmt= mCamDriverPreviewFmt;
+ mPreviewFrameInfos[cfilledbuffer1.index].frame_height = mCamDrvHeight;
+ mPreviewFrameInfos[cfilledbuffer1.index].frame_width = mCamDrvWidth;
+ mPreviewFrameInfos[cfilledbuffer1.index].frame_index = cfilledbuffer1.index;
+ if(mCamDriverV4l2MemType == V4L2_MEMORY_OVERLAY){
+ if(cif_driver_iommu){
+ mPreviewFrameInfos[cfilledbuffer1.index].phy_addr = mPreviewBufProvider->getBufShareFd(cfilledbuffer1.index);
+ }else
+ mPreviewFrameInfos[cfilledbuffer1.index].phy_addr = mPreviewBufProvider->getBufPhyAddr(cfilledbuffer1.index);
+ }else
+ mPreviewFrameInfos[cfilledbuffer1.index].phy_addr = 0;
+ mPreviewFrameInfos[cfilledbuffer1.index].vir_addr = (unsigned long)mCamDriverV4l2Buffer[cfilledbuffer1.index];
+ //get zoom_value
+ mPreviewFrameInfos[cfilledbuffer1.index].zoom_value = mZoomVal;
+ mPreviewFrameInfos[cfilledbuffer1.index].used_flag = 0;
+ mPreviewFrameInfos[cfilledbuffer1.index].frame_size = cfilledbuffer1.bytesused;
+ mPreviewFrameInfos[cfilledbuffer1.index].res = NULL;
+
+ *tmpFrame = &(mPreviewFrameInfos[cfilledbuffer1.index]);
+ LOG2("%s(%d): fill frame info success",__FUNCTION__,__LINE__);
+ debugShowFPS();
+getFrame_out:
+ if(camera_device_error)
+ mRefEventNotifier->notifyCbMsg(CAMERA_MSG_ERROR, CAMERA_ERROR_SERVER_DIED);
+ return ret ;
+}
+int CameraAdapter::adapterReturnFrame(long index,int cmd){
+ int buf_state = -1;
+ struct v4l2_buffer vb;
+ mCamDriverStreamLock.lock();
+
+ if (!mCamDriverStream) {
+        LOGD("%s(%d): preview thread is paused, so buffer %ld isn't enqueued to the camera",__FUNCTION__,__LINE__,index);
+ mCamDriverStreamLock.unlock();
+ return 0;
+ }
+ mPreviewBufProvider->setBufferStatus(index,0, cmd);
+ buf_state = mPreviewBufProvider->getBufferStatus(index);
+    LOG2("%s(%d):index(%ld),cmd(%d),buf_state(%d)",__FUNCTION__,__LINE__,index,cmd,buf_state);
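+    /* the buffer is queued back to the driver only when no consumer (display, video/snapshot encoder,
+     * data callback) still holds it, i.e. when all of its state bits have been cleared */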
+ if (buf_state == 0) {
+ memset(&vb, 0, sizeof(struct v4l2_buffer));
+ vb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ vb.memory = mCamDriverV4l2MemType;
+ vb.index = index;
+ vb.reserved = 0;
+ vb.m.offset = mPreviewBufProvider->getBufPhyAddr(index);
+
+ mPreviewBufProvider->setBufferStatus(index,1,PreviewBufferProvider::CMD_PREVIEWBUF_WRITING);
+ if (ioctl(mCamFd, VIDIOC_QBUF, &vb) < 0) {
+            LOGE("%s(%d): VIDIOC_QBUF %ld Failed!!! err[%s]", __FUNCTION__,__LINE__,index, strerror(errno));
+ mPreviewBufProvider->setBufferStatus(index,0,PreviewBufferProvider::CMD_PREVIEWBUF_WRITING);
+ }
+
+ } else {
+        LOG2("%s(%d): buffer %ld state 0x%x",__FUNCTION__,__LINE__, index, buf_state);
+ }
+ mCamDriverStreamLock.unlock();
+ return 0;
+}
+
+int CameraAdapter::returnFrame(long index,int cmd)
+{
+ return adapterReturnFrame(index,cmd);
+}
+
+//define the frame info ,such as w, h ,fmt
+int CameraAdapter::reprocessFrame(FramInfo_s* frame)
+{
+ // usb camera may do something
+ return 0;
+}
+void CameraAdapter::previewThread(){
+ bool loop = true;
+ FramInfo_s* tmpFrame = NULL;
+ int buffer_log = 0;
+ int ret = 0;
+ while(loop){
+
+ //get a frame
+ //fill frame info
+
+ //dispatch a frame
+ tmpFrame = NULL;
+ mCamDriverStreamLock.lock();
+ if (mCamDriverStream == false) {
+ mCamDriverStreamLock.unlock();
+ break;
+ }
+ mCamDriverStreamLock.unlock();
+
+ ret = getFrame(&tmpFrame);
+
+// LOG2("%s(%d),frame addr = %p,%dx%d,index(%d)",__FUNCTION__,__LINE__,tmpFrame,tmpFrame->frame_width,tmpFrame->frame_height,tmpFrame->frame_index);
+ if((ret!=-1) && (!camera_device_error)){
+ mPreviewBufProvider->setBufferStatus(tmpFrame->frame_index, 0,PreviewBufferProvider::CMD_PREVIEWBUF_WRITING);
+ //set preview buffer status
+ ret = reprocessFrame(tmpFrame);
+ if(ret < 0){
+ returnFrame(tmpFrame->frame_index,buffer_log);
+ continue;
+ }
+
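+            /* buffer_log collects one status bit per consumer that wants this frame (display, video
+             * encoder, snapshot, preview data callback); the frame is dispatched to each of them below
+             * and returned to the driver immediately if nobody wants it */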
+ buffer_log = 0;
+ //display ?
+ if(mRefDisplayAdapter->isNeedSendToDisplay())
+ buffer_log |= PreviewBufferProvider::CMD_PREVIEWBUF_DISPING;
+ //video enc ?
+ if(mRefEventNotifier->isNeedSendToVideo())
+ buffer_log |= PreviewBufferProvider::CMD_PREVIEWBUF_VIDEO_ENCING;
+ //picture ?
+ if(mRefEventNotifier->isNeedSendToPicture())
+ buffer_log |= PreviewBufferProvider::CMD_PREVIEWBUF_SNAPSHOT_ENCING;
+ //preview data callback ?
+ if(mRefEventNotifier->isNeedSendToDataCB())
+ buffer_log |= PreviewBufferProvider::CMD_PREVIEWBUF_DATACB;
+
+ mPreviewBufProvider->setBufferStatus(tmpFrame->frame_index,1,buffer_log);
+
+ if(mRefEventNotifier->isNeedSendToFaceDetect()){
+ mRefEventNotifier->notifyNewFaceDecFrame(tmpFrame);
+ }
+ if(buffer_log & PreviewBufferProvider::CMD_PREVIEWBUF_DISPING){
+ tmpFrame->used_flag = PreviewBufferProvider::CMD_PREVIEWBUF_DISPING;
+ mRefDisplayAdapter->notifyNewFrame(tmpFrame);
+ }
+ if(buffer_log & (PreviewBufferProvider::CMD_PREVIEWBUF_SNAPSHOT_ENCING)){
+ tmpFrame->used_flag = PreviewBufferProvider::CMD_PREVIEWBUF_SNAPSHOT_ENCING;
+ mRefEventNotifier->notifyNewPicFrame(tmpFrame);
+ }
+ if(buffer_log & (PreviewBufferProvider::CMD_PREVIEWBUF_VIDEO_ENCING)){
+ tmpFrame->used_flag = PreviewBufferProvider::CMD_PREVIEWBUF_VIDEO_ENCING;
+ mRefEventNotifier->notifyNewVideoFrame(tmpFrame);
+ }
+ if(buffer_log & (PreviewBufferProvider::CMD_PREVIEWBUF_DATACB)){
+ tmpFrame->used_flag = PreviewBufferProvider::CMD_PREVIEWBUF_DATACB;
+ mRefEventNotifier->notifyNewPreviewCbFrame(tmpFrame);
+ }
+
+ if(buffer_log == 0)
+ returnFrame(tmpFrame->frame_index,buffer_log);
+ }else if(camera_device_error){
+			//notify the app of the error
+ break;
+ }else if((ret==-1) && (!camera_device_error)){
+
+ if(tmpFrame)
+ returnFrame(tmpFrame->frame_index,buffer_log);
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return;
+}
+
+
+void CameraAdapter::autofocusThread()
+{
+ int err;
+ LOG_FUNCTION_NAME
+ while (1) {
+ mAutoFocusLock.lock();
+ /* check early exit request */
+ if (mExitAutoFocusThread) {
+ mAutoFocusLock.unlock();
+ LOG1("%s(%d) exit", __FUNCTION__,__LINE__);
+ goto autofocusThread_end;
+ }
+ mAutoFocusCond.wait(mAutoFocusLock);
+ LOGD("wait out\n");
+ /* check early exit request */
+ if (mExitAutoFocusThread) {
+ mAutoFocusLock.unlock();
+ LOG1("%s(%d) exit", __FUNCTION__,__LINE__);
+ goto autofocusThread_end;
+ }
+ mAutoFocusLock.unlock();
+
+ //const char *mfocusMode = params.get(CameraParameters::KEY_FOCUS_MODE);
+ //if(mfocusMode)
+ //err = cameraAutoFocus(mfocusMode/*CameraParameters::FOCUS_MODE_AUTO*/,false);
+ err = cameraAutoFocus(false);
+ if(mRefEventNotifier){
+ mRefEventNotifier->notifyCbMsg(CAMERA_MSG_FOCUS, err);
+ }
+
+ }
+
+autofocusThread_end:
+ LOG_FUNCTION_NAME_EXIT
+ return;
+}
+
+}
+
diff --git a/CameraHal/CameraBuffer.cpp b/CameraHal/CameraBuffer.cpp
new file mode 100644
index 0000000..5d058d5
--- /dev/null
+++ b/CameraHal/CameraBuffer.cpp
@@ -0,0 +1,274 @@
+#include "CameraHal.h"
+namespace android{
+int BufferProvider::getBufferStatus(int bufindex){
+ int ret_status;
+ mBufInfo[bufindex].lock->lock();
+ ret_status = mBufInfo[bufindex].buf_state;
+ mBufInfo[bufindex].lock->unlock();
+ return ret_status;
+ }
+int BufferProvider::getBufCount()
+{
+ return mBufCount;
+}
+long BufferProvider::getBufPhyAddr(int bufindex)
+{
+ long phy_addr;
+ mBufInfo[bufindex].lock->lock();
+ phy_addr = mBufInfo[bufindex].phy_addr;
+ mBufInfo[bufindex].lock->unlock();
+ return phy_addr;
+}
+long BufferProvider::getBufVirAddr(int bufindex)
+{
+ long vir_addr;
+ mBufInfo[bufindex].lock->lock();
+ vir_addr = mBufInfo[bufindex].vir_addr;
+ mBufInfo[bufindex].lock->unlock();
+ return vir_addr;
+}
+
+int BufferProvider::getBufShareFd(int bufindex)
+{
+ int share_fd = -1;
+ mBufInfo[bufindex].lock->lock();
+ share_fd = mBufInfo[bufindex].share_fd;
+ mBufInfo[bufindex].lock->unlock();
+ return share_fd;
+}
+int BufferProvider::createBuffer(int count,int perbufsize,buffer_type_enum buftype,bool is_cif_driver)
+{
+ int ret = 0,i;
+ struct bufferinfo_s buf;
+
+ memset(&buf,0,sizeof(struct bufferinfo_s));
+ mBufCount = count;
+ buf.mNumBffers = count;
+ buf.mPerBuffersize = PAGE_ALIGN(perbufsize);
+ buf.mBufType = (buffer_type_enum)buftype;
+
+ mBufType = (buffer_type_enum)buftype;
+ buf.mIsForceIommuBuf = true;
+
+ switch(buftype){
+ case PREVIEWBUFFER:
+ if(is_cif_driver){//should use cma buffer
+ buf.mIsForceIommuBuf = false;
+ }
+ if(mCamBuffer->createPreviewBuffer(&buf) !=0) {
+ LOGE("%s(%d): preview buffer create failed",__FUNCTION__,__LINE__);
+ ret = -1;
+ }
+ break;
+ case RAWBUFFER:
+ #if defined(TARGET_RK3188)//should use cma buffer
+ buf.mIsForceIommuBuf = false;
+ #endif
+ if(mCamBuffer->createRawBuffer(&buf) !=0) {
+ LOGE("%s(%d): raw buffer create failed",__FUNCTION__,__LINE__);
+ ret = -1;
+ }
+ break;
+ case JPEGBUFFER:
+ if(mCamBuffer->createJpegBuffer(&buf) !=0) {
+ LOGE("%s(%d): jpeg buffer create failed",__FUNCTION__,__LINE__);
+ ret = -1;
+ }
+ break;
+ case VIDEOENCBUFFER:
+ if(mCamBuffer->createVideoEncBuffer(&buf) !=0) {
+ LOGE("%s(%d): video buffer create failed",__FUNCTION__,__LINE__);
+ ret = -1;
+ }
+ break;
+ default :
+ ret = -1;
+
+
+ }
+ if(ret == -1) {
+ LOGE("%s(%d): buffer create failed",__FUNCTION__,__LINE__);
+ ret = -1;
+ goto createBuffer_end;
+ }
+
+ mBufInfo = (rk_buffer_info_t*)malloc(sizeof(rk_buffer_info_t)*count);
+ if(!mBufInfo){
+ LOGE("%s(%d): buffer create failed",__FUNCTION__,__LINE__);
+ ret = -1;
+ goto createBuffer_end;
+ }
+ for (i=0; i<count; i++) {
+ mBufInfo[i].lock = new Mutex();
+ mBufInfo[i].vir_addr = (long)mCamBuffer->getBufferAddr(buftype,i,buffer_addr_vir);
+ mBufInfo[i].phy_addr = (long)mCamBuffer->getBufferAddr(buftype,i,buffer_addr_phy);
+ mBufInfo[i].share_fd = (long)mCamBuffer->getBufferAddr(buftype,i,buffer_sharre_fd);
+ mBufInfo[i].buf_state = 0;
+ }
+
+createBuffer_end:
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+}
+
+int BufferProvider::freeBuffer()
+{
+
+ LOG_FUNCTION_NAME
+
+ if(mBufInfo != NULL){
+ for(int i=0; i<mBufCount; i++){
+ delete mBufInfo[i].lock;
+ }
+ free(mBufInfo);
+ mBufInfo = NULL;
+ mBufCount = 0;
+ switch(mBufType){
+ case PREVIEWBUFFER:
+ mCamBuffer->destroyPreviewBuffer();
+ break;
+ case RAWBUFFER:
+ mCamBuffer->destroyRawBuffer();
+ break;
+ case JPEGBUFFER:
+ mCamBuffer->destroyJpegBuffer();
+ break;
+ case VIDEOENCBUFFER:
+ mCamBuffer->destroyVideoEncBuffer();
+ break;
+ default :
+ break;
+
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+
+int BufferProvider::setBufferStatus(int bufindex,int status,int cmd)
+{
+ int err = NO_ERROR;
+ rk_buffer_info_t *buf_hnd = NULL;
+
+ if(bufindex >= mBufCount){
+        LOGE("%s(%d): buffer index is out of range, can't set buffer state",__FUNCTION__,__LINE__);
+ err = -EINVAL;
+ goto setBufferStatus_end;
+ }
+ if (mBufInfo == NULL) {
+ LOGE("%s(%d): buf_hnd is null",__FUNCTION__,__LINE__);
+ err = -EINVAL;
+ goto setBufferStatus_end;
+ }
+
+ buf_hnd = mBufInfo+bufindex;
+
+ buf_hnd->lock->lock();
+
+ buf_hnd->buf_state = status;
+ buf_hnd->lock->unlock();
+setBufferStatus_end:
+ return err;
+}
+
+int BufferProvider::getOneAvailableBuffer(long *buf_phy,long *buf_vir)
+{
+ int i;
+ for ( i=0; i < mBufCount; i++) {
+ if((mBufInfo[i].buf_state) ==0)
+ break;
+ }
+
+ if(i == mBufCount)
+ return -1;
+ else{
+ *buf_phy = mBufInfo[i].phy_addr;
+ *buf_vir = mBufInfo[i].vir_addr;
+ }
+ return i;
+}
+
+int BufferProvider::flushBuffer(int bufindex)
+{
+ if(mBufInfo != NULL){
+ return mCamBuffer->flushCacheMem(mBufType,0,0);
+ }else{
+ return -1;
+ }
+}
+
+//preview buffer
+int PreviewBufferProvider::setBufferStatus(int bufindex,int set,int cmd)
+{
+ int err = NO_ERROR;
+ rk_buffer_info_t *buf_hnd = NULL;
+
+ if(bufindex >= mBufCount){
+        LOGE("%s(%d): buffer index is out of range, can't set buffer state",__FUNCTION__,__LINE__);
+ err = -EINVAL;
+ goto setPreviewBufferStatus_end;
+ }
+ if (mBufInfo == NULL) {
+ LOGE("%s(%d): buf_hnd is null",__FUNCTION__,__LINE__);
+ err = -EINVAL;
+ goto setPreviewBufferStatus_end;
+ }
+
+ buf_hnd = mBufInfo+bufindex;
+
+ buf_hnd->lock->lock();
+
+ if (cmd & CMD_PREVIEWBUF_DISPING) {
+ if (set){
+ if (CAMERA_PREVIEWBUF_ALLOW_DISPLAY(buf_hnd->buf_state)==false)
+ LOGE("%s(%d): Set buffer displaying, but buffer status(0x%x) is error",__FUNCTION__,__LINE__,buf_hnd->buf_state);
+ buf_hnd->buf_state |= CMD_PREVIEWBUF_DISPING;
+ } else {
+ buf_hnd->buf_state &= ~CMD_PREVIEWBUF_DISPING;
+ }
+ }
+
+ if (cmd & CMD_PREVIEWBUF_VIDEO_ENCING) {
+ if (set) {
+ if (CAMERA_PREVIEWBUF_ALLOW_ENC(buf_hnd->buf_state)==false)
+ LOGE("%s(%d): Set buffer encoding, but buffer status(0x%x) is error",__FUNCTION__,__LINE__,buf_hnd->buf_state);
+ buf_hnd->buf_state |= CMD_PREVIEWBUF_VIDEO_ENCING;
+ } else {
+ buf_hnd->buf_state &= ~CMD_PREVIEWBUF_VIDEO_ENCING;
+ }
+ }
+
+ if (cmd & CMD_PREVIEWBUF_SNAPSHOT_ENCING) {
+ if (set) {
+ if (CAMERA_PREVIEWBUF_ALLOW_ENC_PICTURE(buf_hnd->buf_state)==false)
+ LOGE("%s(%d): Set buffer snapshot encoding, but buffer status(0x%x) is error",__FUNCTION__,__LINE__,buf_hnd->buf_state);
+ buf_hnd->buf_state |= CMD_PREVIEWBUF_SNAPSHOT_ENCING;
+ } else {
+ buf_hnd->buf_state &= ~CMD_PREVIEWBUF_SNAPSHOT_ENCING;
+ }
+ }
+ if (cmd & CMD_PREVIEWBUF_DATACB) {
+ if (set) {
+ if (CAMERA_PREVIEWBUF_ALLOW_DATA_CB(buf_hnd->buf_state)==false)
+ LOGE("%s(%d): Set buffer datacb, but buffer status(0x%x) is error",__FUNCTION__,__LINE__,buf_hnd->buf_state);
+ buf_hnd->buf_state |= CMD_PREVIEWBUF_DATACB;
+ } else {
+ buf_hnd->buf_state &= ~CMD_PREVIEWBUF_DATACB;
+ }
+ }
+ if (cmd & CMD_PREVIEWBUF_WRITING) {
+ if (set) {
+ if (CAMERA_PREVIEWBUF_ALLOW_WRITE(buf_hnd->buf_state)==false)
+ LOGE("%s(%d): Set buffer writing, but buffer status(0x%x) is error",__FUNCTION__,__LINE__,buf_hnd->buf_state);
+ buf_hnd->buf_state |= CMD_PREVIEWBUF_WRITING;
+ } else {
+ buf_hnd->buf_state &= ~CMD_PREVIEWBUF_WRITING;
+ }
+ }
+
+ buf_hnd->lock->unlock();
+ setPreviewBufferStatus_end:
+ return err;
+}
+}
+
diff --git a/CameraHal/CameraGL.h b/CameraHal/CameraGL.h
new file mode 100644
index 0000000..eed94c2
--- /dev/null
+++ b/CameraHal/CameraGL.h
@@ -0,0 +1,27 @@
+#include <jni.h>
+#include "string.h"
+#include "assert.h"
+#include <system/window.h>
+#include <hardware/gralloc.h>
+#include "MutliFrameDenoise.h"
+
+#ifndef CAMERA_GL__H
+#define CAMERA_GL__H
+
+/*enum OUTPUT_FMT { OUTPUT_FMT_CV_MAT, OUTPUT_FMT_YUV, OUTPUT_FMT_RGBA, OUTPUT_NONE };
+struct cv_fimc_buffer { void *start; int format; int share_fd; size_t length; int stride; size_t bytesused; buffer_handle_t handle; };*/
+
+class CameraGL {
+public:
+    bool initialized;
+    CameraGL();
+    ~CameraGL();
+    void init(alloc_device_t *m_alloc_dev, int width, int height, int buf_cnt = 1);
+    void update(struct cv_fimc_buffer *m_buffers_capture);
+    long process(long* targetAddr, int mode = OUTPUT_NONE, int ratio = 10, float distance = 5.0f);
+    int getResult(long targetAddr);
+    void destroy();
+private:
+    void getImageUnderDir(char *path, char *suffix);
+};
+#endif //CAMERA_GL__H
\ No newline at end of file
diff --git a/CameraHal/CameraHal.cpp b/CameraHal/CameraHal.cpp
new file mode 100755
index 0000000..5748239
--- /dev/null
+++ b/CameraHal/CameraHal.cpp
@@ -0,0 +1,1376 @@
+/**
+* @file CameraHal.cpp
+*
+* This file maps the Camera Hardware Interface to V4L2.
+*
+*/
+
+#include "CameraHal.h"
+#include "CameraHal_Module.h"
+#include <pthread.h>
+
+#include <binder/IPCThreadState.h>
+
+#include <utils/CallStack.h>
+#include <binder/IServiceManager.h>
+#include <binder/IMemory.h>
+
+#include <sys/stat.h>
+#include <unistd.h>
+#include <linux/fb.h>
+
+#include "CameraIspAdapter.h"
+#include "FakeCameraAdapter.h"
+
+#ifdef TARGET_RK29
+#include "../libyuvtorgb/yuvtorgb.h"
+#endif
+
+extern rk_cam_info_t gCamInfos[CAMERAS_SUPPORT_MAX];
+bool g_ctsV_flag = false;  /* 'extern' dropped: an initialized declaration at namespace scope is already a definition */
+
+namespace android {
+
+/************************
+There are two ways an interface can be implemented:
+1. The interface is carried out by the command thread.
+   This comes in two flavours, one synchronous and one asynchronous:
+   1.1 Synchronous: the caller waits on a Semaphore.
+   1.2 Asynchronous: no Semaphore is used.
+2. The interface is implemented inside the interface function itself and is not sent to the command thread.
+
+Synchronisation relationships between interface types:
+1.1 and 2   : synchronous with each other.
+1.2 and 2   : asynchronous with each other.
+1.1 and 1.2 : synchronous with each other; the ordering is guaranteed by the command thread's message queue.
+
+A type-1.2 interface is asynchronous from cameraservice's point of view.
+Two type-1.2 interfaces are synchronous with each other, but asynchronous from cameraservice's point of view.
+*********************/
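+
+/* A minimal sketch of the type-1.1 (synchronous) pattern described above, kept inside "#if 0" so it
+ * is never compiled. It assumes the Message_cam, Semaphore and message-queue helpers declared in
+ * CameraHal.h (the queue type name below is illustrative) and mirrors what setPreviewWindow() and
+ * stopPreview() further down actually do. */
+#if 0
+static void sendSyncCommand(MessageQueue &cmdQ, int command)
+{
+    Message_cam msg;
+    Semaphore sem;
+
+    memset(&msg, 0, sizeof(msg));
+    msg.command = command;
+    sem.Create();
+    msg.arg1 = (void*)(&sem);   // a non-NULL arg1 tells the command thread to Signal() when the command is done
+    cmdQ.put(&msg);
+    sem.Wait();                 // type 1.1: block the caller until the command thread has finished the command
+}
+#endif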
+
+static void gsensor_orientation_cb(uint32_t orientation, uint32_t tilt, void* cookie){
+ if(cookie){
+ int cam_orien = 0, face = 0;
+ CameraHal* pCameraHal = static_cast<CameraHal*> (cookie);
+ cam_orien = gCamInfos[pCameraHal->getCameraId()].pcam_total_info->mHardInfo.mSensorInfo.mOrientation;
+ face = gCamInfos[pCameraHal->getCameraId()].pcam_total_info->mHardInfo.mSensorInfo.mFacing;
+ pCameraHal->mEventNotifier->onOrientationChanged(orientation,cam_orien,face);
+
+ }
+};
+
+CameraHal::CameraHal(int cameraId)
+ :commandThreadCommandQ("commandCmdQ")
+{
+ LOG_FUNCTION_NAME
+
+ {
+ char trace_level[PROPERTY_VALUE_MAX];
+ int level;
+
+ property_get(CAMERAHAL_TRACE_LEVEL_PROPERTY_KEY, trace_level, "0");
+
+ sscanf(trace_level,"%d",&level);
+
+ setTracerLevel(level);
+
+ }
+ mInitState = true;
+ mCamId = cameraId;
+ mCamFd = -1;
+ mCommandRunning = -1;
+ mCameraStatus = 0;
+
+ #if (CONFIG_CAMERA_MEM == CAMERA_MEM_ION)
+ mCamMemManager = new IonMemManager();
+ LOG1("%s(%d): Camera Hal memory is alloced from ION device",__FUNCTION__,__LINE__);
+ #elif(CONFIG_CAMERA_MEM == CAMERA_MEM_IONDMA)
+ mCamMemManager = new IonDmaMemManager(gCamInfos[cameraId].pcam_total_info->mIsIommuEnabled);
+ LOG1("%s(%d): Camera Hal memory is alloced from ION device",__FUNCTION__,__LINE__);
+ #elif(CONFIG_CAMERA_MEM == CAMERA_MEM_GRALLOC_DRM)
+ mCamMemManager = new GrallocDrmMemManager(gCamInfos[cameraId].pcam_total_info->mIsIommuEnabled);
+ LOG1("%s(%d): Camera Hal memory is alloced from GRALLOC device",__FUNCTION__,__LINE__);
+ #elif(CONFIG_CAMERA_MEM == CAMERA_MEM_PMEM)
+    if(access(CAMERA_PMEM_NAME, R_OK | W_OK) < 0) {
+        LOGE("%s(%d): %s isn't registered, so the current CameraHal_Mem configuration can't use PMEM memory!!!",
+            __FUNCTION__,__LINE__,CAMERA_PMEM_NAME);
+ } else {
+ mCamMemManager = new PmemManager((char*)CAMERA_PMEM_NAME);
+ LOG1("%s(%d): Camera Hal memory is alloced from %s device",__FUNCTION__,__LINE__,CAMERA_PMEM_NAME);
+ }
+ #endif
+
+ mPreviewBuf = new PreviewBufferProvider(mCamMemManager);
+ mVideoBuf = new BufferProvider(mCamMemManager);
+ mRawBuf = new BufferProvider(mCamMemManager);
+ mJpegBuf = new BufferProvider(mCamMemManager);
+
+ mPreviewBuf->is_cif_driver = false;
+ mVideoBuf->is_cif_driver = false;
+ mRawBuf->is_cif_driver = false;
+ mJpegBuf->is_cif_driver = false;
+
+ char value[PROPERTY_VALUE_MAX];
+ property_get(/*CAMERAHAL_TYPE_PROPERTY_KEY*/"sys.cam_hal.type", value, "none");
+
+ if (!strcmp(value, "fakecamera")) {
+ LOGD("it is a fake camera!");
+ mCameraAdapter = new CameraFakeAdapter(cameraId);
+ } else {
+ if((strcmp(gCamInfos[cameraId].driver,"uvcvideo") == 0)) {
+ LOGD("it is a uvc camera!");
+ mCameraAdapter = new CameraUSBAdapter(cameraId);
+ }
+ else if(gCamInfos[cameraId].pcam_total_info->mHardInfo.mSensorInfo.mPhy.type == CamSys_Phy_Cif){
+			LOGD("it is an ISP SoC camera");
+ if(gCamInfos[cameraId].pcam_total_info->mHardInfo.mSensorInfo.mPhy.info.cif.fmt == CamSys_Fmt_Raw_10b
+ || gCamInfos[cameraId].pcam_total_info->mHardInfo.mSensorInfo.mPhy.info.cif.fmt == CamSys_Fmt_Raw_12b)
+ mCameraAdapter = new CameraIspSOCAdapter(cameraId);
+ else
+ mCameraAdapter = new CameraIspAdapter(cameraId);
+ }
+ else if(gCamInfos[cameraId].pcam_total_info->mHardInfo.mSensorInfo.mPhy.type == CamSys_Phy_Mipi){
+			LOGD("it is an ISP camera");
+ mCameraAdapter = new CameraIspAdapter(cameraId);
+ }
+ else{
+ LOGD("it is a soc camera!");
+ mCameraAdapter = new CameraSOCAdapter(cameraId);
+ //mCameraAdapter->is_cif_driver = true;
+ mCameraAdapter->cif_driver_iommu = gCamInfos[cameraId].pcam_total_info->mIsIommuEnabled;
+ mPreviewBuf->is_cif_driver = true;
+ mVideoBuf->is_cif_driver = true;
+ mRawBuf->is_cif_driver = true;
+ mJpegBuf->is_cif_driver = true;
+ }
+ }
+
+ //initialize
+ {
+ char *call_process = getCallingProcess();
+ if(strstr(call_process,"com.android.cts.verifier")) {
+ mCameraAdapter->setImageAllFov(true);
+ g_ctsV_flag = true;
+ property_set("sys.cts_camera.status","false");
+ }else if(strstr(call_process,"android.camera.cts")) {
+ g_ctsV_flag = false;
+ property_set("sys.cts_camera.status","true");
+ }else {
+ mCameraAdapter->setImageAllFov(false);
+ g_ctsV_flag = false;
+ property_set("sys.cts_camera.status","false");
+ }
+ }
+
+ char prop_value[PROPERTY_VALUE_MAX];
+ property_get("sys.cts_camera.status",prop_value, "false");
+ if(!strcmp(prop_value,"true")){
+ mCameraAdapter->setCtsTestFlag(true);
+ }else{
+ mCameraAdapter->setCtsTestFlag(false);
+ }
+
+ mDisplayAdapter = new DisplayAdapter();
+ mEventNotifier = new AppMsgNotifier(mCameraAdapter);
+
+ mCameraAdapter->setEventNotifierRef(*mEventNotifier);
+ if(mCameraAdapter->initialize() < 0){
+ mInitState = false;
+ LOGE("Error: camera initialize failed.");
+ return;
+ }
+ updateParameters(mParameters);
+ mCameraAdapter->setPreviewBufProvider(mPreviewBuf);
+ mCameraAdapter->setDisplayAdapterRef(*mDisplayAdapter);
+
+ mDisplayAdapter->setFrameProvider(mCameraAdapter);
+
+ mEventNotifier->setPictureRawBufProvider(mRawBuf);
+ mEventNotifier->setPictureJpegBufProvider(mJpegBuf);
+ mEventNotifier->setVideoBufProvider(mVideoBuf);
+ mEventNotifier->setFrameProvider(mCameraAdapter);
+ //command thread
+ mCommandThread = new CommandThread(this);
+ mCommandThread->run("CameraCmdThread", ANDROID_PRIORITY_URGENT_DISPLAY);
+
+ bool dataCbFrontMirror;
+ bool dataCbFrontFlip;
+#if CONFIG_CAMERA_FRONT_MIRROR_MDATACB
+ if (gCamInfos[cameraId].facing_info.facing == CAMERA_FACING_FRONT) {
+#if CONFIG_CAMERA_FRONT_MIRROR_MDATACB_ALL
+ dataCbFrontMirror = true;
+#else
+ const char* cameraCallProcess = getCallingProcess();
+ if (strstr(CONFIG_CAMERA_FRONT_MIRROR_MDATACB_APK,cameraCallProcess)) {
+ if (!strcmp("com.tencent.mm", cameraCallProcess)) {
+                    /* If the device's default orientation is portrait, the front camera orientation must be
+                     * configured as 270 and the back camera orientation as 90.
+                     * If the device's default orientation is landscape, both must be configured as 0.
+                     * WeChat (weixin) video calls don't need mirroring when the default orientation is portrait.
+                     */
+ if (gCamInfos[cameraId].facing_info.orientation == 270)
+ dataCbFrontMirror = false;
+ else
+ dataCbFrontMirror = true;
+ } else {
+ dataCbFrontMirror = true;
+ }
+ } else {
+ dataCbFrontMirror = false;
+ }
+ if (strstr(CONFIG_CAMERA_FRONT_FLIP_MDATACB_APK,cameraCallProcess)) {
+ if (!strcmp("com.tencent.mm", cameraCallProcess)) {
+                    /* If the device's default orientation is portrait, the front camera orientation must be
+                     * configured as 270 and the back camera orientation as 90.
+                     * If the device's default orientation is landscape, both must be configured as 0.
+                     * WeChat (weixin) video calls don't need flipping when the default orientation is portrait.
+                     */
+ if (gCamInfos[cameraId].facing_info.orientation == 270)
+ dataCbFrontFlip = false;
+ else
+ dataCbFrontFlip = true;
+ } else {
+ dataCbFrontFlip = true;
+ }
+ } else {
+ dataCbFrontFlip = false;
+ }
+#endif
+ } else {
+ dataCbFrontMirror = false;
+ dataCbFrontFlip = false;
+ }
+#else
+ dataCbFrontMirror = false;
+#endif
+ mEventNotifier->setDatacbFrontMirrorFlipState(dataCbFrontMirror,dataCbFrontFlip);
+
+#if 1
+ // register for sensor events
+ mSensorListener = new SensorListener();
+ if (mSensorListener.get()) {
+ if (mSensorListener->initialize() == NO_ERROR) {
+ mSensorListener->setCallbacks(gsensor_orientation_cb, this);
+ mSensorListener->enableSensor(SensorListener::SENSOR_ORIENTATION);
+ } else {
+ LOGE("Error initializing SensorListener. not fatal, continuing");
+ mSensorListener.clear();
+ mSensorListener = NULL;
+ }
+ }
+#else
+ mSensorListener = NULL;
+#endif
+
+ LOG_FUNCTION_NAME_EXIT
+
+
+}
+
+
+void CameraHal::updateParameters(CameraParameters & tmpPara)
+{
+ LOG_FUNCTION_NAME
+
+ tmpPara = mCameraAdapter->getParameters();
+
+ LOG_FUNCTION_NAME_EXIT
+}
+
+//release resources
+CameraHal::~CameraHal()
+{
+ LOG_FUNCTION_NAME
+ if (mSensorListener.get()) {
+ mSensorListener->disableSensor(SensorListener::SENSOR_ORIENTATION);
+ mSensorListener.clear();
+ mSensorListener = NULL;
+ }
+
+ if(mDisplayAdapter){
+ delete mDisplayAdapter;
+ mDisplayAdapter = NULL;
+ }
+ if(mEventNotifier){
+ delete mEventNotifier;
+ mEventNotifier = NULL;
+ }
+ if(mCameraAdapter){
+ delete mCameraAdapter;
+ mCameraAdapter = NULL;
+ }
+ if(mPreviewBuf){
+ delete mPreviewBuf;
+ mPreviewBuf = NULL;
+ }
+ if(mVideoBuf){
+ delete mVideoBuf;
+ mVideoBuf = NULL;
+ }
+ if(mRawBuf){
+ delete mRawBuf;
+ mRawBuf = NULL;
+ }
+ if(mJpegBuf){
+ delete mJpegBuf;
+ mJpegBuf = NULL;
+ }
+ if(mCamMemManager){
+ delete mCamMemManager;
+ mCamMemManager =NULL;
+ }
+ if(mCommandThread != NULL){
+ Message_cam msg;
+ Semaphore sem;
+ msg.command = CMD_EXIT;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ setCamStatus(CMD_EXIT_PREPARE, 1);
+ commandThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ if(mCameraStatus&CMD_EXIT_DONE)
+ LOG1("exit command OK.");
+ mCommandThread->requestExitAndWait();
+ mCommandThread.clear();
+ }
+
+    LOGD("CameraHal destroyed successfully");
+ LOG_FUNCTION_NAME_EXIT
+}
+void CameraHal::release()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+ LOG_FUNCTION_NAME_EXIT
+}
+
+
+int CameraHal::setPreviewWindow(struct preview_stream_ops *window)
+{
+ LOG_FUNCTION_NAME
+ Message_cam msg;
+ Semaphore sem;
+ Mutex::Autolock lock(mLock);
+ if ((mCommandThread != NULL)) {
+ msg.command = CMD_SET_PREVIEW_WINDOW;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ msg.arg2 = (void*)window;
+ setCamStatus(CMD_SET_PREVIEW_WINDOW_PREPARE, 1);
+ commandThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ if(mCameraStatus&CMD_SET_PREVIEW_WINDOW_DONE)
+ LOG1("set preview window OK.");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+
+//async
+int CameraHal::startPreview()
+{
+ LOG_FUNCTION_NAME
+ Message_cam msg;
+
+#ifdef ANDROID_6_X
+ if ( mSensorListener == NULL ) {
+ // register for sensor events
+ mSensorListener = new SensorListener();
+ if (mSensorListener.get()) {
+ if (mSensorListener->initialize() == NO_ERROR) {
+ mSensorListener->setCallbacks(gsensor_orientation_cb, this);
+ mSensorListener->enableSensor(SensorListener::SENSOR_ORIENTATION);
+ } else {
+ LOGE("Error initializing SensorListener. not fatal, continuing");
+ mSensorListener.clear();
+ mSensorListener = NULL;
+ }
+ }
+ }
+#endif
+
+ Mutex::Autolock lock(mLock);
+ if ((mCommandThread != NULL)) {
+ msg.command = CMD_PREVIEW_START;
+ msg.arg1 = NULL;
+ setCamStatus(CMD_PREVIEW_START_PREPARE, 1);
+ commandThreadCommandQ.put(&msg);
+ }
+ // mPreviewCmdReceived = true;
+ setCamStatus(STA_PREVIEW_CMD_RECEIVED, 1);
+ LOG_FUNCTION_NAME_EXIT
+ return NO_ERROR ;
+}
+
+//MUST SYNC
+void CameraHal::stopPreview()
+{
+ LOG_FUNCTION_NAME
+ Message_cam msg;
+ Semaphore sem;
+ Mutex::Autolock lock(mLock);
+ if ((mCommandThread != NULL)) {
+ msg.command = CMD_PREVIEW_STOP;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ setCamStatus(CMD_PREVIEW_STOP_PREPARE, 1);
+ commandThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ if(mCameraStatus&CMD_PREVIEW_STOP_DONE)
+ LOGD("stop preview OK.");
+ }
+ //mPreviewCmdReceived = false;
+ setCamStatus(STA_PREVIEW_CMD_RECEIVED, 0);
+ LOG_FUNCTION_NAME_EXIT
+}
+
+void CameraHal::setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+ mEventNotifier->setCallbacks(notify_cb, data_cb,data_cb_timestamp,get_memory,user,&mLock);
+ LOG_FUNCTION_NAME_EXIT
+}
+
+void CameraHal::enableMsgType(int32_t msgType)
+{
+
+ LOG_FUNCTION_NAME
+ if(msgType & (CAMERA_MSG_PREVIEW_FRAME)){
+ //mParameters is sync
+ int w,h;
+ const char * fmt= mParameters.getPreviewFormat();
+ mParameters.getPreviewSize(&w, &h);
+ mEventNotifier->setPreviewDataCbRes(w, h, fmt);
+ }
+ mEventNotifier->enableMsgType(msgType);
+ LOG_FUNCTION_NAME_EXIT
+}
+
+void CameraHal::disableMsgType(int32_t msgType)
+{
+ LOG_FUNCTION_NAME
+ mEventNotifier->disableMsgType(msgType);
+ LOG_FUNCTION_NAME_EXIT
+}
+
+int CameraHal::msgTypeEnabled(int32_t msgType)
+{
+    LOG_FUNCTION_NAME
+    /* store the result so LOG_FUNCTION_NAME_EXIT is not unreachable */
+    int enabled = mEventNotifier->msgEnabled(msgType);
+    LOG_FUNCTION_NAME_EXIT
+    return enabled;
+}
+
+int CameraHal::autoFocus()
+{
+ LOG_FUNCTION_NAME
+ Message_cam msg;
+ Semaphore sem;
+ Mutex::Autolock lock(mLock);
+
+ if ((mCommandThread != NULL)) {
+ msg.command = CMD_AF_START;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ setCamStatus(CMD_AF_START_PREPARE, 1);
+ commandThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ if(mCameraStatus&CMD_AF_START_DONE)
+ LOG1("AF command OK.");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return NO_ERROR;
+}
+
+int CameraHal::cancelAutoFocus()
+{
+ LOG_FUNCTION_NAME
+ Message_cam msg;
+ Semaphore sem;
+ Mutex::Autolock lock(mLock);
+ if ((mCommandThread != NULL)) {
+ msg.command = CMD_AF_CANCEL;
+ sem.Create();
+ msg.arg1 = NULL;
+ commandThreadCommandQ.put(&msg);
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+ return NO_ERROR;
+}
+int CameraHal::previewEnabled()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+ LOG_FUNCTION_NAME_EXIT
+ return (mCameraStatus&STA_PREVIEW_CMD_RECEIVED);
+}
+int CameraHal::storeMetaDataInBuffers(int enable)
+{
+ LOG1("%s: enable = %d",__FUNCTION__, enable);
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+
+ return mEventNotifier->storeMetadataInBuffer((enable != 0));
+}
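+
+/*
+ * startRecording(): when CONFIG_CAMERA_SETVIDEOSIZE is 0 the picture size is
+ * forced to the current video size before recording starts, so a snapshot
+ * taken during recording matches the video resolution.
+ */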
+int CameraHal::startRecording()
+{
+ int err=NO_ERROR,prevStatus = -1;
+ CameraParameters params;
+ int recordW,recordH;
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+ //get preview status
+ //if(mPreviewCmdReceived)
+ if(mCameraStatus&STA_PREVIEW_CMD_RECEIVED)
+ // prevStatus = mCameraAdapter->getCurPreviewState(&recordW, &recordH);
+ prevStatus = mCameraAdapter->getCurVideoSize(&recordW, &recordH);
+ if(prevStatus == -1){
+ err = -1;
+ return err;
+ }
+ #if (CONFIG_CAMERA_SETVIDEOSIZE == 0)
+ //set picture size
+ mParameters.setPictureSize(recordW,recordH);
+ setParametersUnlock(mParameters);
+ updateParameters(mParameters);
+ #endif
+
+ //notify event
+ mEventNotifier->startRecording(recordW,recordH);
+ //mRecordRunning = true;
+ setCamStatus(STA_RECORD_RUNNING, 1);
+ LOG_FUNCTION_NAME_EXIT
+ return err;
+}
+
+void CameraHal::stopRecording()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+ mEventNotifier->stopRecording();
+ //mRecordRunning = false;
+ setCamStatus(STA_RECORD_RUNNING, 0);
+ LOG_FUNCTION_NAME_EXIT
+}
+
+int CameraHal::recordingEnabled()
+{
+ LOG_FUNCTION_NAME
+ LOG_FUNCTION_NAME_EXIT
+ Mutex::Autolock lock(mLock);
+ //return mRecordRunning;
+ return (mCameraStatus&STA_RECORD_RUNNING);
+}
+void CameraHal::releaseRecordingFrame(const void *opaque)
+{
+ // LOG_FUNCTION_NAME
+ mEventNotifier->releaseRecordingFrame(opaque);
+ // LOG_FUNCTION_NAME_EXIT
+}
+
+int CameraHal::takePicture()
+{
+ LOG_FUNCTION_NAME
+ Message_cam msg;
+ Semaphore sem;
+ Mutex::Autolock lock(mLock);
+ if ((mCommandThread != NULL)) {
+ msg.command = CMD_CONTINUOS_PICTURE;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ setCamStatus(CMD_CONTINUOS_PICTURE_PREPARE, 1);
+ commandThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ if(mCameraStatus&CMD_CONTINUOS_PICTURE_DONE)
+ LOGD("take picture command OK.");
+ }
+    //when returning to preview state, cameraservice will call startPreview again.
+ //mPreviewCmdReceived = false;
+ setCamStatus(STA_PREVIEW_CMD_RECEIVED, 0);
+ LOG_FUNCTION_NAME_EXIT
+ return NO_ERROR;
+}
+
+int CameraHal::cancelPicture()
+{
+ LOG_FUNCTION_NAME
+ Message_cam msg;
+ Semaphore sem;
+ Mutex::Autolock lock(mLock);
+ if ((mCommandThread != NULL)) {
+ msg.command = CMD_PREVIEW_CAPTURE_CANCEL;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ setCamStatus(CMD_PREVIEW_CAPTURE_CANCEL_PREPARE, 1);
+ commandThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ if(mCameraStatus&CMD_PREVIEW_CAPTURE_CANCEL_DONE)
+ LOGD("cancel picture OK.");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return NO_ERROR;
+}
+
+int CameraHal::setParameters(const char* parameters)
+{
+ CameraParameters params;
+ String8 str_params(parameters);
+
+ params.unflatten(str_params);
+ return setParameters(params);
+}
+
+//sync
+int CameraHal::setParameters(const CameraParameters &params_set)
+{
+ int err = 0;
+ LOG_FUNCTION_NAME
+ //get preview status
+ //if(mPreviewCmdReceived){
+ if(mCameraStatus&STA_PREVIEW_CMD_RECEIVED){
+        //preview has already started; some parameters cannot be changed now
+
+ }
+
+ Message_cam msg;
+ Semaphore sem;
+ Mutex::Autolock lock(mLock);
+ if ((mCommandThread != NULL)) {
+ msg.command = CMD_SET_PARAMETERS;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ msg.arg2 = (void*)(&params_set);
+ setCamStatus(CMD_SET_PARAMETERS_PREPARE, 1);
+ commandThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ if(mCameraStatus&CMD_SET_PARAMETERS_DONE){
+ LOG1("set parameters OK.");
+ }else{
+ err = -1;
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return err;
+}
+
+
+int CameraHal::setParametersUnlock(const CameraParameters &params_set)
+{
+ int err = 0;
+ LOG_FUNCTION_NAME
+ //get preview status
+ //if(mPreviewCmdReceived){
+ if(mCameraStatus&STA_PREVIEW_CMD_RECEIVED){
+        //preview has already started; some parameters cannot be changed now
+
+ }
+
+ Message_cam msg;
+ Semaphore sem;
+ if ((mCommandThread != NULL)) {
+ msg.command = CMD_SET_PARAMETERS;
+ sem.Create();
+ msg.arg1 = (void*)(&sem);
+ msg.arg2 = (void*)(&params_set);
+ commandThreadCommandQ.put(&msg);
+ if(msg.arg1){
+ sem.Wait();
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return err;
+}
+
+char* CameraHal::getParameters()
+{
+ String8 params_str8;
+ char* params_string;
+ CameraParameters mParams = mParameters;
+
+ params_str8 = mParams.flatten();
+
+ // camera service frees this string...
+ params_string = (char*) malloc(sizeof(char) * (params_str8.length()+1));
+ strcpy(params_string, params_str8.string());
+
+ ///Return the current set of parameters
+
+ return params_string;
+}
+
+void CameraHal::putParameters(char *parms)
+{
+ free(parms);
+}
+int CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ LOG_FUNCTION_NAME
+ int ret = 0,drv_w,drv_h;
+ Mutex::Autolock lock(mLock);
+
+ if(cmd == CAMERA_CMD_START_FACE_DETECTION){
+ const char* szFace = mParameters.get(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW);
+ if(!szFace || ( szFace && (strtol(szFace,0,0) == 0))){
+ ret = BAD_VALUE;
+ return ret;
+ }
+ LOGD("sendCommand start face detection");
+ enableMsgType(CAMERA_MSG_PREVIEW_METADATA);
+ Message_cam msg;
+ if ((mCommandThread != NULL)) {
+ msg.command = CMD_START_FACE_DETECTION;
+ msg.arg1 = (void*)(NULL);
+ msg.arg2 = (void*)szFace;
+ commandThreadCommandQ.put(&msg);
+ }
+ }else if(cmd == CAMERA_CMD_STOP_FACE_DETECTION){
+ const char* szFace = mParameters.get(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW);
+ if(!szFace || ( szFace && (strtol(szFace,0,0) == 0))){
+ ret = BAD_VALUE;
+ return ret;
+ }
+ disableMsgType(CAMERA_MSG_PREVIEW_METADATA);
+
+ mEventNotifier->stopFaceDection();
+ LOGD("sendCommand stop face detection ");
+ }
+ return ret;
+}
+
+
+int CameraHal::dump(int fd)
+{
+ int i;
+ char trace_level[PROPERTY_VALUE_MAX];
+ int level;
+
+ property_get(CAMERAHAL_TRACE_LEVEL_PROPERTY_KEY, trace_level, "0");
+
+ sscanf(trace_level,"%d",&level);
+
+ setTracerLevel(level);
+
+ commandThreadCommandQ.dump();
+ if(mCameraAdapter)
+ mCameraAdapter->dump(mCamId);
+ if(mDisplayAdapter)
+ mDisplayAdapter->dump();
+ if(mEventNotifier)
+ mEventNotifier->dump();
+
+
+ return 0;
+}
+int CameraHal::getCameraFd()
+{
+ Mutex::Autolock lock(mLock);
+
+ return mCameraAdapter->getCameraFd();
+}
+
+int CameraHal::getCameraId()
+{
+
+ return mCamId;
+}
+
+
+int CameraHal::selectPreferedDrvSize(int *width,int * height,bool is_capture)
+{
+ return mCameraAdapter->selectPreferedDrvSize(width,height,is_capture);
+}
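+
+/*
+ * fillPicturInfo() gathers the JPEG/EXIF related settings for the next capture
+ * from mParameters: picture size, JPEG quality, rotation, thumbnail settings,
+ * the optional GPS tags (missing tags are stored as -1), focal length, and the
+ * flash / white-balance state.
+ */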
+int CameraHal::fillPicturInfo(picture_info_s& picinfo)
+{
+ mParameters.getPictureSize(&picinfo.w, &picinfo.h);
+ picinfo.quality= mParameters.getInt("jpeg-quality");
+ picinfo.rotation = strtol(mParameters.get(CameraParameters::KEY_ROTATION),0,0);
+ picinfo.thumbquality= strtol(mParameters.get(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY),0,0);
+ picinfo.thumbwidth= strtol(mParameters.get(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH),0,0);
+ picinfo.thumbheight= strtol(mParameters.get(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT),0,0);
+
+ // gps longitude
+ const char *new_gps_longitude_str = mParameters.get(CameraParameters::KEY_GPS_LONGITUDE);
+ if (new_gps_longitude_str) {
+ picinfo.longtitude= strtod(new_gps_longitude_str,NULL);
+ } else {
+ picinfo.longtitude = -1;
+ }
+
+ // gps altitude
+ const char *new_gps_altitude_str = mParameters.get(CameraParameters::KEY_GPS_ALTITUDE);
+ if (new_gps_altitude_str) {
+ picinfo.altitude= strtod(new_gps_altitude_str,NULL);
+ } else {
+ picinfo.altitude = -1;
+ }
+
+ // gps latitude
+ const char *new_gps_latitude_str = mParameters.get(CameraParameters::KEY_GPS_LATITUDE);
+ if (new_gps_latitude_str) {
+ picinfo.latitude= strtod(new_gps_latitude_str,NULL);
+ } else {
+ picinfo.latitude = -1;
+ }
+
+ // gps timestamp
+ const char *new_gps_timestamp_str = mParameters.get(CameraParameters::KEY_GPS_TIMESTAMP);
+ if (new_gps_timestamp_str) {
+ picinfo.timestamp= strtol(new_gps_timestamp_str,0,0);
+ } else {
+ picinfo.timestamp = -1;
+ }
+
+ char* getMethod = (char*)mParameters.get(CameraParameters::KEY_GPS_PROCESSING_METHOD);//getMethod : len <= 32
+ if(getMethod)
+ strcpy(picinfo.getMethod,getMethod);
+    //focal length
+ picinfo.focalen = strtol(mParameters.get(CameraParameters::KEY_FOCAL_LENGTH),0,0);
+ //flash
+ if (mParameters.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES) && mParameters.get(CameraParameters::KEY_FLASH_MODE)) {
+ if (!strcmp(CameraParameters::FLASH_MODE_OFF, mParameters.get(CameraParameters::KEY_FLASH_MODE))) {
+ picinfo.flash = 0;
+ } else {
+ picinfo.flash = mCameraAdapter->getFlashStatus();
+ }
+ } else {
+ picinfo.flash = 0;
+ }
+ //white balance
+ if (mParameters.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE) && mParameters.get(CameraParameters::KEY_WHITE_BALANCE)) {
+ if (!strcmp(CameraParameters::WHITE_BALANCE_AUTO, mParameters.get(CameraParameters::KEY_WHITE_BALANCE))) {
+ picinfo.whiteBalance = 0;
+ } else {
+ picinfo.whiteBalance = 1;
+ }
+ } else {
+ picinfo.whiteBalance = 0;
+ }
+ picinfo.fmt = V4L2_PIX_FMT_NV12;
+
+ return 0;
+}
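+
+/*
+ * commandThread() is the single worker loop behind the public API above: it
+ * blocks on commandThreadCommandQ, dispatches CMD_* messages (preview
+ * start/stop, set preview window, set parameters, capture, autofocus, face
+ * detection, exit), sets the matching <CMD>_DONE bit in mCameraStatus, and
+ * signals the Semaphore passed in msg.arg1 so the blocked caller can return.
+ */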
+void CameraHal::commandThread()
+{
+ Message_cam msg;
+ bool shouldLive = true;
+ bool has_message;
+ int err = 0,tmp_arg1;
+ struct v4l2_control control;
+ struct v4l2_queryctrl hdr;
+ picture_info_s picinfo;
+ memset(&picinfo,0x0,sizeof(picture_info_s));
+ int prevStatus = -1,drv_w,drv_h,picture_w,picture_h;
+ int app_previw_w = 0,app_preview_h = 0;
+ bool isRestartPreview = false;
+ char prop_value[PROPERTY_VALUE_MAX];
+ LOG_FUNCTION_NAME
+
+ while(shouldLive) {
+get_command:
+ //mCommandRunning = -1;
+ memset(&msg,0,sizeof(msg));
+ commandThreadCommandQ.get(&msg);
+
+ switch(msg.command)
+ {
+ case CMD_PREVIEW_START:
+ {
+ LOGD("%s(%d):receive CMD_PREVIEW_START",__FUNCTION__,__LINE__);
+
+ RESTART_PREVIEW_INTERNAL:
+
+            //check the current preview status
+ mParameters.getPreviewSize(&app_previw_w,&app_preview_h);
+ prevStatus = mCameraAdapter->getCurPreviewState(&drv_w,&drv_h);
+ int prefered_w = app_previw_w, prefered_h = app_preview_h;
+ selectPreferedDrvSize(&prefered_w,&prefered_h,false);
+
+ if(prevStatus){
+ //get preview size
+ property_get("sys.cts_camera.status",prop_value, "false");
+ if(strcmp(prop_value,"true") && ((prefered_w != drv_w) || (prefered_h != drv_h))){
+ if (mDisplayAdapter->getDisplayStatus() == DisplayAdapter::STA_DISPLAY_RUNNING) {
+ err=mDisplayAdapter->pauseDisplay();
+ if(err != -1)
+ setCamStatus(STA_DISPLAY_PAUSE, 1);
+ }
+
+ //stop eventnotify
+ mEventNotifier->stopReceiveFrame();
+ //need to stop preview.
+ err=mCameraAdapter->stopPreview();
+ if(err != 0)
+ goto PREVIEW_START_OUT;
+ //set new drv size;
+ drv_w = prefered_w;
+ drv_h = prefered_h;
+ err=mCameraAdapter->startPreview(app_previw_w,app_preview_h,drv_w, drv_h, 0, false);
+ if(mEventNotifier->msgEnabled(CAMERA_MSG_PREVIEW_FRAME))
+ mEventNotifier->startReceiveFrame();
+ if(mEventNotifier->msgEnabled(CAMERA_MSG_PREVIEW_METADATA))
+ mEventNotifier->startFaceDection(drv_w,drv_h, -1);
+ if(err != 0)
+ goto PREVIEW_START_OUT;
+ }
+
+ }else{
+
+ if (mDisplayAdapter->getDisplayStatus() == DisplayAdapter::STA_DISPLAY_RUNNING) {
+ err=mDisplayAdapter->pauseDisplay();
+ if(err != -1)
+ setCamStatus(STA_DISPLAY_PAUSE, 1);
+ }
+
+ drv_w = prefered_w;
+ drv_h = prefered_h;
+ //stop eventnotify
+ mEventNotifier->stopReceiveFrame();
+                //select a proper preview size.
+ err=mCameraAdapter->startPreview(app_previw_w,app_preview_h,drv_w, drv_h, 0, false);
+ if(err != 0)
+ goto PREVIEW_START_OUT;
+ if(mEventNotifier->msgEnabled(CAMERA_MSG_PREVIEW_FRAME))
+ mEventNotifier->startReceiveFrame();
+ }
+
+ if(mDisplayAdapter->getPreviewWindow())
+ {
+ if (mDisplayAdapter->getDisplayStatus() != DisplayAdapter::STA_DISPLAY_RUNNING) {
+ err = mDisplayAdapter->startDisplay(app_previw_w, app_preview_h);
+ if(err != -1)
+ setCamStatus(STA_DISPLAY_RUNNING, 1);
+ else
+ goto PREVIEW_START_OUT;
+ }
+ }
+
+ setCamStatus(CMD_PREVIEW_START_DONE, 1);
+ PREVIEW_START_OUT:
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ LOGD("%s(%d): CMD_PREVIEW_START out",__FUNCTION__,__LINE__);
+ break;
+ }
+
+ case CMD_PREVIEW_STOP:
+ {
+ LOGD("%s(%d):receive CMD_PREVIEW_STOP",__FUNCTION__,__LINE__);
+ //pause display
+ if(mDisplayAdapter->getDisplayStatus() == DisplayAdapter::STA_DISPLAY_RUNNING){
+ err =mDisplayAdapter->pauseDisplay();
+ if(err != -1)
+ setCamStatus(STA_DISPLAY_PAUSE, 1);
+ else
+ goto CMD_PREVIEW_STOP_OUT;
+ }
+ //stop eventnotify
+ mEventNotifier->stopReceiveFrame();
+ //stop preview
+ err=mCameraAdapter->stopPreview();
+ if(err != 0)
+ goto CMD_PREVIEW_STOP_OUT;
+ //wake up wait thread.
+
+ setCamStatus(CMD_PREVIEW_STOP_DONE, 1);
+ CMD_PREVIEW_STOP_OUT:
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ LOGD("%s(%d): CMD_PREVIEW_STOP out",__FUNCTION__,__LINE__);
+ break;
+ }
+ case CMD_SET_PREVIEW_WINDOW:
+ {
+ LOGD("%s(%d):receive CMD_SET_PREVIEW_WINDOW",__FUNCTION__,__LINE__);
+ mParameters.getPreviewSize(&app_previw_w,&app_preview_h);
+ mDisplayAdapter->setPreviewWindow((struct preview_stream_ops *)msg.arg2);
+ prevStatus = mCameraAdapter->getCurPreviewState(&drv_w,&drv_h);
+
+ if ((mDisplayAdapter->getPreviewWindow()) && prevStatus) {
+ err=mDisplayAdapter->startDisplay(app_previw_w, app_preview_h);
+ if(err != -1)
+ setCamStatus(STA_DISPLAY_RUNNING, 1);
+ } else {
+ LOG1("%s(%d): not start display now",__FUNCTION__,__LINE__);
+ }
+
+ setCamStatus(CMD_SET_PREVIEW_WINDOW_DONE, 1);
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ LOGD("%s(%d): CMD_SET_PREVIEW_WINDOW out",__FUNCTION__,__LINE__);
+ break;
+ }
+ case CMD_SET_PARAMETERS:
+ {
+ LOG1("%s(%d): receive CMD_SET_PARAMETERS", __FUNCTION__,__LINE__);
+ //set parameters
+ CameraParameters* para = (CameraParameters*)msg.arg2;
+ if(mCameraAdapter->setParameters(*para,isRestartPreview) == 0){
+ //update parameters
+ updateParameters(mParameters);
+ setCamStatus(CMD_SET_PARAMETERS_DONE, 1);
+ }else{
+ //update parameters
+ updateParameters(mParameters);
+ }
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+
+ LOG1("%s(%d): CMD_SET_PARAMETERS out",__FUNCTION__,__LINE__);
+ if((isRestartPreview) && (mCameraStatus & STA_PREVIEW_CMD_RECEIVED)) {
+            LOGD("%s: setParameters requires a preview restart!",__FUNCTION__);
+ msg.arg1 = NULL;
+ goto RESTART_PREVIEW_INTERNAL;
+ }
+ break;
+
+ }
+ case CMD_PREVIEW_CAPTURE_CANCEL:
+ {
+ LOGD("%s(%d): receive CMD_PREVIEW_CAPTURE_CANCEL", __FUNCTION__,__LINE__);
+ mEventNotifier->flushPicture();
+ setCamStatus(CMD_PREVIEW_CAPTURE_CANCEL_DONE, 1);
+ //reset pic num to 1
+ mParameters.set(KEY_CONTINUOUS_PIC_NUM,"1");
+ mCameraAdapter->setParameters(mParameters,isRestartPreview);
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ LOGD("%s(%d): CMD_PREVIEW_CAPTURE_CANCEL out",__FUNCTION__,__LINE__);
+ break;
+ }
+ case CMD_CONTINUOS_PICTURE:
+ {
+ LOGD("%s(%d): receive CMD_CONTINUOS_PICTURE", __FUNCTION__,__LINE__);
+ int pic_num = strtod(mParameters.get(KEY_CONTINUOUS_PIC_NUM),NULL);
+ mParameters.getPictureSize(&picture_w, &picture_h);
+ //need to pause display(is recording? is continuous picture ?)
+ //if(!mRecordRunning && (pic_num == 1)){
+ if(!(mCameraStatus&STA_RECORD_RUNNING) && (pic_num == 1)){
+ err=mDisplayAdapter->pauseDisplay();
+ if(err != -1)
+ setCamStatus(STA_DISPLAY_PAUSE, 1);
+ }
+ //need to resize preview size ?
+ int prefered_w = picture_w, prefered_h = picture_h;
+ selectPreferedDrvSize(&prefered_w,&prefered_h,true);
+ prevStatus = mCameraAdapter->getCurPreviewState(&drv_w,&drv_h);
+ if(prevStatus){
+ //get preview size
+ //if(mRecordRunning){
+ property_get("sys.cts_camera.status",prop_value, "false");
+ if(mCameraStatus&STA_RECORD_RUNNING){
+                    LOGE("%s(%d): setting picture size while recording is not supported.",__FUNCTION__,__LINE__);
+ } else if(strcmp(prop_value,"true") && ((prefered_w != drv_w) || (prefered_h != drv_h))){
+ //need to stop preview.
+ err=mDisplayAdapter->pauseDisplay();
+ if(err != -1)
+ setCamStatus(STA_DISPLAY_PAUSE, 1);
+ //stop eventnotify
+ mEventNotifier->stopReceiveFrame();
+
+ err =mCameraAdapter->stopPreview();
+ if(err != 0)
+ goto CMD_CONTINUOS_PICTURE_OUT;
+
+ //set new drv size;
+ drv_w = prefered_w;
+ drv_h = prefered_h;
+ err=mCameraAdapter->startPreview(picture_w,picture_h,drv_w, drv_h, 0, true);
+ if(err != 0)
+ goto CMD_CONTINUOS_PICTURE_OUT;
+
+ if(pic_num > 1)
+ {
+ mParameters.getPreviewSize(&app_previw_w,&app_preview_h);
+ err=mDisplayAdapter->startDisplay(app_previw_w,app_preview_h);
+ if(err != -1)
+ setCamStatus(STA_DISPLAY_RUNNING, 1);
+ else
+ goto CMD_CONTINUOS_PICTURE_OUT;
+ }
+ }
+ else if((prefered_w == drv_w) && (prefered_h == drv_h)){
+                    /*for soc camera flash control when preview size == picture size*/
+ err = mCameraAdapter->flashcontrol();
+ }
+
+ }else{
+                //select a proper preview size.
+                LOGD("%s(%d): WARNING, take picture requested before preview has started!",__FUNCTION__,__LINE__);
+ err=mCameraAdapter->startPreview(picture_w,picture_h,drv_w, drv_h, 0, true);
+ if(err != 0)
+ goto CMD_CONTINUOS_PICTURE_OUT;
+ }
+ //take picture
+ picinfo.num = pic_num;
+ fillPicturInfo(picinfo);
+ mEventNotifier->takePicture(picinfo);
+
+ setCamStatus(CMD_CONTINUOS_PICTURE_DONE, 1);
+ CMD_CONTINUOS_PICTURE_OUT:
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ LOGD("%s(%d): CMD_CONTINUOS_PICTURE out",__FUNCTION__,__LINE__);
+ break;
+ }
+ case CMD_AF_START:
+ {
+ LOGD("%s(%d): receive CMD_AF_START", __FUNCTION__,__LINE__);
+ mCameraAdapter->autoFocus();
+
+ setCamStatus(CMD_AF_START_DONE, 1);
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ LOGD("%s(%d): exit receive CMD_AF_START", __FUNCTION__,__LINE__);
+ break;
+ }
+ case CMD_AF_CANCEL:
+ {
+ LOGD("%s(%d): receive CMD_AF_CANCEL", __FUNCTION__,__LINE__);
+ mCameraAdapter->cancelAutoFocus();
+ setCamStatus(CMD_AF_CANCEL_DONE, 1);
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ break;
+ }
+ case CMD_START_FACE_DETECTION:
+            LOGD("%s(%d): receive CMD_START_FACE_DETECTION", __FUNCTION__,__LINE__);
+ if(mCameraAdapter->getCurPreviewState(&drv_w,&drv_h) > 0)
+ mEventNotifier->startFaceDection(drv_w,drv_h,atoi((const char*)msg.arg2));
+ break;
+ case CMD_EXIT:
+ {
+ LOGD("%s(%d): receive CMD_EXIT", __FUNCTION__,__LINE__);
+ shouldLive = false;
+
+ setCamStatus(CMD_EXIT_DONE, 1);
+ if(msg.arg1)
+ ((Semaphore*)(msg.arg1))->Signal();
+ break;
+ }
+ default:
+            LOGE("%s(%d) receive unknown command(0x%x)\n", __FUNCTION__,__LINE__,msg.command);
+ break;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+ return;
+}
+
+int CameraHal::checkCamStatus(int cmd)
+{
+ int err = 0;
+ switch(cmd)
+ {
+ case CMD_PREVIEW_START:
+ {
+ break;
+ }
+ case CMD_PREVIEW_STOP:
+ {
+ if((mCameraStatus&STA_DISPLAY_STOP))
+ err = -1;
+ break;
+ }
+ case CMD_SET_PREVIEW_WINDOW:
+ {
+ if((mCameraStatus&STA_DISPLAY_RUNNING) || (mCameraStatus&STA_RECORD_RUNNING))
+ err = -1;
+ break;
+ }
+ case CMD_SET_PARAMETERS:
+ {
+ break;
+ }
+ case CMD_PREVIEW_CAPTURE_CANCEL:
+ {
+ break;
+ }
+ case CMD_CONTINUOS_PICTURE:
+ {
+ if((mCameraStatus&STA_DISPLAY_RUNNING)==0x0)
+ err = -1;
+ break;
+ }
+ case CMD_AF_START:
+ {
+ break;
+ }
+ case CMD_AF_CANCEL:
+ {
+ break;
+ }
+ }
+
+
+ return err;
+}
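+
+/*
+ * mCameraStatus is a bit field.  Every command owns a PREPARE bit and a DONE
+ * bit sharing one per-command MASK, plus a few STA_* state bits for
+ * preview/record/display.  setCamStatus() clears the whole mask before setting
+ * either bit, so PREPARE and DONE are mutually exclusive for a given command.
+ * Illustrative sequence, using bit names from this file:
+ *
+ *   setCamStatus(CMD_AF_START_PREPARE, 1);  // mask cleared, PREPARE set
+ *   // ... commandThread() performs the autofocus ...
+ *   setCamStatus(CMD_AF_START_DONE, 1);     // mask cleared, DONE set
+ *   if (mCameraStatus & CMD_AF_START_DONE)  // caller sees the command finished
+ */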
+//type:1-set, 0-clear
+void CameraHal::setCamStatus(int status, int type)
+{
+ if(type)
+ {
+ if(status&CMD_PREVIEW_START_PREPARE)
+ {
+ mCameraStatus &= ~CMD_PREVIEW_START_MASK;
+ mCameraStatus |= CMD_PREVIEW_START_PREPARE;
+ }
+ if(status&CMD_PREVIEW_START_DONE)
+ {
+ mCameraStatus &= ~CMD_PREVIEW_START_MASK;
+ mCameraStatus |= CMD_PREVIEW_START_DONE;
+ }
+ if(status&CMD_PREVIEW_STOP_PREPARE)
+ {
+ mCameraStatus &= ~CMD_PREVIEW_STOP_MASK;
+ mCameraStatus |= CMD_PREVIEW_STOP_PREPARE;
+ }
+ if(status&CMD_PREVIEW_STOP_DONE)
+ {
+ mCameraStatus &= ~CMD_PREVIEW_STOP_MASK;
+ mCameraStatus |= CMD_PREVIEW_STOP_DONE;
+ }
+ if(status&CMD_SET_PREVIEW_WINDOW_PREPARE)
+ {
+ mCameraStatus &= ~CMD_SET_PREVIEW_WINDOW_MASK;
+ mCameraStatus |= CMD_SET_PREVIEW_WINDOW_PREPARE;
+ }
+ if(status&CMD_SET_PREVIEW_WINDOW_DONE)
+ {
+ mCameraStatus &= ~CMD_SET_PREVIEW_WINDOW_MASK;
+ mCameraStatus |= CMD_SET_PREVIEW_WINDOW_DONE;
+ }
+
+ if(status&CMD_SET_PARAMETERS_PREPARE)
+ {
+ mCameraStatus &= ~CMD_SET_PARAMETERS_MASK;
+ mCameraStatus |= CMD_SET_PARAMETERS_PREPARE;
+ }
+ if(status&CMD_SET_PARAMETERS_DONE)
+ {
+ mCameraStatus &= ~CMD_SET_PARAMETERS_MASK;
+ mCameraStatus |= CMD_SET_PARAMETERS_DONE;
+ }
+
+ if(status&CMD_PREVIEW_CAPTURE_CANCEL_PREPARE)
+ {
+ mCameraStatus &= ~CMD_PREVIEW_CAPTURE_CANCEL_MASK;
+ mCameraStatus |= CMD_PREVIEW_CAPTURE_CANCEL_PREPARE;
+ }
+ if(status&CMD_PREVIEW_CAPTURE_CANCEL_DONE)
+ {
+ mCameraStatus &= ~CMD_PREVIEW_CAPTURE_CANCEL_MASK;
+ mCameraStatus |= CMD_PREVIEW_CAPTURE_CANCEL_DONE;
+ }
+
+ if(status&CMD_CONTINUOS_PICTURE_PREPARE)
+ {
+ mCameraStatus &= ~CMD_CONTINUOS_PICTURE_MASK;
+ mCameraStatus |= CMD_CONTINUOS_PICTURE_PREPARE;
+ }
+ if(status&CMD_CONTINUOS_PICTURE_DONE)
+ {
+ mCameraStatus &= ~CMD_CONTINUOS_PICTURE_MASK;
+ mCameraStatus |= CMD_CONTINUOS_PICTURE_DONE;
+ }
+
+ if(status&CMD_AF_START_PREPARE)
+ {
+ mCameraStatus &= ~CMD_AF_START_MASK;
+ mCameraStatus |= CMD_AF_START_PREPARE;
+ }
+ if(status&CMD_AF_START_DONE)
+ {
+ mCameraStatus &= ~CMD_AF_START_MASK;
+ mCameraStatus |= CMD_AF_START_DONE;
+ }
+
+ if(status&CMD_AF_CANCEL_PREPARE)
+ {
+ mCameraStatus &= ~CMD_AF_CANCEL_MASK;
+ mCameraStatus |= CMD_AF_CANCEL_PREPARE;
+ }
+ if(status&CMD_AF_CANCEL_DONE)
+ {
+ mCameraStatus &= ~CMD_AF_CANCEL_MASK;
+ mCameraStatus |= CMD_AF_CANCEL_DONE;
+ }
+
+ if(status&CMD_EXIT_PREPARE)
+ {
+ mCameraStatus &= ~CMD_EXIT_MASK;
+ mCameraStatus |= CMD_EXIT_PREPARE;
+ }
+ if(status&CMD_EXIT_DONE)
+ {
+ mCameraStatus &= ~CMD_EXIT_MASK;
+ mCameraStatus |= CMD_EXIT_DONE;
+ }
+
+ if(status&STA_RECORD_RUNNING)
+ {
+ mCameraStatus |= STA_RECORD_RUNNING;
+ }
+
+ if(status&STA_PREVIEW_CMD_RECEIVED)
+ {
+ mCameraStatus |= STA_PREVIEW_CMD_RECEIVED;
+ }
+
+ if(status&STA_DISPLAY_RUNNING)
+ {
+ mCameraStatus &= ~STA_DISPLAY_MASK;
+ mCameraStatus |= STA_DISPLAY_RUNNING;
+ }
+ if(status&STA_DISPLAY_PAUSE)
+ {
+ mCameraStatus &= ~STA_DISPLAY_MASK;
+ mCameraStatus |= STA_DISPLAY_PAUSE;
+ }
+ if(status&STA_DISPLAY_STOP)
+ {
+ mCameraStatus &= ~STA_DISPLAY_MASK;
+ mCameraStatus |= STA_DISPLAY_STOP;
+ }
+ }
+ else
+ {
+ if(status&STA_RECORD_RUNNING)
+ {
+ mCameraStatus &= ~STA_RECORD_RUNNING;
+ }
+
+ if(status&STA_PREVIEW_CMD_RECEIVED)
+ {
+ mCameraStatus &= ~STA_PREVIEW_CMD_RECEIVED;
+ }
+ }
+}
+}; // namespace android
+
diff --git a/CameraHal/CameraHal.h b/CameraHal/CameraHal.h
new file mode 100755
index 0000000..3516216
--- /dev/null
+++ b/CameraHal/CameraHal.h
@@ -0,0 +1,1984 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_H
+#define ANDROID_HARDWARE_CAMERA_HARDWARE_H
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+#include <time.h>
+#include <fcntl.h>
+#include <dlfcn.h>
+#include <math.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/stat.h>
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <cutils/atomic.h>
+#include <linux/version.h>
+#include <linux/videodev2.h>
+#if defined(ANDROID_5_X)
+#include <linux/v4l2-controls.h>
+#endif
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/threads.h>
+#include <ui/GraphicBufferAllocator.h>
+#include <ui/GraphicBufferMapper.h>
+#include <ui/GraphicBuffer.h>
+#include <system/window.h>
+#include <hardware/hardware.h>
+#include <hardware/camera.h>
+#include <camera/CameraParameters.h>
+#include "Semaphore.h"
+#if defined(TARGET_RK29)
+#include <linux/android_pmem.h>
+#endif
+#include "MessageQueue.h"
+#include "../jpeghw/release/encode_release/hw_jpegenc.h"
+#include "../jpeghw/release/encode_release/rk29-ipp.h"
+#include "../librkvpu/vpu_global.h"
+
+#include "CameraHal_Module.h"
+#include "common_type.h"
+
+#include "vpu_global.h"
+#include "vpu_mem_pool.h"
+#include "SensorListener.h"
+#include "FaceDetector.h"
+
+
+/*
+*NOTE:
+* CONFIG_CAMERA_INVALIDATE_RGA is a debug macro;
+* it must be 0 in official builds.
+*/
+#define CONFIG_CAMERA_INVALIDATE_RGA 0
+
+
+#if defined(TARGET_RK30) && (defined(TARGET_BOARD_PLATFORM_RK30XX) || (defined(TARGET_BOARD_PLATFORM_RK2928)))
+#include "../libgralloc/gralloc_priv.h"
+#if (CONFIG_CAMERA_INVALIDATE_RGA==0)
+#include <hardware/rga.h>
+#endif
+#elif defined(TARGET_RK30) && defined(TARGET_BOARD_PLATFORM_RK30XXB)
+#include <hardware/hal_public.h>
+#include <hardware/rga.h>
+#elif defined(TARGET_RK3368) || defined(TARGET_RK3328) || defined(TARGET_RK312x) || defined(TARGET_RK3288)
+#include <hardware/img_gralloc_public.h>
+#include <hardware/rga.h>
+#elif defined(TARGET_RK29)
+#include "../libgralloc/gralloc_priv.h"
+#endif
+
+
+#include "CameraHal_Mem.h"
+#include "CameraHal_Tracer.h"
+
+extern "C" int getCallingPid();
+extern "C" void callStack();
+extern "C" int cameraPixFmt2HalPixFmt(const char *fmt);
+extern "C" void arm_nv12torgb565(int width, int height, char *src, short int *dst,int dstbuf_w);
+extern "C" int rga_nv12torgb565(int src_width, int src_height, char *src, short int *dst,
+ int dstbuf_width,int dst_width,int dst_height);
+extern "C" int rk_camera_yuv_scale_crop_ipp(int v4l2_fmt_src, int v4l2_fmt_dst,
+ long srcbuf, long dstbuf,int src_w, int src_h,int dst_w, int dst_h,bool rotation_180);
+extern "C" int YData_Mirror_Line(int v4l2_fmt_src, int *psrc, int *pdst, int w);
+extern "C" int UVData_Mirror_Line(int v4l2_fmt_src, int *psrc, int *pdst, int w);
+extern "C" int YuvData_Mirror_Flip(int v4l2_fmt_src, char *pdata, char *pline_tmp, int w, int h);
+extern "C" int YUV420_rotate(const unsigned char* srcy, int src_stride, unsigned char* srcuv,
+ unsigned char* dsty, int dst_stride, unsigned char* dstuv,
+ int width, int height,int rotate_angle);
+extern "C" int arm_camera_yuv420_scale_arm(int v4l2_fmt_src, int v4l2_fmt_dst,
+ char *srcbuf, char *dstbuf,int src_w, int src_h,int dst_w, int dst_h,bool mirror,int zoom_value);
+extern "C" char* getCallingProcess();
+
+extern "C" void arm_yuyv_to_nv12(int src_w, int src_h,char *srcbuf, char *dstbuf);
+
+extern "C" int cameraFormatConvert(int v4l2_fmt_src, int v4l2_fmt_dst, const char *android_fmt_dst,
+ char *srcbuf, char *dstbuf,long srcphy,long dstphy,int src_size,
+ int src_w, int src_h, int srcbuf_w,
+ int dst_w, int dst_h, int dstbuf_w,
+ bool mirror);
+
+extern "C" int rga_nv12_scale_crop(
+ int src_width, int src_height, char *src, short int *dst,
+ int dst_width,int dst_height,int zoom_val,
+ bool mirror,bool isNeedCrop,bool isDstNV21
+#if defined(RK_DRM_GRALLOC)
+ ,int dst_stride = 0
+ ,bool vir_addr = false
+#endif
+ );
+extern "C" int rk_camera_zoom_ipp(int v4l2_fmt_src, int srcbuf, int src_w, int src_h,int dstbuf,int zoom_value);
+extern "C" void generateJPEG(uint8_t* data,int w, int h,unsigned char* outbuf,int* outSize);
+extern "C" int util_get_gralloc_buf_fd(buffer_handle_t handle,int* fd);
+
+extern rk_cam_info_t gCamInfos[CAMERAS_SUPPORT_MAX];
+extern bool g_ctsV_flag;
+
+namespace android {
+
+/*
+* CAMERA HAL VERSION NOTE
+*
+*v0.2.0x00: add continues focus/auto focus, log level;
+*v0.3.0x00: add usb camera support;
+*v0.4.0x00: add flash;
+*v0.5.0x00: check whether focus and flash is available
+*v0.6.0x00: sync camerahal
+*v0.7.0x00:
+ (1)add lock to protect mFrameInfoArray ,fix mFrameInfoArray corrupt
+ (2)remove compile warnings
+ (3)sync mid v0.6.01
+*v0.8.0x00:
+ support fake camera
+*v0.9.0x00
+ do nothing if mFrameInfoArray has been cleared when return frame.
+*v0.0x0a.0x00:
+* 1) support no cam_board.xml;
+*v0.0x0b.0x00:
+* 1) support setVideoSize function;
+* 2) add cameraConfig function;
+* 3) sync mid v0.0x0a.01
+*v0.0x0c.0x00
+ 1) support mi output yuv420 when sensor output yuv422
+*v0.0x0d.0x00:
+* 1) add continues video focus, but is fixed focus for video recording;
+* 2) stop continus af before af oneShot, marvin isp af afOnshot and afProcessFrame may be conflict;
+*v0.0x0e.0x00:
+* 1) support digital zoom
+*v0.f.0:
+* this version sync below version:
+* v0.d.1:
+* 1) add continues video focus, but is fixed focus for video recording;
+* v0.d.2:
+* 1) invalidate <= 1080p picture size, because the flash feature is not yet perfect;
+* v0.d.3:
+* 1) stop focus and set max focus in disconnect camera for ov8825 vcm noise;
+* 2) set max focus in continues video focus;
+*v0.10.0:
+ 1) optimize flash unit
+ 2) fix zoom erro in cameraConfig function
+ v0.0x11.0
+ support ov8858 ov13850 sensor dirver for preview
+*v0.0x12.0
+ 1) add flash trig pol control
+*v0.0x13.0:
+ merge source code frome mid,include following version:
+ v0.d.0x04:
+ 1) support mirror frame which sended by mDataCb and from front camera,
+ config by CONFIG_CAMERA_FRONT_MIRROR_MDATACB;
+ v0.d.0x05:
+ 1) add wechat in CONFIG_CAMERA_FRONT_MIRROR_MDATACB_APK
+ v0.d.0x06:
+ 1) add support picture size to increase the speed of taking capture in DV
+ v0.d.0x07:
+ 1) support fake camera
+ v0.d.0x08:
+ 1) fill the flash info into exif
+*v0.0x14.0:
+ 1) zoom feature can config in board xml
+*v0.0x15.0:
+ 1) support flash auto mode
+ 2) uvc support iommu
+ 3) judge whether need to enable flash in preview status.
+ 4) flash pol must be initialized in setparameter,fix it
+*v0.0x16.0:
+ 1)add interface of getting awb param from isp.
+ 2)use makernote tag as customer infomation.
+*v0.0x17.0:
+ 1) modify the timing of getting awb param from isp, move it to CameraIspAdapter::bufferCb.
+*v0.0x18.0:
+* 1) match for Libisp v0.0x15.0(af speed up and add vcm current setting);
+*v0.0x19.0:
+ 1) substitute rga for arm to do nv12 scale and crop to improve performance
+ 2) support continuos shot feature
+ 3) digital zoom by rga,prevent from decresing framerate,so front cam can support digital zoom well now.
+*v0.0x20.0:
+ 1) add version info of getting from sensor XML file to makernote.
+*v0.0x21.0:
+ 1) strcmp func fatal erro in funtion CameraIspAdapter::isNeedToEnableFlash if no KEY_SUPPORTED_FLASH_MODES supported,fix it
+*v0.0x22.0:
+* 1) autofocus and afListenThread and afProcessFrame(in Libisp) may be deadlock;
+* 2) startAfContinous and stopAf may be frequently, af cmd queue is full. Main thread hold;
+* 3) fix digital zoom by isp, must by rga;
+* 4) encProcessThread / bufferCb / setParameters(changeVideoSize->StopPreview)maybe deadlock;
+* 5) digital zoom by rga, needn't reconfig size;
+*v0.0x23.0:
+ 1) needn't disable CAMERA_MSG_SHUTTER during continuos pic taken.
+*v0.0x24.0:
+ 1) fix compile warnning due to last commit
+*v0.0x25.0:
+ 1) support isp tunning
+*v0.0x26.0:
+ 1) mIsSendToTunningTh dosen't initialize in last commit,fix it
+*v0.0x27.0
+ 1) merge source code frome mid,include following version:
+ v0.0x22.2:
+ 1) add support 500w ;
+ 2) autofocus and take picture failed , because fix startAfOneShot failed;
+*v0.0x28.0
+* 1) flash mode can config in board xml.
+* 2) support MWB/ME.
+* 3) add func getFlashStatus, fix wrong flash status in jpeg exif info.
+*v0.0x29.0
+* 1) modify isp tunning func
+* 2) zoom is not available when pic taken if pic size less than 5M,fix it
+*v0.0x2a.0
+* 1) fix CTS failed items as follows :
+ testPreviewCallback,testPreviewFormats,testPreviewFpsRange, testPreviewPictureSizesCombination
+*v0.0x2b.0
+* 1) fix CTS failed items as follows :
+ testInvalidParameters,testParameters,testSceneMode,testImmediateZoom
+*v0.0x2c.0
+* 1) fix CTS failed items as follows :
+ testFaceDetection,testFocusAreas
+*v0.0x2d.0
+* 1) fix CTS failed items as follows :
+ testJpegExif,testVideoSnapshot
+*v0.0x2e.0
+* 1) when preview stopped ,preview cb should be stopped ,or may cause CTS faile
+*v0.0x2f.0
+* 1) add sensor drv version
+*v0.0x30.0
+* 1) check illumination is support or not by chkAwbIllumination for MWB;
+*v0.0x31.0
+* 1) invalidate ME for soc sensor;
+*v0.0x32.0
+* 1) lock aec when take picture;
+* 2) add check sharpness for low illumnation;
+*v0.0x33.0
+* 1) support iommu;
+ 2) merge source code frome mid,include following version:
+ *v0.0x2d.1
+ * 1) fix rk312x compile warning
+ *v0.0x2d.2
+ * 1) move camera config file from device to hardware
+ *v0.0x2d.3
+ * 1) fix last commit bug
+ testJpegExif,testVideoSnapshot
+ *v0.0x2e.3
+ * 1) when preview stopped ,preview cb should be stopped ,or may cause CTS faile
+*v0.0x34.0
+* 1)add awb stable
+* 2)add fov parameters
+* 3)modiy to fit more than 2 camera sensor board info
+*v0.0x35.0
+* 1) file is opened in func ispTuneStoreBuffer but not been closed,fix it;
+*v0.0x36.0
+* 1) modify fov format from int to float
+*v0.0x37.0
+* 1) set mwb when capture picture with changing resolution.
+*v0.0x38.0
+* 1) merge source code frome mid,include following version:
+* *v0.0x36.1
+ * 1) modify to pass cts verifier FOV
+ *v0.0x36.2
+ * 1) modify to pass cts verifier orientation
+ * 2) use arm scale when display,because VOIP need NV21
+ *v0.0x36.3
+ * 1) support new jpeg vpumalloc,size is set by cameraHal
+* 2) fix initDefaultParameters for previewsize/picturesize;
+*v0.0x39.0:
+* 1) setMe is invalidate when soc sensor;
+*v0.0x3a.0:
+* 1) enum sensor resolution and check for DV media_profiles.xml in CheckSensorSupportDV;
+*v0.0x3b.0:
+* 1) include box commit which usb adapter modify for initDefaultParameters
+*v1.0.0:
+* 1) fix media buffer may be lost when restart preview, because is only pause and start display;
+*v1.1.0:
+* 1) add face detection feature
+*v1.2.0:
+* 1) merge source code frome mid,include following version:
+ *v0.0x3b.1:
+ 1) support rk312x preview and picture taken .
+ *v0.0x3b.2:
+ 1) pre_scaling_mode must be set when using rga to do yuv tranform
+ *v1.0.1:
+ 1) 312x support iommu
+ 2) xml file is produced auto
+*v1.3.0:
+* 1) isp output uv may be isn't after y data closely for buffer address align, so must copy uv data;
+* 2) Don't panic when cam_board.xml and parser version check failed !
+*v1.4.0:
+* 1) isptuning add gammadisable
+*v1.5.0:
+* 1) add support TAE and TAF;
+*v1.6.0:
+* 1)uvc support 16 bit unaligned resolution.
+* 2)invalide auto,infinity,macro focus function for uvc.
+*v1.7.0:
+* 1) deadlock may happen between processFaceDetect with autofocu or takepic, fix it;
+ related commit refer to cameraservice.
+*v1.8.0:
+* 1) facedection cause framrate of preview decreased, fix it.
+*v1.9.0:
+* 1) register sensor device CAMSYS_REGISTER_DEVIO must check return value, may be this device id has been registered;
+*v1.a.0:
+* 1) facedection support bias face detect.
+*v1.0xb.0:
+* 1) Continue focus measure window from face region when face has been dected;
+*v1.0xc.0:
+* 1) register sensor device must transfer sensor name to driver, driver check this device is registed or not;
+*v1.0xd.0:
+* 1) optimize camera's memory, dynamic calculate the max buffer size that sensor need.
+* 2) support interpolation resolution and can config in cam_board.xml.
+*v1.0xe.0:
+* 1) AE and AF measure window return center after TAE and TAF;
+*
+*v1.0xf.0:
+* 1) support smile face detection.
+* 2) fix deadlock happend between takepic and preview data callback.
+ related commit refer to cameraservice.
+ 3) CTS all passed.
+*v1.0x10.0:
+ 1) selectPreferedDrvSize of isp get wrong size in last commit,fix it.
+*v1.0x11.0:
+ 1) modify for rga output format suport NV21.
+*v1.0x12.0:
+* 1) invalidate ME for auto framerate
+*v1.0x13.0:
+* 1) has something wrong with rga of rk312x mirror operation,fix it by yzm.
+* 2) correct illuminant name "Horizon" to "HORIZON".
+* 3) fix flashlight bug in mode 2.
+*v1.0x14.0:
+* 1) set contrast to improve quality.
+*v1.0x15.0:
+* 1) fix cancel pic error like this : takepic,then cancel pic quickly,enc thread havn't get frame at that thim.
+* 2) fix scan QR code error in wechat.
+*v1.0x16.0:
+* 1) 720p and 1080p framerate fix for video record;
+* 2) the preview resolution and framerate don't change after take picture;
+*v1.0x17.0:
+* 1) fix cts verify format test bugs(yv12);
+*v1.0x18.0:
+* 1) select capture resolution must check exposure time for preview solution; for Libisp v1.c.0
+*v1.0x19.0:
+* 1) the way of getting sensor xml version may cause array bound exceeded,fixed it.
+*v1.0x1a.0:
+* 1) select ressolution must check exposure time for capture,
+* check fps for video,
+* check fps and exposure time(*0.5) for dc preview;
+* for Libisp v1.0x0d.0;
+*v1.0x1b.0:
+* 1) Call mDataCb must trylock mainthread mutex, if trylock failed, this frame cancel. This operation is for deadlock;
+*v1.0x1c.0:
+* 1) support gamma out
+*v1.0x1d.0:
+* 1) fix ae cannot return centeral mesuremode after TAE when facedetect is disable;
+*v1.0x1e.0:
+* 1) add sensor lens config in cam_board.xml,lens name MUST be configed correctly according your hardware info.
+*v1.0x1f.0:
+* 1) Disable isp scale crop by CONFIG_CAMERA_SCALE_CROP_ISP for 1080p Sawtooth;
+ *v1.0x20.0:
+* 1) merge source code frome mid, version 1.0.0xe.
+*v1.0x21.0:
+* 1) merge source code frome mid,include following version:
+ *v1.0.2:
+ 1) fix uvc preview erro, caused by wrong MjpegDecoder phy addr.
+ *v1.0.3:
+ 1) fix uvc pic taken erro, caused by wrong phy addr(should use fd).
+ *v1.0.4:
+ 1) mMjpegDecoder didn't deinit ,this will cause mem leak,fix it
+ *v1.0.5:
+ 1) mMjpegDecoder.deInit in func CameraAdapter::cameraDestroy maybe called
+ when mMjpegDecoder havn't been initialized,fix it.
+ *v1.0.6:
+ 1) some variable of class soc adapter havn't been initialized ,fix it.
+ *v1.0.7:
+ 1) modify for rga output format suport NV21.
+ *v1.0.8:
+ 1) VIDIOC_QBUF operation in func getFrame MUST be protected by mCamDriverStreamLock
+ to ensure VIDIOC_QBUF sync with VIDIOC_STREAMOFF
+ *v1.0.9:
+ 1) fix 312x rga issues.
+ 2) disable cif soc sensor DV resolution 800x600(VPU IOMMU pagefault occured when
+ snapshot during recording)
+ *v1.0.a:
+ 1) video buffer should be aligned to 16.
+ *v1.0.b:
+ 1) add flip for weixin and MiTalk.
+ *v1.0.c:
+ 1) fix uvc exposure bug.
+ 2) uvc capture may crash in librk_vpuapi.so, fix it.
+ 3) filter not mjpeg data when uvc output format is mjpeg.
+ 4) invalide auto,infinity,macro focus function for uvc.
+ *v1.0.d:
+ 1) has something wrong with rga of rk312x mirror operation,fix it by yzm.
+ 2) correct illuminant name "Horizon" to "HORIZON".
+ 3) fix flashlight bug in mode 2.
+ *v1.0.e:
+ 1) uvc support 16bit unaligned resolution.
+ 2) uvc operation in camera_get_number_of_cameras func exist bug, fixed it.
+ 3) filter frames for isp soc camera.
+*v1.0x22.0:
+* include following version for ifive(hisense f415)
+* v1.0x1f.1:
+* 1) 1632x1224 -> 1600x1200 scale crop by isp, rga run error;
+*v1.0x23.0:
+* 1) support ext flash
+*v1.0x24.0:
+* 1) support OTP
+*v1.0x25.0:
+* 1) select max exposure time for capture, it is suit for Libisp v1.0x15.0;
+* 2) disable lock af for capture;
+* 3) disable lsc only low light for flash;
+*v1.0x26.0:
+* 1) merge source code frome mid,include following version:
+* v1.0x21.1:
+* 1) support V4L2 flash control of soc camera when picure size is the same as preview size.
+*v1.0x27.0:
+* 1) compatible with android 5.0 .
+*v1.0x28.0:
+* 1) Modify setBufferStatus Failed after stream off .
+ 2) Modify soc camera direction in XML
+*V1.0x29.0:
+* 1) fix bug caused by the path of media_profiles.xml in android kitkat.
+*V1.0x30.0:
+ 1) Invalid IOMMU_ENABLED defaultly, except for 312x.
+ 2) merge source code frome mid,include following version:
+ *V1.0x29.1:
+ * 1) add 480p in back camera's resolution.
+ *V1.0x29.2:
+ 1) use PLATFORM_SDK_VERSION instead of PLATFORM_VERSION
+ *V1.0x29.3:
+ 1) jpeg decoder interface has been changed , fix it.
+ *V1.0x29.4:
+ 1) uvc camera create buf err sometimes cause by "is_cif_driver", fix it.
+ 2) del 1200X900,add 720X480,if mCamDriverFrmWidthMax <= 1600 for soc camera.
+ *V1.0x29.5:
+ * 1) force thumb's w and h to 160x128
+ *V1.0x29.6:
+ 1) use new ion interface,commit corresponding isp lib
+ *V1.0x29.7:
+ * 1) fix V1.0x29.5
+ *V1.0x29.8:
+ * 1) bug exist in 'Internal' flash control,fix it.
+ *V1.0x29.9:
+ * 1) fix something for pass cts
+*V1.0x31.0:
+* 1) support sensor otp i2c info for read and write
+*V1.0x32.0:
+ 1) compatible with ion no matter new or old version.
+ 2) merge source code frome mid,include following version:
+ *V1.0x30.1:
+ 1) vpu input bufsize must be an integer multiple of 16,fix it.
+ 2) remove 1080p if driver not reallly support for soc camera.
+*V1.0x33.0:
+* 1) modify android 5.x compile condition.
+*V1.0x34.0:
+* 1) switch disser off in Camerahalutil rag scale
+*V1.0x35.0:
+ 1) support rk3368, CameraHal support 32bit and 64bit.
+ 2) merge from mid, include following versions:
+ *v1.0x32.1:
+ 1) pauseDisplay,notifyNewFrame,displayThread are async,following case may happen:
+ (1) pauseDisplay (2)notifyNewFrame (3) displayThread do the really pause job
+ (4)displayThread get new frame,cause displayThread has been pause now,so this frame
+ will not been processed , and this frame buffer didn't been returned to provider,then
+ this buffer lost.
+ *v1.0x32.2:
+ fix something to pass cts.
+ *v1.0x32.3:
+ 1) fix focus mode for soc camera.
+ 2) The maximum output dst_width is 2048 on rga1.0,when dst_width more then 2048,must be divided into more times
+ *v1.0x32.4:
+ Modify rga do scale and crop when dst_width more then RGA_ACTIVE_W.
+*V1.0x36.0:
+ 1) merge from mid, include following versions:
+ *v1.0x32.5:
+ Modify and unified rga interface.
+*V1.0x37.0:
+ 1) support OV8858 of different versions with the same lens but different calibration xmls.
+*V1.0x38.0:
+* 1) the judgment occasion of different OV8858 versions is wrong, correct it.
+*V1.0x39.0:
+* 1) CameraHal_board_xml_parse.cpp: strncpy is not safer,replace it with strlncpy.
+*V1.0x3a.0:
+* 1) risk exist in V1.0x39.0, fix it.
+*V1.0x3b.0:
+* 1) create callback thread for interactive with cameraservice.
+* 2) 1296x972 crop to 1280x720 used by isp to avoid edge shift.
+*V1.0x3c.0:
+ 1) support sensor powerup sequence configurable.
+*V1.0x3d.0:
+* 1) modify the matching method of raw-sensor's IQ file.
+* 2) add property 'sys_graphic.cam_otp'.
+*V1.0x3e.0:
+ 1) merge from mid, include following versions:
+ *V1.0x36.1:
+ 1) TARGET_RK3288 had change to TARGET_RK32,fix it.
+ *v1.0x36.2:
+ 1) fix something to pass cts, especially testYuvAndJpeg item by huangjinghua.
+ *v1.0x36.3:
+ 1) support rk3188 platform.
+ *v1.0x36.4:
+ 1) fix Luma value to 45 in auto flash mode.
+ *v1.0x36.5:
+ 1) support rk3188,android5.1.
+ *v1.0x36.6:
+ 1) fix rk3188 thumbnails
+ *v1.0x36.7:
+ 1) Support the query of iommu_enabled for usb camera.
+ *v1.0x36.8:
+ 1) bug exist in v1.0x36.7, fix it.
+ *v1.0x36.9:
+ 1) merge with 3368 camera branch,include follow versions:
+ *v1.0x30.1:
+ 1) CameraHal_board_xml_parse.cpp: strncpy is not safer,replace it with strlncpy.
+ *v1.0x30.2:
+ 1) risk exist in v1.0x30.1, fix it.
+ *v1.0x30.3:
+ 1) enable neon for isp soc camera.
+ *v1.0x30.4:
+ 1) ensure that input size and offset is 2 alignment for rga.
+ *v1.0x30.5:
+ 1) src x_offset must be 32 alignment, rga's bug.
+ *v1.0x36.a:
+ 1) support rk3188 scale by ipp.
+ *v1.0x36.b:
+ 1) fix some bugs.
+ *v1.0x36.c:
+ 1) fix bugs in v1.0x36.b.
+ 2) bug in rga_nv12_scale_crop func, fix it.
+ 3) add some setting for usb camera.
+ *v1.0x36.d:
+ 1) fix risk in v1.0x36.c.
+ *v1.0x36.e:
+ 1) fix bug in v1.0x36.7:dynamic query iommu status for usb camera.
+ 2) macro IOMMU is invalide, remove it.
+ 3) avoid the access of mDisplayBufInfo when it is NULL.
+ *v1.0x37.0:
+ compatible with android 6.0
+ *v1.0x37.1:
+ compatible with 64bit
+ *v1.0x37.2:
+ 1) modify for RK3368 Android6.0
+ 2) when sensor service doesn't start
+ sensor listener will return at time
+ 2)support rk3366,rk3399
+*V1.0x3f.0:
+ 1) modify the rule of sensor tuning file matching.
+
+*v1.0x40.0:
+* 1) add support usb camera format for H264.
+*
+*V1.0x41.0:
+ 1) rk_sensor_info memory release.
+ 2) remove macro MACHINE_HAS_CAMERA.
+*V1.0x41.1:
+ 1) modify for passing most cts and verifier tests about camera.
+ 2) box and vr platform no need to copy xml and driver library to out directory.
+*V1.0x41.2:
+ 1) UVC camera statu detect.
+*V1.0x41.3:
+ 1) merged from camera develop project.
+*V1.0x42.0:
+ 1)add iFive_F516 IMX214 tuning
+ 2)merge conflict 69 server
+ *V1.0x40.0:
+ 1) add camera info property.
+ 2) add judgement of camera initialize.
+ 3) fix bug of divided by zero.
+ 4) munmap buffers in cameraStop func.
+V1.0x42.1:
+ 1) set CAMERAS_SUPPORTED_SIMUL_MAX to 2, since two cameras works at the same time.
+V1.0x42.2:
+ 1) add FIXED focus mode support.
+V1.0x43.0:
+ 1) isLowIllumin and minus_level_2 parameter.
+V1.0x43.1:
+ 1) fix some cts problem and set preview minimum fps to 15.
+V1.0x43.2:
+ 1) change 13M resolution to 12.5M because the rga only support max 4096 vir_width.
+ 2) change preview minimum fps to 20.
+V1.0x43.3:
+ 1) fix the bug can't switch to front camera.
+V1.0x44.0:
+ 1) support UVNR.
+ 2) Touch AE support cam_board switch off/on.
+ 3) support isLowIllumin set.
+V1.0x44.1:
+ 1) atuo ae CentreWeightMetering.
+V1.0x45.0:
+ 1) support android Nougat.
+V1.0x46.0:
+ 1) gammaout curve configuration moved to IQ xml.
+V1.0x47.0:
+ 1) update cameragl library(support android nougat).
+V1.0x48.0:
+ 1) support 3DNR.
+v1.0x49.0:
+ 1) support gralloc drm buffer allocation
+v1.0x4a.0:
+ 1) support drm rga
+V1.0x4b.0:
+ 1) support CprocConfig and ConvRange distinguish between preview and capture.
+v1.0x4c.0:
+ 1) usb camera support face detection.
+v1.0x4d.0:
+ 1) enable drm rga, drm rga support virtual addr
+v1.0x4e.0:
+ 1)merge from 29 server:
+ v1.0x49.1:
+ 1) enable drm rga,support virtual address
+ v1.0x49.2:
+ 1) change 13M resolution to 12.5M because the rga only support max 4096 vir_width.
+ v1.0x49.3:
+ 1) Support software jpeg encode at rk3328 platform on android Nougat.
+ 2) Fix bug of getting a frame after stream off.
+ v1.0x49.4:
+ 1) fix the bug that usb camera throw out error in probabilistic when taking pictures
+ v1.0x49.5:
+ 1) use virAddr instead of fd for drm RGA, cause the fd manner has performance issue now.
+ 2) display window virtual width should be 32 bytes aligned,which is GPU required.
+ v1.0x49.6:
+ 1) camera usb adapter support face detection.
+ v1.0x49.7:
+ 1) fix face detection ratation error.
+ v1.0x49.8:
+ 1) enable torch mode when camera supports.
+ v1.0x49.9:
+ 1) Usb camera 1080p priview size should not be filtered.
+ 2) Let debugShowFPS to be a member function of CameraAdapter.
+ 3) Fix bug of creating uvc camera supported video size.
+ 4) rk3328,rk322x and rk312x platform do not support face detection temporary.
+ v1.0x49.a:
+ 1) fix cts fail for usb camera.
+v1.0x4f.0:
+ 1) fix compile error.
+ 2) fix rga_nv12_scale_crop param error caused by merging code.
+ v1.0x4f.1:
+ 1) fix read /proc/pid/cmdline fail when selinux of android7.1 is on.
+ 2) fix weixin video call orientation problem.
+ v1.0x4f.2:
+ 1) When width or height is not 16 alignment,no need to memcopy frame.
+ 2) Modify some log format for easier to debug code.
+ v1.0x4f.3:
+ 1) Modify some cts test code.
+ v1.0x4f.4:
+ 1) Fix the bug that camera can not record video at platform rk312x on android nougat.
+ v1.0x4f.5:
+ 1) Update camera cts test property from sys.cts_gts.status to sys.cts_camera.status.
+ v1.0x4f.6:
+ 1) walkround fix qq self camera record problem with rga,replace
+ rga with arm when this callingprocess.
+ v1.0x4f.7:
+ 1) android7.1 compatible with 3368h.
+*/
+
+
+#define CONFIG_CAMERAHAL_VERSION KERNEL_VERSION(1, 0x4f, 7)
+
+
+/* */
+#define CAMERA_DISPLAY_FORMAT_YUV420P CameraParameters::PIXEL_FORMAT_YUV420P
+#define CAMERA_DISPLAY_FORMAT_YUV420SP CameraParameters::PIXEL_FORMAT_YUV420SP
+#define CAMERA_DISPLAY_FORMAT_RGB565 CameraParameters::PIXEL_FORMAT_RGB565
+#define CAMERA_DISPLAY_FORMAT_NV12 "nv12"
+/*
+*NOTE:
+* CONFIG_CAMERA_DISPLAY_FORCE and CONFIG_CAMERA_DISPLAY_FORCE_FORMAT are debug macros;
+* CONFIG_CAMERA_DISPLAY_FORCE must be 0 in official builds.
+*/
+#define CONFIG_CAMERA_DISPLAY_FORCE 1
+#define CONFIG_CAMERA_DISPLAY_FORCE_FORMAT CAMERA_DISPLAY_FORMAT_RGB565
+
+#define CONFIG_CAMERA_SINGLE_SENSOR_FORCE_BACK_FOR_CTS 0
+#define CONFIG_CAMERA_FRAME_DV_PROC_STAT 0
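+/*
+ * Front-camera preview-data callback mirroring/flipping: frames delivered via
+ * mDataCb to the applications listed in CONFIG_CAMERA_FRONT_MIRROR_MDATACB_APK
+ * are mirrored, and those listed in CONFIG_CAMERA_FRONT_FLIP_MDATACB_APK are
+ * flipped (see the dataCbFrontMirror/dataCbFrontFlip handling in CameraHal.cpp).
+ */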
+#define CONFIG_CAMERA_FRONT_MIRROR_MDATACB 1
+#define CONFIG_CAMERA_FRONT_MIRROR_MDATACB_ALL 0
+#ifdef LAPTOP
+#define CONFIG_CAMERA_FRONT_MIRROR_MDATACB_APK "<com.skype.raider>,<com.yahoo.mobile.client.andro>,\
+ <com.tencent.mobileqq:peak>,<com.tencent.mobileqq:MSF>,\
+ <com.tencent.mm:tools>"
+#define CONFIG_CAMERA_FRONT_FLIP_MDATACB_APK "<com.tencent.mm>,<com.xiaomi.channel>,<com.tencent.mobileqq:video>"
+#else
+#define CONFIG_CAMERA_FRONT_MIRROR_MDATACB_APK "<com.skype.raider>,<com.yahoo.mobile.client.andro>"
+#define CONFIG_CAMERA_FRONT_FLIP_MDATACB_APK "<com.tencent.mm>,<com.xiaomi.channel>"
+#endif
+#define CONFIG_CAMERA_SETVIDEOSIZE 0
+
+#define CONFIG_CAMERA_PREVIEW_BUF_CNT 4
+#define CONFIG_CAMERA_DISPLAY_BUF_CNT 4
+#define CONFIG_CAMERA_VIDEO_BUF_CNT 4
+#define CONFIG_CAMERA_VIDEOENC_BUF_CNT 3
+#define CONFIG_CAMERA_ISP_BUF_REQ_CNT 8
+
+#define CONFIG_CAMERA_UVC_MJPEG_SUPPORT 1
+#define CONFIG_CAMERA_UVC_MANEXP 1
+#define CONFIG_CAMERA_UVC_INVAL_FRAMECNT 5
+#define CONFIG_CAMERA_ORIENTATION_SKYPE 0
+#define CONFIG_CAMERA_FRONT_ORIENTATION_SKYPE 0
+#define CONFIG_CAMERA_BACK_ORIENTATION_SKYPE 0
+
+#define CONFIG_CAMERA_FRONT_PREVIEW_FPS_MIN 3000 // 3fps
+#define CONFIG_CAMERA_FRONT_PREVIEW_FPS_MAX 40000 //40fps
+#define CONFIG_CAMERA_BACK_PREVIEW_FPS_MIN 3000
+#define CONFIG_CAMERA_BACK_PREVIEW_FPS_MAX 40000
+
+#define CONFIG_CAMERA_SCALE_CROP_ISP 1
+
+#define CAMERAHAL_VERSION_PROPERTY_KEY "sys_graphic.cam_hal.ver"
+#define CAMERAHAL_CAMSYS_VERSION_PROPERTY_KEY "sys_graphic.cam_drv_camsys.ver"
+#define CAMERAHAL_V4L2_VERSION_PROPERTY_KEY "sys_graphic.cam_dri_v4l2.ver"
+#define CAMERAHAL_LIBISP_PROPERTY_KEY "sys_graphic.cam_libisp.ver"
+#define CAMERAHAL_ISI_PROPERTY_KEY "sys_graphic.cam_isi.ver"
+#define CAMERAHAL_CAMBOARDXML_PARSER_PROPERTY_KEY "sys_graphic.cam_camboard.ver"
+#define CAMERAHAL_TRACE_LEVEL_PROPERTY_KEY "sys_graphic.cam_trace"
+#define CAMERAHAL_CAM_OTP_PROPERTY_KEY "sys_graphic.cam_otp"
+
+
+#define CAMERA_PMEM_NAME "/dev/pmem_cam"
+#define CAMERA_DRIVER_SUPPORT_FORMAT_MAX 32
+
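+/*
+ * Raw capture buffer sizes per resolution class; the value chosen depends on
+ * whether the driver preview format (mCamDriverPreviewFmt) is RGB565 or a
+ * YUV format.
+ */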
+#define RAW_BUFFER_SIZE_8M (( mCamDriverPreviewFmt == V4L2_PIX_FMT_RGB565) ? 0xF40000:0xB70000)
+#define RAW_BUFFER_SIZE_5M (( mCamDriverPreviewFmt == V4L2_PIX_FMT_RGB565) ? 0x9A0000:0x740000)
+#define RAW_BUFFER_SIZE_3M (( mCamDriverPreviewFmt == V4L2_PIX_FMT_RGB565) ?0x600000 :0x480000)
+#define RAW_BUFFER_SIZE_2M (( mCamDriverPreviewFmt == V4L2_PIX_FMT_RGB565) ?0x3A0000 :0x2c0000)
+#define RAW_BUFFER_SIZE_1M (( mCamDriverPreviewFmt == V4L2_PIX_FMT_RGB565)? 0x180000 :0x1d0000)
+#define RAW_BUFFER_SIZE_0M3 (( mCamDriverPreviewFmt == V4L2_PIX_FMT_RGB565)?0x150000 :0x100000)
+
+#define JPEG_BUFFER_SIZE_8M 0x700000
+#define JPEG_BUFFER_SIZE_5M 0x400000
+#define JPEG_BUFFER_SIZE_3M 0x300000
+#define JPEG_BUFFER_SIZE_2M 0x300000
+#define JPEG_BUFFER_SIZE_1M 0x200000
+#define JPEG_BUFFER_SIZE_0M3 0x100000
+
+#define OPTIMIZE_MEMORY_USE
+#define VIDEO_ENC_BUFFER 0x151800
+#define FILTER_FRAME_NUMBER (3)
+
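+/*
+ * RGA limits per platform: RGA 2.0 (RK32xx/RK3368) handles up to 4096x4096,
+ * while RGA 1.0 is limited to a 2048-pixel active width/height, which is why
+ * larger blits have to be split into several passes elsewhere in the HAL.
+ */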
+#if (defined(TARGET_RK32) || defined(TARGET_RK3368))
+#define RGA_VER (2.0)
+#define RGA_ACTIVE_W (4096)
+#define RGA_VIRTUAL_W (4096)
+#define RGA_ACTIVE_H (4096)
+#define RGA_VIRTUAL_H (4096)
+
+#else
+#define RGA_VER (1.0)
+#define RGA_ACTIVE_W (2048)
+#define RGA_VIRTUAL_W (4096)
+#define RGA_ACTIVE_H (2048)
+#define RGA_VIRTUAL_H (2048)
+
+#endif
+
+#define JPEG_BUFFER_DYNAMIC (1)
+
+#define V4L2_BUFFER_MAX 32
+#define V4L2_BUFFER_MMAP_MAX 16
+#define PAGE_ALIGN(x) (((x) + 0xFFF) & (~0xFFF)) // Set as multiple of 4K
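+// e.g. PAGE_ALIGN(0x1001) == 0x2000, PAGE_ALIGN(0x2000) == 0x2000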
+
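+/*
+ * Rockchip-specific CameraParameters keys for burst capture (count and
+ * interval) and for forcing a preview size.  The literal key strings are the
+ * interface that applications match on, so they are kept exactly as shipped,
+ * misspellings included.
+ */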
+#define KEY_CONTINUOUS_PIC_NUM "rk-continous-pic-num"
+#define KEY_CONTINUOUS_PIC_INTERVAL_TIME "rk-continous-pic-interval-time"
+#define KEY_CONTINUOUS_SUPPORTED "rk-continous-supported"
+#define KEY_PREVIEW_W_FORCE "rk-previwe-w-force"
+#define KEY_PREVIEW_H_FORCE "rk-previwe-h-force"
+
+
+#define CAMHAL_GRALLOC_USAGE GRALLOC_USAGE_HW_TEXTURE | \
+ GRALLOC_USAGE_HW_RENDER | \
+ GRALLOC_USAGE_SW_WRITE_OFTEN | \
+ GRALLOC_USAGE_SW_READ_OFTEN /*| \
+ GRALLOC_USAGE_SW_WRITE_MASK| \
+ GRALLOC_USAGE_SW_READ_RARELY*/
+#define CAMERA_IPP_NAME "/dev/rk29-ipp"
+
+
+
+
+#if defined(TARGET_BOARD_PLATFORM_RK30XX) || defined(TARGET_RK29) || defined(TARGET_BOARD_PLATFORM_RK2928)
+ #define NATIVE_HANDLE_TYPE private_handle_t
+ #define PRIVATE_HANDLE_GET_W(hd) (hd->width)
+ #define PRIVATE_HANDLE_GET_H(hd) (hd->height)
+#elif defined(TARGET_BOARD_PLATFORM_RK30XXB) || defined(TARGET_RK3368) || defined(TARGET_RK3328) || defined(TARGET_RK312x) || defined(TARGET_RK3288)
+ #define NATIVE_HANDLE_TYPE IMG_native_handle_t
+ #define PRIVATE_HANDLE_GET_W(hd) (hd->iWidth)
+ #define PRIVATE_HANDLE_GET_H(hd) (hd->iHeight)
+#endif
+
+#define RK_VIDEOBUF_CODE_CHK(rk_code) ((rk_code&(('R'<<24)|('K'<<16)))==(('R'<<24)|('K'<<16)))
+
+
+class FrameProvider
+{
+public:
+ virtual int returnFrame(long index,int cmd)=0;
+ virtual ~FrameProvider(){};
+ FrameProvider(){};
+ FramInfo_s mPreviewFrameInfos[CONFIG_CAMERA_PREVIEW_BUF_CNT];
+};
+
+typedef struct rk_buffer_info {
+ Mutex* lock;
+ long phy_addr;
+ long vir_addr;
+ int share_fd;
+ int buf_state;
+} rk_buffer_info_t;
+
+class BufferProvider{
+public:
+ int createBuffer(int count,int perbufsize,buffer_type_enum buftype,bool is_cif_driver);
+ int freeBuffer();
+ virtual int setBufferStatus(int bufindex,int status,int cmd=0);
+ virtual int getOneAvailableBuffer(long *buf_phy,long *buf_vir);
+ int getBufferStatus(int bufindex);
+ int getBufCount();
+ long getBufPhyAddr(int bufindex);
+ long getBufVirAddr(int bufindex);
+ int getBufShareFd(int bufindex);
+ int flushBuffer(int bufindex);
+ BufferProvider(MemManagerBase* memManager):mBufInfo(NULL),mCamBuffer(memManager){}
+ virtual ~BufferProvider(){mCamBuffer = NULL;mBufInfo = NULL;}
+
+ bool is_cif_driver;
+protected:
+ rk_buffer_info_t* mBufInfo;
+ int mBufCount;
+ buffer_type_enum mBufType;
+ MemManagerBase* mCamBuffer;
+};
+
+//preview buffer
+class PreviewBufferProvider:public BufferProvider
+{
+public:
+
+ enum PREVIEWBUFSTATUS {
+ CMD_PREVIEWBUF_DISPING = 0x01,
+ CMD_PREVIEWBUF_VIDEO_ENCING = 0x02,
+ CMD_PREVIEWBUF_SNAPSHOT_ENCING = 0x04,
+ CMD_PREVIEWBUF_DATACB = 0x08,
+ CMD_PREVIEWBUF_WRITING = 0x10,
+ };
+
+#define CAMERA_PREVIEWBUF_ALLOW_DISPLAY(a) ((a&CMD_PREVIEWBUF_WRITING)==0x00)
+#define CAMERA_PREVIEWBUF_ALLOW_ENC(a) ((a&CMD_PREVIEWBUF_WRITING)==0x00)
+#define CAMERA_PREVIEWBUF_ALLOW_ENC_PICTURE(a) ((a&CMD_PREVIEWBUF_WRITING)==0x00)
+#define CAMERA_PREVIEWBUF_ALLOW_DATA_CB(a) ((a&CMD_PREVIEWBUF_WRITING)==0x00)
+#define CAMERA_PREVIEWBUF_ALLOW_WRITE(a) ((a&(CMD_PREVIEWBUF_DISPING|CMD_PREVIEWBUF_VIDEO_ENCING|CMD_PREVIEWBUF_SNAPSHOT_ENCING|CMD_PREVIEWBUF_DATACB|CMD_PREVIEWBUF_WRITING))==0x00)
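+/* A preview buffer's status is a bitmask of the PREVIEWBUFSTATUS flags above:
+ * display, video encoding, snapshot encoding and data callbacks may each take a
+ * buffer as long as no new capture is being written into it, while writing a
+ * new capture (CAMERA_PREVIEWBUF_ALLOW_WRITE) requires that every consumer flag
+ * has been cleared. */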
+
+ virtual int setBufferStatus(int bufindex,int status,int cmd);
+
+ PreviewBufferProvider(MemManagerBase* memManager):BufferProvider(memManager){}
+ ~PreviewBufferProvider(){}
+};
+
+
+class DisplayAdapter;
+class AppMsgNotifier;
+typedef struct cameraparam_info cameraparam_info_s;
+//display buffers are managed by the DisplayAdapter
+
+/* mjpeg decoder interface in libvpu.*/
+typedef void* (*getMjpegDecoderFun)(void);
+typedef void (*destroyMjpegDecoderFun)(void* jpegDecoder);
+
+typedef int (*initMjpegDecoderFun)(void* jpegDecoder);
+typedef int (*deInitMjpegDecoderFun)(void* jpegDecoder);
+
+typedef int (*mjpegDecodeOneFrameFun)(void * jpegDecoder,uint8_t* aOutBuffer, uint32_t *aOutputLength,
+ uint8_t* aInputBuf, uint32_t* aInBufSize, ulong_t out_phyaddr);
+
+typedef struct mjpeg_interface {
+ void* decoder;
+ int state;
+
+ getMjpegDecoderFun get;
+ destroyMjpegDecoderFun destroy;
+ initMjpegDecoderFun init;
+ deInitMjpegDecoderFun deInit;
+ mjpegDecodeOneFrameFun decode;
+} mjpeg_interface_t;
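+/* mjpeg_interface_t is a function-pointer table for the hardware MJPEG decoder
+ * exported by libvpu. The pointers are presumably resolved at runtime via
+ * dlopen()/dlsym() (the library handle is kept in
+ * CameraAdapter::mLibstageLibHandle); the exact library path and symbol names
+ * are an implementation detail of the adapter sources. */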
+
+/*************************
+CameraAdapter talks to the camera driver. It is the provider of frame data,
+supplying frames to the display, msg and callback consumers.
+***************************/
+class CameraAdapter:public FrameProvider
+{
+public:
+ CameraAdapter(int cameraId);
+ virtual ~CameraAdapter();
+
+ void setImageAllFov(bool sw){mImgAllFovReq=sw;}
+ void setCtsTestFlag(bool isCts){mIsCtsTest = isCts;}
+ DisplayAdapter* getDisplayAdapterRef(){return mRefDisplayAdapter;}
+ void setDisplayAdapterRef(DisplayAdapter& refDisplayAdap);
+ void setEventNotifierRef(AppMsgNotifier& refEventNotify);
+ void setPreviewBufProvider(BufferProvider* bufprovider);
+ CameraParameters & getParameters();
+ virtual int getCurPreviewState(int *drv_w,int *drv_h);
+ virtual int getCurVideoSize(int *video_w, int *video_h);
+ virtual bool isNeedToRestartPreview();
+ int getCameraFd();
+ int initialize();
+
+ virtual status_t startPreview(int preview_w,int preview_h,int w, int h, int fmt,bool is_capture);
+ virtual status_t stopPreview();
+ // virtual int initialize() = 0;
+ virtual int returnFrame(long index,int cmd);
+ virtual int setParameters(const CameraParameters &params_set,bool &isRestartValue);
+ virtual void initDefaultParameters(int camFd);
+ virtual status_t autoFocus();
+ virtual status_t cancelAutoFocus();
+
+ virtual void dump(int cameraId);
+ virtual void getCameraParamInfo(cameraparam_info_s &paraminfo);
+ virtual bool getFlashStatus();
+ virtual int selectPreferedDrvSize(int *width,int * height,bool is_capture){ return 0;}
+ virtual int faceNotify(struct RectFace* faces, int* num);
+ virtual void debugShowFPS();
+ virtual int flashcontrol();
+
+ bool cif_driver_iommu;
+
+
+protected:
+ //talk to driver
+ virtual int cameraCreate(int cameraId);
+ virtual int cameraDestroy();
+
+ virtual int cameraSetSize(int w, int h, int fmt, bool is_capture);
+ virtual int cameraStream(bool on);
+ virtual int cameraStart();
+ virtual int cameraStop();
+ //dqbuf
+ virtual int getFrame(FramInfo_s** frame);
+ virtual int cameraAutoFocus(bool auto_trig_only);
+
+ //qbuf
+ // virtual status_t fillThisBuffer();
+
+    //define the frame info, such as w, h, fmt and deal flag (preview callback? display? video enc? picture?)
+ virtual int reprocessFrame(FramInfo_s* frame);
+ virtual int adapterReturnFrame(long index,int cmd);
+
+private:
+ class CameraPreviewThread :public Thread
+ {
+        // after a frame is dequeued here it must be dispatched to the DisplayAdapter and EventNotifier (AppMsgNotifier) classes
+ CameraAdapter* mPreivewCameraAdapter;
+ public:
+ CameraPreviewThread(CameraAdapter* adapter)
+ : Thread(false), mPreivewCameraAdapter(adapter) { }
+
+ virtual bool threadLoop() {
+ mPreivewCameraAdapter->previewThread();
+
+ return false;
+ }
+ };
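+    /* Note: threadLoop() returning false tells android::Thread to run it only
+     * once, so the per-frame dequeue/dispatch loop is expected to live inside
+     * previewThread() itself; the other worker threads below follow the same
+     * pattern. */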
+
+ void autofocusThread();
+ class AutoFocusThread : public Thread {
+ CameraAdapter* mCameraAdapter;
+ public:
+ AutoFocusThread(CameraAdapter* hw)
+ : Thread(false), mCameraAdapter(hw) { }
+
+ virtual bool threadLoop() {
+ mCameraAdapter->autofocusThread();
+
+ return false;
+ }
+ };
+
+ sp<AutoFocusThread> mAutoFocusThread;
+ Mutex mAutoFocusLock;
+ bool mExitAutoFocusThread;
+ Condition mAutoFocusCond;
+ camera_notify_callback mNotifyCb;
+
+
+
+protected:
+ virtual void previewThread();
+protected:
+ DisplayAdapter* mRefDisplayAdapter;
+ AppMsgNotifier* mRefEventNotifier;
+ sp<CameraPreviewThread> mCameraPreviewThread;
+ int mPreviewRunning;
+ int mPictureRunning;
+ BufferProvider* mPreviewBufProvider;
+ int mCamDrvWidth;
+ int mCamDrvHeight;
+ int mCamPreviewH ;
+ int mCamPreviewW ;
+ int mVideoWidth;
+ int mVideoHeight;
+ bool mImgAllFovReq;
+ bool mIsCtsTest;
+
+ unsigned int mCamDriverSupportFmt[CAMERA_DRIVER_SUPPORT_FORMAT_MAX];
+ enum v4l2_memory mCamDriverV4l2MemType;
+
+ unsigned int mCamDriverPreviewFmt;
+ struct v4l2_capability mCamDriverCapability;
+
+ int mPreviewErrorFrameCount;
+ int mPreviewFrameIndex;
+
+ Mutex mCamDriverStreamLock;
+ bool mCamDriverStream;
+
+ bool camera_device_error;
+
+ CameraParameters mParameters;
+ unsigned int CameraHal_SupportFmt[6];
+
+ char *mCamDriverV4l2Buffer[V4L2_BUFFER_MAX];
+ unsigned int mCamDriverV4l2BufferLen;
+
+ mjpeg_interface_t mMjpegDecoder;
+ void* mLibstageLibHandle;
+
+ int mZoomVal;
+ int mZoomMin;
+ int mZoomMax;
+ int mZoomStep;
+
+ int mCamFd;
+ int mCamId;
+};
+
+//soc camera adapter
+class CameraSOCAdapter: public CameraAdapter
+{
+public:
+ CameraSOCAdapter(int cameraId);
+ virtual ~CameraSOCAdapter();
+
+ /*********************
+ talk to driver
+ **********************/
+ //parameters
+ virtual int setParameters(const CameraParameters &params_set,bool &isRestartValue);
+ virtual void initDefaultParameters(int camFd);
+ virtual int cameraAutoFocus(bool auto_trig_only);
+ virtual int selectPreferedDrvSize(int *width,int * height,bool is_capture);
+ virtual int flashcontrol();
+private:
+ static unsigned int mFrameSizesEnumTable[][2];
+
+ typedef struct frameSize_s{
+ unsigned int width;
+ unsigned int height;
+ unsigned int fmt;
+ int framerate;
+ }frameSize_t;
+ Vector<frameSize_t> mFrameSizeVector;
+
+ int cameraFramerateQuery(unsigned int format, unsigned int w, unsigned int h, int *min, int *max);
+ int cameraFpsInfoSet(CameraParameters &params);
+ int cameraConfig(const CameraParameters &tmpparams,bool isInit,bool &isRestartValue);
+
+
+ unsigned int mCamDriverFrmWidthMax;
+ unsigned int mCamDriverFrmHeightMax;
+ String8 mSupportPreviewSizeReally;
+
+ struct v4l2_querymenu mWhiteBalance_menu[20];
+ int mWhiteBalance_number;
+
+ struct v4l2_querymenu mEffect_menu[20];
+ int mEffect_number;
+
+ struct v4l2_querymenu mScene_menu[20];
+ int mScene_number;
+
+ struct v4l2_querymenu mAntibanding_menu[20];
+ int mAntibanding_number;
+
+
+ struct v4l2_querymenu mFlashMode_menu[20];
+ int mFlashMode_number;
+
+ //TAE & TAF
+ __u32 m_focus_mode;
+ static const __u32 focus_fixed = 0xFFFFFFFF;
+ __u32 m_focus_value;
+ __s32 m_taf_roi[4];
+
+ int GetAFParameters(const CameraParameters params);
+ int AndroidZoneMapping(
+ const char* tag,
+ __s32 pre_w,
+ __s32 pre_h,
+ __s32 drv_w,
+ __s32 drv_h,
+ bool bPre2Drv,
+ __s32 *zone);
+
+};
+
+//usb camera adapter
+
+
+
+class CameraUSBAdapter: public CameraAdapter
+{
+public:
+ CameraUSBAdapter(int cameraId);
+ virtual ~CameraUSBAdapter();
+ virtual int cameraStop();
+ virtual int setParameters(const CameraParameters &params_set,bool &isRestartValue);
+ virtual void initDefaultParameters(int camFd);
+ virtual int reprocessFrame(FramInfo_s* frame);
+
+
+private:
+ int cameraConfig(const CameraParameters &tmpparams,bool isInit,bool &isRestartValue);
+
+ int mCamDriverFrmWidthMax;
+ int mCamDriverFrmHeightMax;
+ String8 mSupportPreviewSizeReally;
+
+ struct v4l2_querymenu mWhiteBalance_menu[20];
+ int mWhiteBalance_number;
+
+ struct v4l2_querymenu mEffect_menu[20];
+ int mEffect_number;
+
+ struct v4l2_querymenu mScene_menu[20];
+ int mScene_number;
+
+ struct v4l2_querymenu mAntibanding_menu[20];
+ int mAntibanding_number;
+
+ int mZoomMin;
+ int mZoomMax;
+ int mZoomStep;
+
+ struct v4l2_querymenu mFlashMode_menu[20];
+ int mFlashMode_number;
+
+};
+/*************************
+DisplayAdapter is a consumer of frame data: it receives frames from the
+CameraAdapter and renders them to the preview window.
+***************************/
+class DisplayAdapter//:public CameraHal_Tracer
+{
+ class DisplayThread : public Thread {
+ DisplayAdapter* mDisplayAdapter;
+ public:
+ DisplayThread(DisplayAdapter* disadap)
+ : Thread(false), mDisplayAdapter(disadap){}
+
+ virtual bool threadLoop() {
+ mDisplayAdapter->displayThread();
+
+ return false;
+ }
+ };
+
+public:
+ enum DisplayThreadCommands {
+        // Commands
+ CMD_DISPLAY_PAUSE,
+ CMD_DISPLAY_START,
+ CMD_DISPLAY_STOP,
+ CMD_DISPLAY_FRAME,
+ CMD_DISPLAY_INVAL
+ };
+
+ enum DISPLAY_STATUS{
+ STA_DISPLAY_RUNNING,
+ STA_DISPLAY_PAUSE,
+ STA_DISPLAY_STOP,
+ };
+ enum DisplayCommandStatus{
+ CMD_DISPLAY_START_PREPARE = 1,
+ CMD_DISPLAY_START_DONE,
+ CMD_DISPLAY_PAUSE_PREPARE,
+ CMD_DISPLAY_PAUSE_DONE,
+ CMD_DISPLAY_STOP_PREPARE,
+ CMD_DISPLAY_STOP_DONE,
+ CMD_DISPLAY_FRAME_PREPARE,
+ CMD_DISPLAY_FRAME_DONE,
+ };
+
+ typedef struct rk_displaybuf_info {
+ Mutex* lock;
+ buffer_handle_t* buffer_hnd;
+ NATIVE_HANDLE_TYPE *priv_hnd;
+ long phy_addr;
+ long vir_addr;
+ int buf_state;
+ int stride;
+ } rk_displaybuf_info_t;
+
+ bool isNeedSendToDisplay();
+ void notifyNewFrame(FramInfo_s* frame);
+
+ int startDisplay(int width, int height);
+ int stopDisplay();
+ int pauseDisplay();
+
+ int setPreviewWindow(struct preview_stream_ops* window);
+ int getDisplayStatus(void);
+ void setFrameProvider(FrameProvider* framePro);
+ void dump();
+
+ DisplayAdapter();
+ ~DisplayAdapter();
+
+ struct preview_stream_ops* getPreviewWindow();
+private:
+ int cameraDisplayBufferCreate(int width, int height, const char *fmt,int numBufs);
+ int cameraDisplayBufferDestory(void);
+ void displayThread();
+ void setBufferState(int index,int status);
+ void setDisplayState(int state);
+
+ rk_displaybuf_info_t* mDisplayBufInfo;
+ int mDislayBufNum;
+ int mDisplayWidth;
+ int mDisplayHeight;
+ int mDisplayRuning;
+ char mDisplayFormat[30];
+
+ int mDispBufUndqueueMin;
+ preview_stream_ops_t* mANativeWindow;
+ FrameProvider* mFrameProvider;
+
+ Mutex mDisplayLock;
+ Condition mDisplayCond;
+ int mDisplayState;
+
+ MessageQueue displayThreadCommandQ;
+ sp<DisplayThread> mDisplayThread;
+};
+
+typedef struct cameraparam_info{
+ float ExposureTime;
+ float ISOSpeedRatings;
+ float f_RgProj;
+ float f_s;
+ float f_s_Max1;
+ float f_s_Max2;
+ float f_Bg1;
+ float f_Rg1;
+ float f_Bg2;
+ float f_Rg2;
+ float expPriorOut;
+ float expPriorIn;
+ int illuIdx;
+ int region;
+
+ float likehood[32];
+ float wight[32];
+ char illuName[32][20];
+ int count;
+
+ char XMLVersion[64];
+}cameraparam_info_s;
+
+// used by takePicture
+ typedef struct picture_info{
+ int num;
+ int w;
+ int h;
+ int fmt;
+ int quality ;
+ int rotation;
+ int thumbquality;
+ int thumbwidth;
+ int thumbheight;
+
+ //gps info
+ double altitude;
+ double latitude;
+ double longtitude;
+ long timestamp;
+ char getMethod[33];//getMethod : len <= 32
+
+ int focalen;
+ int isovalue;
+ int flash;
+ int whiteBalance;
+
+ cameraparam_info_s cameraparam;
+ }picture_info_s;
+
+
+// used by picture encoding
+struct CamCaptureInfo_s
+{
+ int input_phy_addr;
+ int input_vir_addr;
+ int output_phy_addr;
+ int output_vir_addr;
+ int output_buflen;
+};
+
+typedef void (*FaceDetector_start_func)(void *context,int width, int height, int format);
+typedef void (*FaceDetector_stop_func)(void *context);
+typedef int (*FaceDetector_prepare_func)(void *context, void* src);
+typedef int (*FaceDetector_findFaces_func)(void *context, int orientation, float angle, int isDrawRect, int *smileMode,struct RectFace** faces, int* num);
+typedef void* (*FaceDetector_initizlize_func)(int type, float threshold, int smileMode);
+typedef void (*FaceDetector_destory_func)(void *context);
+typedef int (*FaceDector_nofity_func)(struct RectFace* faces, int* num);
+
+struct face_detector_func_s{
+ void* mLibFaceDetectLibHandle;
+ FaceDetector_start_func mFaceDectStartFunc;
+ FaceDetector_stop_func mFaceDectStopFunc;
+ FaceDetector_prepare_func mFaceDectprepareFunc;
+ FaceDetector_findFaces_func mFaceDectFindFaceFun;
+ FaceDetector_initizlize_func mFaceDetector_initizlize_func;
+ FaceDetector_destory_func mFaceDetector_destory_func;
+};
+
+/**************************************
+AppMsgNotifier (the EventNotifier) handles the msg callbacks to the application
+(shutter, preview data, picture, video frames, errors and so on) and is also a
+consumer of frame data.
+**************************************/
+class AppMsgNotifier
+{
+private:
+ enum CameraStatus{
+ CMD_EVENT_PREVIEW_DATA_CB_PREPARE = 0x01,
+ CMD_EVENT_PREVIEW_DATA_CB_DONE = 0x02,
+ CMD_EVENT_PREVIEW_DATA_CB_MASK = 0x03,
+
+ CMD_EVENT_VIDEO_ENCING_PREPARE = 0x04,
+ CMD_EVENT_VIDEO_ENCING_DONE = 0x08,
+ CMD_EVENT_VIDEO_ENCING_MASK = 0x0C,
+
+ CMD_EVENT_PAUSE_PREPARE = 0x10,
+ CMD_EVENT_PAUSE_DONE = 0x20,
+ CMD_EVENT_PAUSE_MASK = 0x30,
+
+ CMD_EVENT_EXIT_PREPARE = 0x40,
+ CMD_EVENT_EXIT_DONE = 0x80,
+ CMD_EVENT_EXIT_MASK = 0xC0,
+
+ CMD_ENCPROCESS_SNAPSHOT_PREPARE = 0x100,
+ CMD_ENCPROCESS_SNAPSHOT_DONE = 0x200,
+ CMD_ENCPROCESS_SNAPSHOT_MASK = 0x300,
+
+ CMD_ENCPROCESS_PAUSE_PREPARE = 0x400,
+ CMD_ENCPROCESS_PAUSE_DONE = 0x800,
+ CMD_ENCPROCESS_PAUSE_MASK = 0xC00,
+
+ CMD_ENCPROCESS_EXIT_PREPARE = 0x1000,
+ CMD_ENCPROCESS_EXIT_DONE = 0x2000,
+ CMD_ENCPROCESS_EXIT_MASK = 0x3000,
+
+ STA_RECORD_RUNNING = 0x4000,
+ STA_RECEIVE_PIC_FRAME = 0x8000,
+ STA_RECEIVE_PREVIEWCB_FRAME = 0x10000,
+ STA_RECEIVE_FACEDEC_FRAME = 0x20000,
+ };
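+    /* Each command above is tracked with a PREPARE/DONE bit pair whose _MASK is
+     * simply PREPARE|DONE, so masking the running state appears to distinguish
+     * idle (0), in-flight (PREPARE) and completed (DONE); the STA_* values are
+     * plain state flags. CameraHal::CommandStatus follows the same convention. */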
+ typedef struct rk_videobuf_info {
+ Mutex* lock;
+ buffer_handle_t* buffer_hnd;
+ NATIVE_HANDLE_TYPE *priv_hnd;
+ long phy_addr;
+ long vir_addr;
+ int buf_state;
+ int stride;
+ } rk_videobuf_info_t;
+
+
+    // thread handling preview data callbacks and video encoding
+ class CameraAppMsgThread :public Thread
+ {
+ public:
+ enum EVENT_THREAD_CMD{
+ CMD_EVENT_PREVIEW_DATA_CB,
+ CMD_EVENT_VIDEO_ENCING,
+ CMD_EVENT_PAUSE,
+ CMD_EVENT_EXIT
+ };
+ protected:
+ AppMsgNotifier* mAppMsgNotifier;
+ public:
+ CameraAppMsgThread(AppMsgNotifier* hw)
+ : Thread(false),mAppMsgNotifier(hw) { }
+
+ virtual bool threadLoop() {
+ mAppMsgNotifier->eventThread();
+
+ return false;
+ }
+ };
+
+ // face detection
+ class CameraAppFaceDetThread :public Thread
+ {
+ public:
+ enum FACEDET_THREAD_CMD{
+ CMD_FACEDET_FACE_DETECT,
+ CMD_FACEDET_PAUSE,
+ CMD_FACEDET_EXIT
+ };
+ protected:
+ AppMsgNotifier* mAppMsgNotifier;
+ public:
+ CameraAppFaceDetThread(AppMsgNotifier* hw)
+ : Thread(false),mAppMsgNotifier(hw) { }
+
+ virtual bool threadLoop() {
+ mAppMsgNotifier->faceDetectThread();
+
+ return false;
+ }
+ };
+
+ //picture
+ class EncProcessThread : public Thread {
+ public:
+ enum ENC_THREAD_CMD{
+ CMD_ENCPROCESS_SNAPSHOT,
+ CMD_ENCPROCESS_PAUSE,
+ CMD_ENCPROCESS_EXIT,
+ };
+ protected:
+ AppMsgNotifier* mAppMsgNotifier;
+ public:
+ EncProcessThread(AppMsgNotifier* hw)
+ : Thread(false),mAppMsgNotifier(hw) { }
+
+ virtual bool threadLoop() {
+ mAppMsgNotifier->encProcessThread();
+
+ return false;
+ }
+ };
+ class CameraAppCallbackThread :public Thread
+ {
+ public:
+ enum CALLBACK_THREAD_CMD{
+ CMD_MSG_PREVIEW_FRAME,
+ CMD_MSG_SHUTTER,
+ CMD_MSG_RAW_IMAGE,
+ CMD_MSG_RAW_IMAGE_NOTIFY,
+ CMD_MSG_COMPRESSED_IMAGE,
+ CMD_MSG_PREVIEW_METADATA,
+ CMD_MSG_VIDEO_FRAME,
+ CMD_MSG_ERROR,
+ CMD_CALLBACK_PAUSE,
+ CMD_CALLBACK_EXIT
+ };
+ protected:
+ AppMsgNotifier* mAppMsgNotifier;
+ public:
+ CameraAppCallbackThread(AppMsgNotifier* hw)
+ : Thread(false),mAppMsgNotifier(hw) { }
+
+ virtual bool threadLoop() {
+ mAppMsgNotifier->callbackThread();
+
+ return false;
+ }
+ };
+
+ friend class EncProcessThread;
+public:
+ AppMsgNotifier(CameraAdapter *camAdp);
+ ~AppMsgNotifier();
+
+ void setPictureRawBufProvider(BufferProvider* bufprovider);
+ void setPictureJpegBufProvider(BufferProvider* bufprovider);
+
+ int takePicture(picture_info_s picinfo);
+ int flushPicture();
+ int pausePreviewCBFrameProcess();
+
+ void setVideoBufProvider(BufferProvider* bufprovider);
+ int startRecording(int w,int h);
+ int stopRecording();
+ void releaseRecordingFrame(const void *opaque);
+ int msgEnabled(int32_t msg_type);
+ int storeMetadataInBuffer(bool meta);
+ void notifyCbMsg(int msg,int ret);
+
+ int startFaceDection(int width,int height, int faceNum);
+ void stopFaceDection();
+ void onOrientationChanged(uint32_t new_orien,int cam_orien,int face);
+ bool isNeedSendToFaceDetect();
+ void notifyNewFaceDecFrame(FramInfo_s* frame);
+
+ bool isNeedSendToVideo();
+ bool isNeedSendToPicture();
+ bool isNeedSendToDataCB();
+ bool isNeedToDisableCaptureMsg();
+ void notifyNewPicFrame(FramInfo_s* frame);
+ void notifyNewPreviewCbFrame(FramInfo_s* frame);
+ void notifyNewVideoFrame(FramInfo_s* frame);
+ void callback_notify_shutter();
+ void callback_preview_frame(camera_memory_t* datacbFrameMem);
+ void callback_raw_image(camera_memory_t* frame);
+ void callback_notify_raw_image();
+ void callback_compressed_image(camera_memory_t* frame);
+ void callback_notify_error();
+ void callback_preview_metadata(camera_memory_t* datacbFrameMem, camera_frame_metadata_t *facedata, struct RectFace *faces);
+ void callback_video_frame(camera_memory_t* video_frame);
+ int enableMsgType(int32_t msgtype);
+ int disableMsgType(int32_t msgtype);
+ void setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user,
+ Mutex *mainthread_lock);
+ void setFrameProvider(FrameProvider * framepro);
+ int setPreviewDataCbRes(int w,int h, const char *fmt);
+
+ void stopReceiveFrame();
+ void startReceiveFrame();
+ void dump();
+ void setDatacbFrontMirrorFlipState(bool mirror,bool mirrorFlip);
+ picture_info_s& getPictureInfoRef();
+ vpu_display_mem_pool *pool;
+ int grallocDevInit();
+ int grallocDevDeinit();
+ void grallocVideoBuf();
+ void grallocVideoBufAlloc(const char *camPixFmt, unsigned int width, unsigned int height);
+ void grallocVideoBufFree();
+ void grallocVideoBufLock();
+ void grallocVideoBufUnlock();
+ int grallocVideoBufGetAvailable();
+private:
+
+ void encProcessThread();
+ void eventThread();
+ void faceDetectThread();
+ void callbackThread();
+
+ int captureEncProcessPicture(FramInfo_s* frame);
+ int processPreviewDataCb(FramInfo_s* frame);
+ int processVideoCb(FramInfo_s* frame);
+ int processFaceDetect(FramInfo_s* frame, long frame_used_flag);
+
+ int copyAndSendRawImage(void *raw_image, int size);
+ int copyAndSendCompressedImage(void *compressed_image, int size);
+ int Jpegfillexifinfo(RkExifInfo *exifInfo,picture_info_s &params);
+ int Jpegfillgpsinfo(RkGPSInfo *gpsInfo,picture_info_s &params);
+ int initializeFaceDetec(int width,int height);
+ void deInitializeFaceDetec();
+
+ CameraAdapter *mCamAdp;
+ int32_t mMsgTypeEnabled;
+ char mCallingProcess[100];
+
+ picture_info_s mPictureInfo;
+ // bool mReceivePictureFrame;
+ int mEncPictureNum;
+ Mutex mPictureLock;
+
+ Mutex mRecordingLock;
+// bool mRecordingRunning;
+ int mRecordW;
+ int mRecordH;
+ bool mIsStoreMD;
+
+ Mutex mDataCbLock;
+ Mutex mFaceDecLock;
+ Mutex *mMainThreadLockRef;
+ int mCurOrintation;
+ float mCurBiasAngle;
+ int mFaceDetecW;
+ int mFaceDetectH;
+ int32_t mFaceNum;
+ int32_t mFaceFrameNum;
+ bool mFaceDetecInit;
+ void* mFaceContext;
+ bool mFaceDetectionDone;
+
+    //applies to the situation where the CAMERA_MSG_PREVIEW_FRAME msgtype is enabled
+    //but the HAL state does not allow sending this msg
+ bool mRecPrevCbDataEn;
+ bool mRecMetaDataEn;
+
+ sp<CameraAppMsgThread> mCameraAppMsgThread;
+ sp<EncProcessThread> mEncProcessThread;
+ sp<CameraAppFaceDetThread> mFaceDetThread;
+ sp<CameraAppCallbackThread> mCallbackThread;
+
+ BufferProvider* mRawBufferProvider;
+ BufferProvider* mJpegBufferProvider;
+ BufferProvider* mVideoBufferProvider;
+ FrameProvider* mFrameProvider;
+
+ camera_notify_callback mNotifyCb;
+ camera_data_callback mDataCb;
+ camera_data_timestamp_callback mDataCbTimestamp;
+ camera_request_memory mRequestMemory;
+ void *mCallbackCookie;
+
+ MessageQueue encProcessThreadCommandQ;
+ MessageQueue eventThreadCommandQ;
+ MessageQueue faceDetThreadCommandQ;
+ MessageQueue callbackThreadCommandQ;
+
+ camera_memory_t* mVideoBufs[CONFIG_CAMERA_VIDEO_BUF_CNT];
+
+ char mPreviewDataFmt[30];
+ int mPreviewDataW;
+ int mPreviewDataH;
+ int mRunningState;
+ bool mDataCbFrontMirror;
+ bool mDataCbFrontFlip;
+ int mPicSize;
+ camera_memory_t* mPicture;
+ face_detector_func_s mFaceDetectorFun;
+ FaceDector_nofity_func mFaceDectNotify;
+
+ const gralloc_module_t *mGrallocModule;
+ struct alloc_device_t *mGrallocAllocDev;
+ rk_videobuf_info_t *mGrallocVideoBuf[CONFIG_CAMERA_VIDEO_BUF_CNT];
+};
+
+
+/***********************
+The CameraHal class is the point of contact with cameraservice and implements the
+interfaces cameraservice requires. It is only responsible for allocating the
+shared resources and for dispatching commands to the other components.
+***********************/
+class CameraHal
+{
+public:
+/*--------------------Interface Methods---------------------------------*/
+ /** Set the ANativeWindow to which preview frames are sent */
+ int setPreviewWindow(struct preview_stream_ops *window);
+
+ /** Set the notification and data callbacks */
+ void setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ /**
+ * The following three functions all take a msg_type, which is a bitmask of
+ * the messages defined in include/ui/Camera.h
+ */
+
+ /**
+ * Enable a message, or set of messages.
+ */
+ void enableMsgType(int32_t msg_type);
+
+ /**
+ * Disable a message, or a set of messages.
+ *
+ * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera
+ * HAL should not rely on its client to call releaseRecordingFrame() to
+     * release video recording frames sent out by the camera HAL before and
+ * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera HAL
+ * clients must not modify/access any video recording frame after calling
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
+ */
+ void disableMsgType(int32_t msg_type);
+
+ /**
+ * Query whether a message, or a set of messages, is enabled. Note that
+     * this operates as an AND: if any of the messages queried are off, this
+ * will return false.
+ */
+ int msgTypeEnabled(int32_t msg_type);
+
+ /**
+ * Start preview mode.
+ */
+ int startPreview();
+
+ /**
+ * Stop a previously started preview.
+ */
+ void stopPreview();
+
+ /**
+ * Returns true if preview is enabled.
+ */
+ int previewEnabled();
+
+ /**
+ * Request the camera HAL to store meta data or real YUV data in the video
+ * buffers sent out via CAMERA_MSG_VIDEO_FRAME for a recording session. If
+ * it is not called, the default camera HAL behavior is to store real YUV
+ * data in the video buffers.
+ *
+ * This method should be called before startRecording() in order to be
+ * effective.
+ *
+ * If meta data is stored in the video buffers, it is up to the receiver of
+ * the video buffers to interpret the contents and to find the actual frame
+ * data with the help of the meta data in the buffer. How this is done is
+ * outside of the scope of this method.
+ *
+ * Some camera HALs may not support storing meta data in the video buffers,
+ * but all camera HALs should support storing real YUV data in the video
+ * buffers. If the camera HAL does not support storing the meta data in the
+     * video buffers when it is requested to do so, INVALID_OPERATION must be
+ * returned. It is very useful for the camera HAL to pass meta data rather
+ * than the actual frame data directly to the video encoder, since the
+ * amount of the uncompressed frame data can be very large if video size is
+ * large.
+ *
+ * @param enable if true to instruct the camera HAL to store
+ * meta data in the video buffers; false to instruct
+ * the camera HAL to store real YUV data in the video
+ * buffers.
+ *
+ * @return OK on success.
+ */
+ int storeMetaDataInBuffers(int enable);
+
+ /**
+ * Start record mode. When a record image is available, a
+ * CAMERA_MSG_VIDEO_FRAME message is sent with the corresponding
+ * frame. Every record frame must be released by a camera HAL client via
+ * releaseRecordingFrame() before the client calls
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
+ * responsibility to manage the life-cycle of the video recording frames,
+ * and the client must not modify/access any video recording frames.
+ */
+ int startRecording();
+
+ /**
+ * Stop a previously started recording.
+ */
+ void stopRecording();
+
+ /**
+ * Returns true if recording is enabled.
+ */
+ int recordingEnabled();
+
+ /**
+ * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+ *
+ * It is camera HAL client's responsibility to release video recording
+ * frames sent out by the camera HAL before the camera HAL receives a call
+ * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives the call to
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
+ * responsibility to manage the life-cycle of the video recording frames.
+ */
+ void releaseRecordingFrame(const void *opaque);
+
+ /**
+ * Start auto focus, the notification callback routine is called with
+ * CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() will be
+ * called again if another auto focus is needed.
+ */
+ int autoFocus();
+
+ /**
+ * Cancels auto-focus function. If the auto-focus is still in progress,
+ * this function will cancel it. Whether the auto-focus is in progress or
+ * not, this function will return the focus position to the default. If
+ * the camera does not support auto-focus, this is a no-op.
+ */
+ int cancelAutoFocus();
+
+ /**
+ * Take a picture.
+ */
+ int takePicture();
+
+ /**
+ * Cancel a picture that was started with takePicture. Calling this method
+ * when no picture is being taken is a no-op.
+ */
+ int cancelPicture();
+
+ /**
+ * Set the camera parameters. This returns BAD_VALUE if any parameter is
+ * invalid or not supported.
+ */
+ int setParameters(const char *parms);
+ int setParameters(const CameraParameters &params_set);
+ int setParametersUnlock(const CameraParameters &params_set);
+
+ /** Retrieve the camera parameters. The buffer returned by the camera HAL
+ must be returned back to it with put_parameters, if put_parameters
+ is not NULL.
+ */
+ char* getParameters();
+
+ /** The camera HAL uses its own memory to pass us the parameters when we
+ call get_parameters. Use this function to return the memory back to
+ the camera HAL, if put_parameters is not NULL. If put_parameters
+ is NULL, then you have to use free() to release the memory.
+ */
+ void putParameters(char *);
+
+ /**
+ * Send command to camera driver.
+ */
+ int sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+
+ /**
+ * Release the hardware resources owned by this object. Note that this is
+ * *not* done in the destructor.
+ */
+ void release();
+
+ /**
+ * Dump state of the camera hardware
+ */
+ int dump(int fd);
+ void dump_mem(__u16 addr_s, __u16 len, __u8* content);
+/*--------------------Internal Member functions - Public---------------------------------*/
+ /** Constructor of CameraHal */
+ CameraHal(int cameraId);
+
+ // Destructor of CameraHal
+ virtual ~CameraHal();
+
+ int getCameraFd();
+ int getCameraId();
+ bool isNeedToDisableCaptureMsg();
+
+public:
+ CameraAdapter* mCameraAdapter;
+ DisplayAdapter* mDisplayAdapter;
+ AppMsgNotifier* mEventNotifier;
+ MemManagerBase* mMemoryManager;
+ BufferProvider* mPreviewBuf;
+ BufferProvider* mVideoBuf;
+ BufferProvider* mRawBuf;
+ BufferProvider* mJpegBuf;
+ MemManagerBase* mCamMemManager;
+public:
+ bool mInitState;
+
+private:
+ int selectPreferedDrvSize(int *width,int * height,bool is_capture);
+ int fillPicturInfo(picture_info_s& picinfo);
+ void setCamStatus(int status, int type);
+ int checkCamStatus(int cmd);
+ enum CommandThreadCommands {
+        // Commands
+ CMD_PREVIEW_START = 1,
+ CMD_PREVIEW_STOP,
+ CMD_PREVIEW_CAPTURE_CANCEL,
+ CMD_CONTINUOS_PICTURE,
+
+ CMD_AF_START,
+ CMD_AF_CANCEL,
+
+ CMD_SET_PREVIEW_WINDOW,
+ CMD_SET_PARAMETERS,
+ CMD_START_FACE_DETECTION,
+ CMD_EXIT,
+
+ };
+
+ enum CommandThreadStatus{
+ CMD_STATUS_RUN ,
+ CMD_STATUS_STOP ,
+
+ };
+
+ enum CommandStatus{
+ CMD_PREVIEW_START_PREPARE = 0x01,
+ CMD_PREVIEW_START_DONE = 0x02,
+ CMD_PREVIEW_START_MASK = 0x03,
+
+ CMD_PREVIEW_STOP_PREPARE = 0x04,
+ CMD_PREVIEW_STOP_DONE = 0x08,
+ CMD_PREVIEW_STOP_MASK = 0x0C,
+
+ CMD_CONTINUOS_PICTURE_PREPARE = 0x10,
+ CMD_CONTINUOS_PICTURE_DONE = 0x20,
+ CMD_CONTINUOS_PICTURE_MASK = 0x30,
+
+ CMD_PREVIEW_CAPTURE_CANCEL_PREPARE = 0x40,
+ CMD_PREVIEW_CAPTURE_CANCEL_DONE = 0x80,
+ CMD_PREVIEW_CAPTURE_CANCEL_MASK = 0xC0,
+
+ CMD_AF_START_PREPARE = 0x100,
+ CMD_AF_START_DONE = 0x200,
+ CMD_AF_START_MASK = 0x300,
+
+ CMD_AF_CANCEL_PREPARE = 0x400,
+ CMD_AF_CANCEL_DONE = 0x800,
+ CMD_AF_CANCEL_MASK = 0xC00,
+
+ CMD_SET_PREVIEW_WINDOW_PREPARE = 0x1000,
+ CMD_SET_PREVIEW_WINDOW_DONE = 0x2000,
+ CMD_SET_PREVIEW_WINDOW_MASK = 0x3000,
+
+ CMD_SET_PARAMETERS_PREPARE = 0x4000,
+ CMD_SET_PARAMETERS_DONE = 0x8000,
+ CMD_SET_PARAMETERS_MASK = 0xC000,
+
+ CMD_EXIT_PREPARE = 0x10000,
+ CMD_EXIT_DONE = 0x20000,
+ CMD_EXIT_MASK = 0x30000,
+
+ STA_RECORD_RUNNING = 0x40000,
+ STA_PREVIEW_CMD_RECEIVED = 0x80000,
+
+ STA_DISPLAY_RUNNING = 0x100000,
+ STA_DISPLAY_PAUSE = 0x200000,
+ STA_DISPLAY_STOP = 0x400000,
+ STA_DISPLAY_MASK = 0x700000,
+ };
+
+ class CommandThread : public Thread {
+ CameraHal* mHardware;
+ public:
+ CommandThread(CameraHal* hw)
+ : Thread(false), mHardware(hw) { }
+
+ virtual bool threadLoop() {
+ mHardware->commandThread();
+
+ return false;
+ }
+ };
+
+ friend class CommandThread;
+ void commandThread();
+ sp<CommandThread> mCommandThread;
+ MessageQueue commandThreadCommandQ;
+ int mCommandRunning;
+
+ void updateParameters(CameraParameters & tmpPara);
+ CameraParameters mParameters;
+
+
+ mutable Mutex mLock; // API lock -- all public methods
+ // bool mRecordRunning;
+ // bool mPreviewCmdReceived;
+ int mCamFd;
+ unsigned int mCameraStatus;
+ int mCamId;
+ sp<SensorListener> mSensorListener;
+};
+
+}
+
+#endif
+
+
diff --git a/CameraHal/CameraHalUtil.cpp b/CameraHal/CameraHalUtil.cpp
new file mode 100755
index 0000000..9241568
--- /dev/null
+++ b/CameraHal/CameraHalUtil.cpp
@@ -0,0 +1,1697 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <sys/ioctl.h>
+#include <utils/Log.h>
+#include <linux/videodev2.h>
+#include <binder/IPCThreadState.h>
+#include "CameraHal.h"
+
+
+#include <camera/CameraParameters.h>
+
+#if defined(TARGET_RK30) && (defined(TARGET_BOARD_PLATFORM_RK30XX) || (defined(TARGET_BOARD_PLATFORM_RK2928)))
+#include "../libgralloc/gralloc_priv.h"
+#if (CONFIG_CAMERA_INVALIDATE_RGA==0)
+#include <hardware/rga.h>
+#endif
+#elif defined(TARGET_RK30) && defined(TARGET_BOARD_PLATFORM_RK30XXB)
+#include <hardware/hal_public.h>
+#include <hardware/rga.h>
+#elif defined(TARGET_RK3368) || defined(TARGET_RK3288)
+#include <hardware/img_gralloc_public.h>
+#include <hardware/rga.h>
+#elif defined(TARGET_RK29)
+#include "../libgralloc/gralloc_priv.h"
+#endif
+
+#include "../jpeghw/release/encode_release/rk29-ipp.h"
+#include <utils/CallStack.h>
+#if defined(RK_DRM_GRALLOC)
+#if defined(TARGET_RK3368)
+#include <hardware/gralloc.h>
+#else
+#include <gralloc_drm.h>
+#endif
+#endif
+
+#define MIN(x,y) ((x<y) ? x: y)
+#define MAX(x,y) ((x>y) ? x: y)
+
+#ifdef ALOGD
+#define LOGD ALOGD
+#endif
+#ifdef ALOGV
+#define LOGV ALOGV
+#endif
+#ifdef ALOGE
+#define LOGE ALOGE
+#endif
+#ifdef ALOGI
+#define LOGI ALOGI
+#endif
+
+#define CAMERA_DISPLAY_FORMAT_NV12 "nv12"
+
+
+extern "C" int cameraFormatConvert(int v4l2_fmt_src, int v4l2_fmt_dst, const char *android_fmt_dst,
+ char *srcbuf, char *dstbuf,long srcphy,long dstphy,int src_size,
+ int src_w, int src_h, int srcbuf_w,
+ int dst_w, int dst_h, int dstbuf_w,
+ bool mirror);
+
+
+extern "C" int getCallingPid() {
+ return android::IPCThreadState::self()->getCallingPid();
+}
+
+
+extern "C" void callStack(){
+ android::CallStack stack;
+ stack.update();
+ stack.log("CameraHal");
+
+}
+extern "C" char* getCallingProcess()
+{
+ int fp = -1;
+    static char cameraCallProcess[100]; /* static: the pointer is returned to the caller below */
+ memset(cameraCallProcess,0x00,sizeof(cameraCallProcess));
+ sprintf(cameraCallProcess,"/proc/%d/cmdline",getCallingPid());
+
+ fp = open(cameraCallProcess, O_RDONLY);
+
+ if (fp < 0) {
+ char value[PROPERTY_VALUE_MAX];
+ property_get("sys.camera.callprocess", value, "none");
+ memset(cameraCallProcess,0x00,sizeof(cameraCallProcess));
+ strcpy(cameraCallProcess, value);
+ if (!strcmp("none", value))
+ LOGE("Obtain calling process info failed");
+ else
+ LOGD("Calling process from sys.camera.callprocess is: %s", cameraCallProcess);
+ }
+ else {
+ memset(cameraCallProcess,0x00,sizeof(cameraCallProcess));
+ read(fp, cameraCallProcess, 99);
+ close(fp);
+ fp = -1;
+ LOGD("Calling process is: %s",cameraCallProcess);
+ }
+ return cameraCallProcess;
+
+}
+
+static float hwcGetBytesPerPixelForRga(int fmt)
+{
+ float bpp = 4;
+ switch(fmt){
+ case RK_FORMAT_RGB_565:
+ bpp = 2;
+ break;
+ case RK_FORMAT_RGB_888:
+ bpp = 3;
+ break;
+ case RK_FORMAT_RGBA_8888:
+ case RK_FORMAT_RGBX_8888:
+ case RK_FORMAT_BGRA_8888:
+ bpp = 4;
+ break;
+ case RK_FORMAT_YCbCr_420_SP:
+ case RK_FORMAT_YCrCb_420_SP:
+ bpp = 1.5;
+ break;
+ default:
+ break;
+ }
+ return bpp;
+}
+
+extern "C" int cameraPixFmt2HalPixFmt(const char *fmt)
+{
+ int hal_pixel_format=HAL_PIXEL_FORMAT_YCrCb_NV12;
+
+ if (strcmp(fmt,android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ hal_pixel_format = HAL_PIXEL_FORMAT_RGB_565;
+ } else if (strcmp(fmt,android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ hal_pixel_format = HAL_PIXEL_FORMAT_YCrCb_NV12;
+ }else if(strcmp(fmt,android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0){
+ hal_pixel_format = HAL_PIXEL_FORMAT_YV12;
+ }else if (strcmp(fmt,CAMERA_DISPLAY_FORMAT_NV12) == 0) {
+ hal_pixel_format = HAL_PIXEL_FORMAT_YCrCb_NV12;
+ } else if (strcmp(fmt,android::CameraParameters::PIXEL_FORMAT_YUV422SP) == 0) {
+ hal_pixel_format = HAL_PIXEL_FORMAT_YCbCr_422_SP;
+ } else {
+ hal_pixel_format = -EINVAL;
+ LOGE("%s(%d): pixel format %s is unknow!",__FUNCTION__,__LINE__,fmt);
+ }
+
+ return hal_pixel_format;
+}
+extern "C" void arm__scale_crop_nv12torgb565(int srcW, int srcH,int dstW,int dstH, char *srcbuf, short int *dstbuf)
+{
+#if 0
+ int line, col;
+ int uv_src_w = srcW/2;
+ int uv_src_h = srcH/2;
+ int src_w = srcW,src_h = srcH;
+ int dst_w = dstW,dst_h = dstH;
+ int ratio,cropW,cropH,left_offset,top_offset;
+ char* src,*psY,*psUV,*dst,*pdUV,*pdY;
+ long zoomindstxIntInv,zoomindstyIntInv;
+ long sX,sY,sY_UV;
+
+ long yCoeff00_y,yCoeff01_y,xCoeff00_y,xCoeff01_y;
+ long yCoeff00_uv,yCoeff01_uv,xCoeff00_uv,xCoeff01_uv;
+ long r0,r1,a,b,c,d,ry,ru,rv;
+
+ int y, u, v, yy, vr, ug, vg, ub,r,g,b1;
+
+
+
+ src = psY = (unsigned char*)(srcbuf)+top_offset*src_w+left_offset;
+ //psUV = psY +src_w*src_h+top_offset*src_w/2+left_offset;
+ psUV = (unsigned char*)(srcbuf) +src_w*src_h+top_offset*src_w/2+left_offset;
+
+
+
+
+ dst = pdY = (unsigned char*)dstbuf;
+ pdUV = pdY + dst_w*dst_h;
+
+ //need crop ?
+ if((src_w*100/src_h) != (dst_w*100/dst_h)){
+ ratio = ((src_w*100/dst_w) >= (src_h*100/dst_h))?(src_h*100/dst_h):(src_w*100/dst_w);
+ cropW = ratio*dst_w/100;
+ cropH = ratio*dst_h/100;
+
+ left_offset=((src_w-cropW)>>1) & (~0x01);
+ top_offset=((src_h-cropH)>>1) & (~0x01);
+ }else{
+ cropW = src_w;
+ cropH = src_h;
+ top_offset=0;
+ left_offset=0;
+ }
+
+ zoomindstxIntInv = ((unsigned long)(cropW)<<16)/dstW + 1;
+ zoomindstyIntInv = ((unsigned long)(cropH)<<16)/dstH + 1;
+
+ for (line = 0; line < dstH; line++) {
+ yCoeff00_y = (line*zoomindstyIntInv)&0xffff;
+ yCoeff01_y = 0xffff - yCoeff00_y;
+ sY = (line*zoomindstyIntInv >> 16);
+ sY = (sY >= srcH - 1)? (srcH - 2) : sY;
+
+        sY_UV = ((line/2)*zoomindstyIntInv >> 16);
+ sY_UV = (sY_UV >= uv_src_h - 1)? (uv_src_h - 2) : sY_UV;
+ for (col = 0; col < dstW; col++) {
+
+
+ //get y
+ xCoeff00_y = (col*zoomindstxIntInv)&0xffff;
+ xCoeff01_y = 0xffff - xCoeff00_y;
+ sX = (col*zoomindstxIntInv >> 16);
+ sX = (sX >= srcW -1)?(srcW- 2) : sX;
+ a = psY[sY*srcW + sX];
+ b = psY[sY*srcW + sX + 1];
+ c = psY[(sY+1)*srcW + sX];
+ d = psY[(sY+1)*srcW + sX + 1];
+
+ r0 = (a * xCoeff01_y + b * xCoeff00_y)>>16 ;
+ r1 = (c * xCoeff01_y + d * xCoeff00_y)>>16 ;
+ ry = (r0 * yCoeff01_y + r1 * yCoeff00_y)>>16;
+
+ //get u & v
+ xCoeff00_uv = ((col/2)*zoomindstxIntInv)&0xffff;
+ xCoeff01_uv = 0xffff - xCoeff00_uv;
+ sX = ((col/2)*zoomindstxIntInv >> 16);
+ sX = (sX >= uv_src_w -1)?(uv_src_w- 2) : sX;
+ //U
+ a = psUV[(sY*uv_src_w + sX)*2];
+ b = psUV[(sY*uv_src_w + sX + 1)*2];
+ c = psUV[((sY+1)*uv_src_w + sX)*2];
+ d = psUV[((sY+1)*uv_src_w + sX + 1)*2];
+
+ r0 = (a * xCoeff01_uv + b * xCoeff00_uv)>>16 ;
+ r1 = (c * xCoeff01_uv + d * xCoeff00_uv)>>16 ;
+ ru = (r0 * yCoeff01_uv + r1 * yCoeff00_uv)>>16;
+
+ //v
+
+ a = psUV[(sY*uv_src_w + sX)*2 + 1];
+ b = psUV[(sY*uv_src_w + sX + 1)*2 + 1];
+ c = psUV[((sY+1)*uv_src_w + sX)*2 + 1];
+ d = psUV[((sY+1)*uv_src_w + sX + 1)*2 + 1];
+
+ r0 = (a * xCoeff01_uv + b * xCoeff00_uv)>>16 ;
+ r1 = (c * xCoeff01_uv + d * xCoeff00_uv)>>16 ;
+ rv = (r0 * yCoeff01_uv + r1 * yCoeff00_uv)>>16;
+
+
+
+ yy = ry << 8;
+ u = ru - 128;
+ ug = 88 * u;
+ ub = 454 * u;
+ v = rv - 128;
+ vg = 183 * v;
+ vr = 359 * v;
+
+ r = (yy + vr) >> 8;
+ g = (yy - ug - vg) >> 8;
+ b = (yy + ub ) >> 8;
+ if (r < 0) r = 0;
+ if (r > 255) r = 255;
+ if (g < 0) g = 0;
+ if (g > 255) g = 255;
+ if (b < 0) b = 0;
+ if (b > 255) b = 255;
+
+
+ }
+ }
+#endif
+}
+
+
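+/* arm_nv12torgb565() below is a CPU-side conversion from NV12 to RGB565 using
+ * integer BT.601-style coefficients scaled by 256: 359/256 ~= 1.402 (V->R),
+ * 88/256 ~= 0.344 and 183/256 ~= 0.714 (U/V->G), 454/256 ~= 1.772 (U->B); the
+ * results are clamped to [0,255] and packed as 5-6-5. */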
+extern "C" void arm_nv12torgb565(int width, int height, char *src, short int *dst,int dstbuf_w)
+{
+ int line, col;
+ int y, u, v, yy, vr, ug, vg, ub;
+ int r, g, b;
+ char *py, *pu, *pv;
+
+ py = src;
+ pu = py + (width * height);
+ pv = pu + 1;
+ y = *py++;
+ yy = y << 8;
+ u = *pu - 128;
+ ug = 88 * u;
+ ub = 454 * u;
+ v = *pv - 128;
+ vg = 183 * v;
+ vr = 359 * v;
+
+ for (line = 0; line < height; line++) {
+ for (col = 0; col < width; col++) {
+ r = (yy + vr) >> 8;
+ g = (yy - ug - vg) >> 8;
+ b = (yy + ub ) >> 8;
+ if (r < 0) r = 0;
+ if (r > 255) r = 255;
+ if (g < 0) g = 0;
+ if (g > 255) g = 255;
+ if (b < 0) b = 0;
+ if (b > 255) b = 255;
+
+ *dst++ = (((__u16)r>>3)<<11) | (((__u16)g>>2)<<5) | (((__u16)b>>3)<<0);
+
+ y = *py++;
+ yy = y << 8;
+ if (col & 1) {
+ pu += 2;
+ pv = pu+1;
+ u = *pu - 128;
+ ug = 88 * u;
+ ub = 454 * u;
+ v = *pv - 128;
+ vg = 183 * v;
+ vr = 359 * v;
+ }
+ }
+ dst += dstbuf_w - width;
+ if ((line & 1) == 0) {
+ //even line: rewind
+ pu -= width;
+ pv = pu+1;
+ }
+ }
+}
+
+
+
+
+extern "C" int rga_nv12torgb565(int src_width, int src_height, char *src, short int *dst, int dstbuf_width,int dst_width,int dst_height)
+{
+ int rgafd = -1,ret = -1;
+
+ //LOGD("rga_nv12torgb565: (%dx%d)->(%dx%d),src_buf = 0x%x,dst_buf = 0x%x",src_width,src_height,dst_width,dst_height,src,dst);
+
+#ifdef TARGET_RK30
+
+ if((rgafd = open("/dev/rga",O_RDWR)) < 0) {
+ LOGE("%s(%d):open rga device failed ,err: %s",__FUNCTION__,__LINE__,strerror(errno));
+ ret = -1;
+ return ret;
+ }
+
+#if (CONFIG_CAMERA_INVALIDATE_RGA==0)
+ struct rga_req Rga_Request;
+ int err = 0;
+
+ memset(&Rga_Request,0x0,sizeof(Rga_Request));
+
+ unsigned char *psY, *psUV;
+ int srcW,srcH,cropW,cropH;
+ int ratio = 0;
+ int top_offset=0,left_offset=0;
+//need crop ?
+ if((src_width*100/src_height) != (dst_width*100/dst_height)){
+ ratio = ((src_width*100/dst_width) >= (src_height*100/dst_height))?(src_height*100/dst_height):(src_width*100/dst_width);
+ cropW = ratio*dst_width/100;
+ cropH = ratio*dst_height/100;
+
+ left_offset=((src_width-cropW)>>1) & (~0x01);
+ top_offset=((src_height-cropH)>>1) & (~0x01);
+ }else{
+ cropW = src_width;
+ cropH = src_height;
+ top_offset=0;
+ left_offset=0;
+ }
+
+ psY = (unsigned char*)(src)+top_offset*src_width+left_offset;
+ psUV = (unsigned char*)(src) +src_width*src_height+top_offset*src_width/2+left_offset;
+
+ Rga_Request.src.yrgb_addr = (long)psY;
+ Rga_Request.src.uv_addr = (long)psUV;
+ Rga_Request.src.v_addr = Rga_Request.src.uv_addr;
+ Rga_Request.src.vir_w = src_width;
+ Rga_Request.src.vir_h = src_height;
+ Rga_Request.src.format = RK_FORMAT_YCbCr_420_SP;
+ Rga_Request.src.act_w = cropW;
+ Rga_Request.src.act_h = cropH;
+ Rga_Request.src.x_offset = 0;
+ Rga_Request.src.y_offset = 0;
+
+ Rga_Request.dst.yrgb_addr = (long)dst;
+ Rga_Request.dst.uv_addr = 0;
+ Rga_Request.dst.v_addr = 0;
+ Rga_Request.dst.vir_w = dstbuf_width;
+ Rga_Request.dst.vir_h = dst_height;
+ Rga_Request.dst.format = RK_FORMAT_RGB_565;
+ Rga_Request.clip.xmin = 0;
+ Rga_Request.clip.xmax = dst_width - 1;
+ Rga_Request.clip.ymin = 0;
+ Rga_Request.clip.ymax = dst_height - 1;
+ Rga_Request.dst.act_w = dst_width;
+ Rga_Request.dst.act_h = dst_height;
+ Rga_Request.dst.x_offset = 0;
+ Rga_Request.dst.y_offset = 0;
+ Rga_Request.mmu_info.mmu_en = 1;
+ Rga_Request.mmu_info.mmu_flag = ((2 & 0x3) << 4) | 1;
+ if (hwcGetBytesPerPixelForRga(Rga_Request.src.format) !=
+ hwcGetBytesPerPixelForRga(Rga_Request.dst.format)){/*avoid dither: dalon.zhang@rock-chips.com*/
+ Rga_Request.alpha_rop_flag |= (1 << 5); /* ddl@rock-chips.com: v0.4.3 */
+ }
+
+ if((src_width!=dst_width) || ( src_height!=dst_height)){
+ Rga_Request.sina = 0;
+ Rga_Request.cosa = 0x10000;
+ Rga_Request.rotate_mode = 1;
+ Rga_Request.scale_mode = 1;
+ }else{
+ Rga_Request.sina = 0;
+ Rga_Request.cosa = 0;
+ Rga_Request.rotate_mode = 0;
+ Rga_Request.scale_mode = 0;
+ }
+
+ if(ioctl(rgafd, RGA_BLIT_SYNC, &Rga_Request) != 0) {
+ LOGE("%s(%d): RGA_BLIT_ASYNC Failed ,err: %s", __FUNCTION__, __LINE__,strerror(errno));
+ err = -1;
+ }
+ return err;
+#else
+ LOGE("%s(%d): RGA is invalidate!",__FUNCTION__, __LINE__);
+ return 0;
+#endif
+#else
+ LOGE("%s(%d): rk29 havn't RGA device in chips!!",__FUNCTION__, __LINE__);
+ return -1;
+#endif
+}
+
+extern "C" int arm_camera_yuv420_scale_arm(int v4l2_fmt_src, int v4l2_fmt_dst,
+ char *srcbuf, char *dstbuf,int src_w, int src_h,int dst_w, int dst_h,bool mirror,int zoom_val);
+
+extern "C" int util_get_gralloc_buf_fd(buffer_handle_t handle,int* fd){
+ int err = 0;
+#if defined(RK_DRM_GRALLOC)
+ gralloc_module_t *gralloc_module = NULL;
+ hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&gralloc_module);
+ if (!gralloc_module)
+ ALOGE("error gralloc_module !!!!!");
+ int mem_fd = -1;
+ err = gralloc_module->perform(
+ gralloc_module,
+ GRALLOC_MODULE_PERFORM_GET_HADNLE_PRIME_FD,
+ handle, fd);
+ if (err)
+ LOGE("%s:get buffer fd error %d!",__func__,err);
+#else
+ err = -1;
+ LOGE("%s:not support !",__func__);
+#endif
+ return err;
+}
+
+
+#if defined(RK_DRM_GRALLOC)
+#include <RockchipRga.h>
+
+extern "C" int rga_nv12_scale_crop(
+ int src_width, int src_height, char *src_fd, short int *dst_fd,
+ int dst_width,int dst_height,int zoom_val,bool mirror,
+ bool isNeedCrop,bool isDstNV21,int dst_stride,bool vir_addr)
+{
+ int ret = 0;
+ rga_info_t src,dst;
+ int src_cropW,src_cropH,dst_cropW,dst_cropH,zoom_cropW,zoom_cropH;
+ int ratio = 0;
+ int src_top_offset=0,src_left_offset=0,dst_top_offset=0,dst_left_offset=0,zoom_top_offset=0,zoom_left_offset=0;
+
+ RockchipRga& rkRga(RockchipRga::get());
+
+ memset(&src, 0, sizeof(rga_info_t));
+ if (vir_addr) {
+ src.fd = -1;
+ src.virAddr = (void*)src_fd;
+ } else
+ src.fd = (unsigned long)src_fd;
+ src.mmuFlag = ((2 & 0x3) << 4) | 1 | (1 << 8) | (1 << 10);
+ memset(&dst, 0, sizeof(rga_info_t));
+ if (vir_addr) {
+ dst.fd = -1;
+ dst.virAddr = (void*)dst_fd;
+ } else
+ dst.fd = (unsigned long)dst_fd;
+ dst.mmuFlag = ((2 & 0x3) << 4) | 1 | (1 << 8) | (1 << 10);
+
+ //src.hnd = NULL;
+ //dst.hnd = NULL;
+
+ /*has something wrong with rga of rk312x mirror operation*/
+#if defined(TARGET_RK312x)
+ if(mirror){
+ LOGE("%s:rk312x rga not support mirror",__func__);
+ ret = -1;
+ goto failed;
+ }
+#endif
+ /*rk3188 do not support yuv to yuv scale by rga*/
+#if defined(TARGET_RK3188)
+ {
+ LOGE("%s:rk3188 rga not yuv scale",__func__);
+ ret = -1;
+ goto failed;
+ }
+#endif
+
+ if((dst_width > RGA_VIRTUAL_W) || (dst_height > RGA_VIRTUAL_H)){
+ LOGE("%s(%d):(dst_width > RGA_VIRTUAL_W) || (dst_height > RGA_VIRTUAL_H), switch to arm ",__FUNCTION__,__LINE__);
+ ret = -1;
+ goto failed;
+ }
+
+ //2) use rga
+
+ //need crop ? when cts FOV,don't crop
+ if(isNeedCrop && (src_width*100/src_height) != (dst_width*100/dst_height)){
+ ratio = ((src_width*100/dst_width) >= (src_height*100/dst_height))?(src_height*100/dst_height):(src_width*100/dst_width);
+ zoom_cropW = ratio*dst_width/100;
+ zoom_cropH = ratio*dst_height/100;
+
+ zoom_left_offset=((src_width-zoom_cropW)>>1) & (~0x01);
+ zoom_top_offset=((src_height-zoom_cropH)>>1) & (~0x01);
+ }else{
+ zoom_cropW = src_width;
+ zoom_cropH = src_height;
+ zoom_left_offset=0;
+ zoom_top_offset=0;
+ }
+
+ if(zoom_val > 100){
+ zoom_cropW = zoom_cropW*100/zoom_val;
+ zoom_cropH = zoom_cropH*100/zoom_val;
+ zoom_left_offset = ((src_width-zoom_cropW)>>1) & (~0x01);
+ zoom_top_offset= ((src_height-zoom_cropH)>>1) & (~0x01);
+ }
+
+ rga_set_rect(&src.rect, zoom_left_offset,zoom_top_offset,
+ zoom_cropW,zoom_cropH,src_width,src_height,HAL_PIXEL_FORMAT_YCrCb_NV12);
+ if (isDstNV21)
+ rga_set_rect(&dst.rect, 0,0,dst_width,dst_height,
+ dst_stride ? dst_stride : dst_width,
+ dst_height,HAL_PIXEL_FORMAT_YCrCb_420_SP);
+ else
+ rga_set_rect(&dst.rect, 0,0,dst_width,dst_height,
+ dst_stride ? dst_stride : dst_width,
+ dst_height,HAL_PIXEL_FORMAT_YCrCb_NV12);
+ if (mirror)
+ src.rotation = DRM_RGA_TRANSFORM_FLIP_H;
+ //TODO:sina,cosa,scale_mode,render_mode
+ ret = rkRga.RkRgaBlit(&src, &dst, NULL);
+ if (ret) {
+ LOGE("%s:rga blit failed",__func__);
+ goto failed;
+ }
+
+ return ret;
+failed:
+ return ret;
+}
+#else
+extern "C" int rga_nv12_scale_crop(int src_width, int src_height, char *src, short int *dst,
+ int dst_width,int dst_height,int zoom_val,bool mirror,bool isNeedCrop,bool isDstNV21)
+{
+ int rgafd = -1,ret = -1;
+ int scale_times_w = 0,scale_times_h = 0,h = 0,w = 0;
+ struct rga_req Rga_Request;
+ int err = 0;
+ unsigned char *psY, *psUV;
+ int src_cropW,src_cropH,dst_cropW,dst_cropH,zoom_cropW,zoom_cropH;
+ int ratio = 0;
+ int src_top_offset=0,src_left_offset=0,dst_top_offset=0,dst_left_offset=0,zoom_top_offset=0,zoom_left_offset=0;
+
+ /*has something wrong with rga of rk312x mirror operation*/
+ #if defined(TARGET_RK312x)
+ if(mirror){
+ return arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, (isDstNV21 ? V4L2_PIX_FMT_NV21:V4L2_PIX_FMT_NV12),
+ src, (char *)dst,src_width, src_height,dst_width, dst_height,mirror,zoom_val);
+ }
+ #endif
+ /*rk3188 do not support yuv to yuv scale by rga*/
+ #if defined(TARGET_RK3188)
+ return arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, (isDstNV21 ? V4L2_PIX_FMT_NV21:V4L2_PIX_FMT_NV12),
+ src, (char *)dst,src_width, src_height,dst_width, dst_height,mirror,zoom_val);
+ #endif
+
+ if((dst_width > RGA_VIRTUAL_W) || (dst_height > RGA_VIRTUAL_H)){
+ LOGE("%s(%d):(dst_width > RGA_VIRTUAL_W) || (dst_height > RGA_VIRTUAL_H), switch to arm ",__FUNCTION__,__LINE__);
+
+ return arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, (isDstNV21 ? V4L2_PIX_FMT_NV21:V4L2_PIX_FMT_NV12),
+ src, (char *)dst,src_width, src_height,dst_width, dst_height,mirror,zoom_val);
+ }
+
+ //need crop ? when cts FOV,don't crop
+ if(isNeedCrop && (src_width*100/src_height) != (dst_width*100/dst_height)){
+ ratio = ((src_width*100/dst_width) >= (src_height*100/dst_height))?(src_height*100/dst_height):(src_width*100/dst_width);
+ zoom_cropW = ratio*dst_width/100;
+ zoom_cropH = ratio*dst_height/100;
+
+ zoom_left_offset=((src_width-zoom_cropW)>>1) & (~0x01);
+ zoom_top_offset=((src_height-zoom_cropH)>>1) & (~0x01);
+ }else{
+ zoom_cropW = src_width;
+ zoom_cropH = src_height;
+ zoom_left_offset=0;
+ zoom_top_offset=0;
+ }
+
+ if(zoom_val > 100){
+ zoom_cropW = zoom_cropW*100/zoom_val;
+ zoom_cropH = zoom_cropH*100/zoom_val;
+ zoom_left_offset = ((src_width-zoom_cropW)>>1) & (~0x01);
+ zoom_top_offset= ((src_height-zoom_cropH)>>1) & (~0x01);
+ }
+
+
+ if(dst_width > RGA_ACTIVE_W){
+ scale_times_w = (dst_width/RGA_ACTIVE_W);
+ scale_times_w++;
+ }else{
+ scale_times_w = 1;
+ }
+ if(dst_height > RGA_ACTIVE_H){
+ scale_times_h = (dst_height/RGA_ACTIVE_H);
+ scale_times_h++;
+ } else {
+ scale_times_h = 1;
+ }
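+    /* The code assumes one RGA request can cover at most RGA_ACTIVE_W x
+     * RGA_ACTIVE_H, so a larger destination is split into scale_times_w x
+     * scale_times_h tiles and each sub-rectangle is blitted separately in the
+     * nested loop below. */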
+ if((rgafd = open("/dev/rga",O_RDWR)) < 0) {
+ LOGE("%s(%d):open rga device failed ,err: %s",__FUNCTION__,__LINE__,strerror(errno));
+ ret = -1;
+ return ret;
+ }
+
+ src_cropW = zoom_cropW/scale_times_w;
+ src_cropH = zoom_cropH/scale_times_h;
+
+ dst_cropW = dst_width/scale_times_w;
+ dst_cropH = dst_height/scale_times_h;
+
+ for(h = 0; h< scale_times_h; h++){
+ for(w = 0; w< scale_times_w; w++){
+ memset(&Rga_Request,0x0,sizeof(Rga_Request));
+
+ src_left_offset = zoom_left_offset + w*src_cropW;
+ src_top_offset = zoom_top_offset + h*src_cropH;
+
+ dst_left_offset = w*dst_cropW;
+ dst_top_offset = h*dst_cropH;
+
+ psY = (unsigned char*)(src);
+
+ #if defined(TARGET_RK3188)
+ Rga_Request.src.yrgb_addr = (long)psY;
+ Rga_Request.src.uv_addr = (long)psY + src_width * src_height;
+ #else
+ Rga_Request.src.yrgb_addr = 0;
+ Rga_Request.src.uv_addr = (long)psY;
+ #endif
+ Rga_Request.src.v_addr = 0;
+ Rga_Request.src.vir_w = src_width;
+ Rga_Request.src.vir_h = src_height;
+ Rga_Request.src.format = RK_FORMAT_YCbCr_420_SP;
+ Rga_Request.src.act_w = src_cropW & (~0x01);
+ Rga_Request.src.act_h = src_cropH & (~0x01);
+#if defined(TARGET_RK3368)
+ Rga_Request.src.x_offset = src_left_offset & (~0x1f);//32 alignment,rga's bug
+ Rga_Request.src.y_offset = src_top_offset & (~0xf);
+#else
+ Rga_Request.src.x_offset = src_left_offset & (~0x01);
+ Rga_Request.src.y_offset = src_top_offset & (~0x01);
+#endif
+ #if defined(TARGET_RK3188)
+ Rga_Request.dst.yrgb_addr = (long)dst;
+ Rga_Request.dst.uv_addr = (long)dst + dst_width*dst_height;
+ #else
+ Rga_Request.dst.yrgb_addr = 0;
+ Rga_Request.dst.uv_addr = (long)dst;
+ #endif
+ Rga_Request.dst.v_addr = 0;
+ Rga_Request.dst.vir_w = dst_width;
+ Rga_Request.dst.vir_h = dst_height;
+ if(isDstNV21)
+ Rga_Request.dst.format = RK_FORMAT_YCrCb_420_SP;
+ else
+ Rga_Request.dst.format = RK_FORMAT_YCbCr_420_SP;
+ Rga_Request.clip.xmin = 0;
+ Rga_Request.clip.xmax = dst_width - 1;
+ Rga_Request.clip.ymin = 0;
+ Rga_Request.clip.ymax = dst_height - 1;
+ Rga_Request.dst.act_w = dst_cropW;
+ Rga_Request.dst.act_h = dst_cropH;
+ Rga_Request.dst.x_offset = dst_left_offset;
+ Rga_Request.dst.y_offset = dst_top_offset;
+
+ Rga_Request.mmu_info.mmu_en = 1;
+ Rga_Request.mmu_info.mmu_flag = ((2 & 0x3) << 4) | 1 | (1 << 8) | (1 << 10);
+ if (hwcGetBytesPerPixelForRga(Rga_Request.src.format) !=
+ hwcGetBytesPerPixelForRga(Rga_Request.dst.format)){/*avoid dither: dalon.zhang@rock-chips.com*/
+ Rga_Request.alpha_rop_flag |= (1 << 5); /* ddl@rock-chips.com: v0.4.3 */
+ }
+
+ #if defined(TARGET_RK312x)
+ /* wrong operation of nv12 to nv21 ,not scale */
+ if(1/*(cropW != dst_width) || ( cropH != dst_height)*/){
+ #else
+ if((src_cropW != dst_width) || ( src_cropH != dst_height)){
+ #endif
+ Rga_Request.sina = 0;
+ Rga_Request.cosa = 0x10000;
+ Rga_Request.scale_mode = 1;
+ Rga_Request.rotate_mode = mirror ? 2:1;
+ }else{
+ Rga_Request.sina = 0;
+ Rga_Request.cosa = 0;
+ Rga_Request.scale_mode = 0;
+ Rga_Request.rotate_mode = mirror ? 2:0;
+ Rga_Request.render_mode = pre_scaling_mode;
+ }
+
+
+ if(ioctl(rgafd, RGA_BLIT_SYNC, &Rga_Request) != 0) {
+ LOGE("%s(%d): RGA_BLIT_ASYNC Failed ,err: %s", __FUNCTION__, __LINE__,strerror(errno));
+ err = -1;
+ }
+ }
+ }
+ close(rgafd);
+
+ return err;
+}
+#endif
+
+extern "C" int arm_camera_yuv420_scale_arm(int v4l2_fmt_src, int v4l2_fmt_dst,
+ char *srcbuf, char *dstbuf,int src_w, int src_h,int dst_w, int dst_h,bool mirror,int zoom_val)
+{
+ unsigned char *psY,*pdY,*psUV,*pdUV;
+ unsigned char *src,*dst;
+ int srcW,srcH,cropW,cropH,dstW,dstH;
+ long zoomindstxIntInv,zoomindstyIntInv;
+ long x,y;
+ long yCoeff00,yCoeff01,xCoeff00,xCoeff01;
+ long sX,sY;
+ long r0,r1,a,b,c,d;
+ int ret = 0;
+ bool nv21DstFmt = false;
+ int ratio = 0;
+ int top_offset=0,left_offset=0;
+ if((v4l2_fmt_src != V4L2_PIX_FMT_NV12) ||
+ ((v4l2_fmt_dst != V4L2_PIX_FMT_NV12) && (v4l2_fmt_dst != V4L2_PIX_FMT_NV21) )){
+ LOGE("%s:%d,not suppport this format ",__FUNCTION__,__LINE__);
+ return -1;
+ }
+
+ //just copy ?
+ if((v4l2_fmt_src == v4l2_fmt_dst) && (mirror == false)
+ &&(src_w == dst_w) && (src_h == dst_h) && (zoom_val == 100)){
+ memcpy(dstbuf,srcbuf,src_w*src_h*3/2);
+ return 0;
+ }else if((v4l2_fmt_dst == V4L2_PIX_FMT_NV21)
+ && (src_w == dst_w) && (src_h == dst_h)
+ && (mirror == false) && (zoom_val == 100)){
+ //just convert fmt
+
+ cameraFormatConvert(V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_NV21, NULL,
+ srcbuf, dstbuf,0,0,src_w*src_h*3/2,
+ src_w, src_h,src_w,
+ dst_w, dst_h,dst_w,
+ mirror);
+ return 0;
+
+ }
+
+ if ((v4l2_fmt_dst == V4L2_PIX_FMT_NV21)){
+ nv21DstFmt = true;
+
+ }
+
+ //need crop ?
+ if((src_w*100/src_h) != (dst_w*100/dst_h)){
+ ratio = ((src_w*100/dst_w) >= (src_h*100/dst_h))?(src_h*100/dst_h):(src_w*100/dst_w);
+ cropW = ratio*dst_w/100;
+ cropH = ratio*dst_h/100;
+
+ left_offset=((src_w-cropW)>>1) & (~0x01);
+ top_offset=((src_h-cropH)>>1) & (~0x01);
+ }else{
+ cropW = src_w;
+ cropH = src_h;
+ top_offset=0;
+ left_offset=0;
+ }
+
+ //zoom ?
+ if(zoom_val > 100){
+ cropW = cropW*100/zoom_val;
+ cropH = cropH*100/zoom_val;
+ left_offset=((src_w-cropW)>>1) & (~0x01);
+ top_offset=((src_h-cropH)>>1) & (~0x01);
+ }
+
+ src = psY = (unsigned char*)(srcbuf)+top_offset*src_w+left_offset;
+ //psUV = psY +src_w*src_h+top_offset*src_w/2+left_offset;
+ psUV = (unsigned char*)(srcbuf) +src_w*src_h+top_offset*src_w/2+left_offset;
+
+
+ srcW =src_w;
+ srcH = src_h;
+// cropW = src_w;
+// cropH = src_h;
+
+
+ dst = pdY = (unsigned char*)dstbuf;
+ pdUV = pdY + dst_w*dst_h;
+ dstW = dst_w;
+ dstH = dst_h;
+
+ zoomindstxIntInv = ((unsigned long)(cropW)<<16)/dstW + 1;
+ zoomindstyIntInv = ((unsigned long)(cropH)<<16)/dstH + 1;
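+    /* zoomindst{x,y}IntInv are 16.16 fixed-point ratios of the crop size to the
+     * destination size; e.g. scaling a 1280-wide crop to 640 gives
+     * (1280<<16)/640 + 1 = 0x20001, so destination column x maps back to source
+     * column (x*0x20001)>>16 and the low 16 bits act as the bilinear weight. */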
+ //y
+ //for(y = 0; y<dstH - 1 ; y++ ) {
+ for(y = 0; y<dstH; y++ ) {
+ yCoeff00 = (y*zoomindstyIntInv)&0xffff;
+ yCoeff01 = 0xffff - yCoeff00;
+ sY = (y*zoomindstyIntInv >> 16);
+ sY = (sY >= srcH - 1)? (srcH - 2) : sY;
+ for(x = 0; x<dstW; x++ ) {
+ xCoeff00 = (x*zoomindstxIntInv)&0xffff;
+ xCoeff01 = 0xffff - xCoeff00;
+ sX = (x*zoomindstxIntInv >> 16);
+ sX = (sX >= srcW -1)?(srcW- 2) : sX;
+ a = psY[sY*srcW + sX];
+ b = psY[sY*srcW + sX + 1];
+ c = psY[(sY+1)*srcW + sX];
+ d = psY[(sY+1)*srcW + sX + 1];
+
+ r0 = (a * xCoeff01 + b * xCoeff00)>>16 ;
+ r1 = (c * xCoeff01 + d * xCoeff00)>>16 ;
+ r0 = (r0 * yCoeff01 + r1 * yCoeff00)>>16;
+
+ if(mirror)
+ pdY[dstW -1 - x] = r0;
+ else
+ pdY[x] = r0;
+ }
+ pdY += dstW;
+ }
+
+ dstW /= 2;
+ dstH /= 2;
+ srcW /= 2;
+ srcH /= 2;
+
+ //UV
+ //for(y = 0; y<dstH - 1 ; y++ ) {
+ for(y = 0; y<dstH; y++ ) {
+ yCoeff00 = (y*zoomindstyIntInv)&0xffff;
+ yCoeff01 = 0xffff - yCoeff00;
+ sY = (y*zoomindstyIntInv >> 16);
+ sY = (sY >= srcH -1)? (srcH - 2) : sY;
+ for(x = 0; x<dstW; x++ ) {
+ xCoeff00 = (x*zoomindstxIntInv)&0xffff;
+ xCoeff01 = 0xffff - xCoeff00;
+ sX = (x*zoomindstxIntInv >> 16);
+ sX = (sX >= srcW -1)?(srcW- 2) : sX;
+ //U
+ a = psUV[(sY*srcW + sX)*2];
+ b = psUV[(sY*srcW + sX + 1)*2];
+ c = psUV[((sY+1)*srcW + sX)*2];
+ d = psUV[((sY+1)*srcW + sX + 1)*2];
+
+ r0 = (a * xCoeff01 + b * xCoeff00)>>16 ;
+ r1 = (c * xCoeff01 + d * xCoeff00)>>16 ;
+ r0 = (r0 * yCoeff01 + r1 * yCoeff00)>>16;
+
+ if(mirror && nv21DstFmt)
+ pdUV[dstW*2-1- (x*2)] = r0;
+ else if(mirror)
+ pdUV[dstW*2-1-(x*2+1)] = r0;
+ else if(nv21DstFmt)
+ pdUV[x*2 + 1] = r0;
+ else
+ pdUV[x*2] = r0;
+ //V
+ a = psUV[(sY*srcW + sX)*2 + 1];
+ b = psUV[(sY*srcW + sX + 1)*2 + 1];
+ c = psUV[((sY+1)*srcW + sX)*2 + 1];
+ d = psUV[((sY+1)*srcW + sX + 1)*2 + 1];
+
+ r0 = (a * xCoeff01 + b * xCoeff00)>>16 ;
+ r1 = (c * xCoeff01 + d * xCoeff00)>>16 ;
+ r0 = (r0 * yCoeff01 + r1 * yCoeff00)>>16;
+
+ if(mirror && nv21DstFmt)
+ pdUV[dstW*2-1- (x*2+1) ] = r0;
+ else if(mirror)
+ pdUV[dstW*2-1-(x*2)] = r0;
+ else if(nv21DstFmt)
+ pdUV[x*2] = r0;
+ else
+ pdUV[x*2 + 1] = r0;
+ }
+ pdUV += dstW*2;
+ }
+ return 0;
+}
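+
+// Illustrative call (not part of the original source): scale a mirrored
+// 1280x720 NV12 frame down to 640x480 NV21 with 2x digital zoom:
+//   arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_NV21,
+//                               srcbuf, dstbuf, 1280, 720, 640, 480, true, 200);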
+
+extern "C" int rk_camera_zoom_ipp(int v4l2_fmt_src, int srcbuf, int src_w, int src_h,int dstbuf,int zoom_value)
+{
+ int vipdata_base;
+
+ struct rk29_ipp_req ipp_req;
+ int src_y_offset,src_uv_offset,dst_y_offset,dst_uv_offset,src_y_size,dst_y_size;
+ int scale_w_times =0,scale_h_times = 0,w,h;
+ int ret = 0;
+ int ippFd = -1;
+ int ratio = 0;
+ int top_offset=0,left_offset=0;
+ int cropW,cropH;
+
+
+ if((ippFd = open("/dev/rk29-ipp",O_RDWR)) < 0) {
+ LOGE("%s(%d):open /dev/rk29-ipp device failed ,err: %s",__FUNCTION__,__LINE__,strerror(errno));
+ ret = -1;
+ goto do_ipp_err;
+ }
+
+ /*
+ *ddl@rock-chips.com:
+	 * The IPP destination resolution is limited to 2047x1088, so the scale operation is broken up into multiple passes
+ */
+ if ((src_w > 0x7f0) || (src_h > 0x430)) {
+ scale_w_times = ((src_w/0x7f0)>(src_h/0x430))?(src_w/0x7f0):(src_h/0x430);
+ scale_h_times = scale_w_times;
+ scale_w_times++;
+ scale_h_times++;
+ } else {
+ scale_w_times = 1;
+ scale_h_times = 1;
+ }
+ memset(&ipp_req, 0, sizeof(struct rk29_ipp_req));
+
+
+ //compute zoom
+ cropW = (src_w*100/zoom_value)& (~0x03);
+ cropH = (src_h*100/zoom_value)& (~0x03);
+ left_offset=MAX((((src_w-cropW)>>1)-1),0);
+ top_offset=MAX((((src_h-cropH)>>1)-1),0);
+ left_offset &= ~0x01;
+ top_offset &=~0x01;
+
+ ipp_req.timeout = 3000;
+ ipp_req.flag = IPP_ROT_0;
+ ipp_req.store_clip_mode =1;
+ ipp_req.src0.w = cropW/scale_w_times;
+ ipp_req.src0.h = cropH/scale_h_times;
+ ipp_req.src_vir_w = src_w;
+ ipp_req.src0.fmt = IPP_Y_CBCR_H2V2;
+ ipp_req.dst0.w = src_w/scale_w_times;
+ ipp_req.dst0.h = src_h/scale_h_times;
+ ipp_req.dst_vir_w = src_w;
+ ipp_req.dst0.fmt = IPP_Y_CBCR_H2V2;
+ vipdata_base = srcbuf;
+ src_y_size = src_w*src_h;
+ dst_y_size = src_w*src_h;
+
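+	/* Blit the cropped source tile by tile so each pass stays within the 2047x1088 IPP limit; every tile is retried up to three times. */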
+ for (h=0; h<scale_h_times; h++) {
+ for (w=0; w<scale_w_times; w++) {
+ int ipp_times = 3;
+ src_y_offset = (top_offset + h*cropH/scale_h_times)* src_w
+ + left_offset + w*cropW/scale_w_times;
+ src_uv_offset = (top_offset + h*cropH/scale_h_times)* src_w/2
+ + left_offset + w*cropW/scale_w_times;
+
+ dst_y_offset = src_w*src_h*h/scale_h_times + src_w*w/scale_w_times;
+ dst_uv_offset = src_w*src_h*h/scale_h_times/2 + src_w*w/scale_w_times;
+
+ ipp_req.src0.YrgbMst = vipdata_base + src_y_offset;
+ ipp_req.src0.CbrMst = vipdata_base + src_y_size + src_uv_offset;
+ ipp_req.dst0.YrgbMst = dstbuf + dst_y_offset;
+ ipp_req.dst0.CbrMst = dstbuf + dst_y_size + dst_uv_offset;
+ while(ipp_times-- > 0) {
+ if (ioctl(ippFd,IPP_BLIT_SYNC,&ipp_req)){
+				LOGE("ipp blit failed, retrying, ipp_times = %d, err: %s \n",ipp_times,strerror(errno));
+ } else {
+ break;
+ }
+ }
+ if (ipp_times <= 0) {
+ ret = -1;
+ goto do_ipp_err;
+ }
+ }
+ }
+
+do_ipp_err:
+ if(ippFd > 0)
+ close(ippFd);
+
+ return ret;
+}
+
+extern "C" int rk_camera_yuv_scale_crop_ipp(int v4l2_fmt_src, int v4l2_fmt_dst,
+ long srcbuf, long dstbuf,int src_w, int src_h,int dst_w, int dst_h,bool rotation_180)
+{
+ long vipdata_base;
+
+ struct rk29_ipp_req ipp_req;
+ int src_y_offset,src_uv_offset,dst_y_offset,dst_uv_offset,src_y_size,dst_y_size;
+ int scale_w_times =0,scale_h_times = 0,w,h;
+ int ret = 0;
+ int ippFd = -1;
+ int ratio = 0;
+ int top_offset=0,left_offset=0;
+ int cropW,cropH;
+
+
+ if((ippFd = open("/dev/rk29-ipp",O_RDWR)) < 0) {
+ LOGE("%s(%d):open /dev/rk29-ipp device failed ,err: %s",__FUNCTION__,__LINE__,strerror(errno));
+ ret = -1;
+ goto do_ipp_err;
+ }
+
+
+ /*
+ *ddl@rock-chips.com:
+	 * The IPP destination resolution is limited to 2047x1088, so the scale operation is broken up into multiple passes
+ */
+ if ((dst_w > 0x7f0) || (dst_h > 0x430)) {
+ scale_w_times = ((dst_w/0x7f0)>(dst_h/0x430))?(dst_w/0x7f0):(dst_h/0x430);
+ scale_h_times = scale_w_times;
+ scale_w_times++;
+ scale_h_times++;
+ } else {
+ scale_w_times = 1;
+ scale_h_times = 1;
+ }
+ memset(&ipp_req, 0, sizeof(struct rk29_ipp_req));
+
+ //need crop ?
+ if((src_w*100/src_h) != (dst_w*100/dst_h)){
+ ratio = ((src_w*100/dst_w) >= (src_h*100/dst_h))?(src_h*100/dst_h):(src_w*100/dst_w);
+ cropW = (ratio*dst_w/100)& (~0x03);
+ cropH = (ratio*dst_h/100)& (~0x03);
+
+ left_offset=MAX((((src_w-cropW)>>1)-1),0);
+ top_offset=MAX((((src_h-cropH)>>1)-1),0);
+ left_offset &= ~0x01;
+ top_offset &=~0x01;
+ }else{
+ cropW = src_w;
+ cropH = src_h;
+ top_offset=0;
+ left_offset=0;
+ }
+#if 1
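+	/* Hard-coded tiling for the 2592x1944 -> 2592x1458 capture path (center crop to 16:9). */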
+ if((src_w == 2592) && (src_h == 1944) && (dst_w == 2592) && (dst_h == 1458)){
+ scale_w_times= 2;
+ scale_h_times = 3;
+ cropW = dst_w;
+ cropH = dst_h;
+
+ left_offset=0;
+ top_offset=242;
+ }
+#endif
+ ipp_req.timeout = 3000;
+ if(rotation_180)
+ ipp_req.flag = IPP_ROT_180;
+ else
+ ipp_req.flag = IPP_ROT_0;
+ ipp_req.store_clip_mode =1;
+ ipp_req.src0.w = cropW/scale_w_times;
+ ipp_req.src0.h = cropH/scale_h_times;
+ ipp_req.src_vir_w = src_w;
+ if(v4l2_fmt_src == V4L2_PIX_FMT_NV12)
+ ipp_req.src0.fmt = IPP_Y_CBCR_H2V2;
+ else if(v4l2_fmt_src == V4L2_PIX_FMT_NV21)
+ ipp_req.src0.fmt = IPP_Y_CBCR_H2V1;
+ ipp_req.dst0.w = dst_w/scale_w_times;
+ ipp_req.dst0.h = dst_h/scale_h_times;
+ ipp_req.dst_vir_w = dst_w;
+ if(v4l2_fmt_dst == V4L2_PIX_FMT_NV12)
+ ipp_req.dst0.fmt = IPP_Y_CBCR_H2V2;
+ else if(v4l2_fmt_dst == V4L2_PIX_FMT_NV21)
+ ipp_req.dst0.fmt = IPP_Y_CBCR_H2V1;
+ vipdata_base = srcbuf;
+ src_y_size = src_w*src_h;
+ dst_y_size = dst_w*dst_h;
+ for (h=0; h<scale_h_times; h++) {
+ for (w=0; w<scale_w_times; w++) {
+ int ipp_times = 3;
+ src_y_offset = (top_offset + h*cropH/scale_h_times)* src_w
+ + left_offset + w*cropW/scale_w_times;
+ src_uv_offset = (top_offset + h*cropH/scale_h_times)* src_w/2
+ + left_offset + w*cropW/scale_w_times;
+
+ if(rotation_180){
+ dst_y_offset = dst_w*dst_h*(scale_h_times-1-h)/scale_h_times + dst_w*(scale_w_times-1-w)/scale_w_times;
+ dst_uv_offset = dst_w*dst_h*(scale_h_times-1-h)/scale_h_times/2 + dst_w*(scale_w_times-1-w)/scale_w_times;
+ }
+ else{
+ dst_y_offset = dst_w*dst_h*h/scale_h_times + dst_w*w/scale_w_times;
+ dst_uv_offset = dst_w*dst_h*h/scale_h_times/2 + dst_w*w/scale_w_times;
+ }
+
+ ipp_req.src0.YrgbMst = vipdata_base + src_y_offset;
+ ipp_req.src0.CbrMst = vipdata_base + src_y_size + src_uv_offset;
+ ipp_req.dst0.YrgbMst = dstbuf + dst_y_offset;
+ ipp_req.dst0.CbrMst = dstbuf + dst_y_size + dst_uv_offset;
+ while(ipp_times-- > 0) {
+ if (ioctl(ippFd,IPP_BLIT_SYNC,&ipp_req)){
+				LOGE("ipp blit failed, retrying, ipp_times = %d, err: %s \n",ipp_times,strerror(errno));
+ } else {
+ break;
+ }
+ }
+ if (ipp_times <= 0) {
+ ret = -1;
+ goto do_ipp_err;
+ }
+ }
+ }
+
+do_ipp_err:
+ if(ippFd > 0)
+ close(ippFd);
+ return ret;
+}
+
+extern "C" int YData_Mirror_Line(int v4l2_fmt_src, int *psrc, int *pdst, int w)
+{
+ int i;
+
+ for (i=0; i<(w>>2); i++) {
+ *pdst = ((*psrc>>24)&0x000000ff) | ((*psrc>>8)&0x0000ff00)
+ | ((*psrc<<8)&0x00ff0000) | ((*psrc<<24)&0xff000000);
+ psrc++;
+ pdst--;
+ }
+
+ return 0;
+}
+extern "C" int UVData_Mirror_Line(int v4l2_fmt_src, int *psrc, int *pdst, int w)
+{
+ int i;
+
+ for (i=0; i<(w>>2); i++) {
+ *pdst = ((*psrc>>16)&0x0000ffff) | ((*psrc<<16)&0xffff0000);
+ psrc++;
+ pdst--;
+ }
+
+ return 0;
+}
+extern "C" int YuvData_Mirror_Flip(int v4l2_fmt_src, char *pdata, char *pline_tmp, int w, int h)
+{
+ int *pdata_tmp = NULL;
+ int *ptop, *pbottom;
+ int err = 0,i,j;
+
+ pdata_tmp = (int*)pline_tmp;
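+	/* In-place 180-degree rotation: rows are mirrored and swapped top<->bottom, with pline_tmp used as one row of scratch space. */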
+
+ // Y mirror and flip
+ ptop = (int*)pdata;
+ pbottom = (int*)(pdata+w*(h-1));
+ for (j=0; j<(h>>1); j++) {
+ YData_Mirror_Line(v4l2_fmt_src, ptop, pdata_tmp+((w>>2)-1),w);
+ YData_Mirror_Line(v4l2_fmt_src, pbottom, ptop+((w>>2)-1), w);
+ memcpy(pbottom, pdata_tmp, w);
+ ptop += (w>>2);
+ pbottom -= (w>>2);
+ }
+ // UV mirror and flip
+ ptop = (int*)(pdata+w*h);
+ pbottom = (int*)(pdata+w*(h*3/2-1));
+ for (j=0; j<(h>>2); j++) {
+ UVData_Mirror_Line(v4l2_fmt_src, ptop, pdata_tmp+((w>>2)-1),w);
+ UVData_Mirror_Line(v4l2_fmt_src, pbottom, ptop+((w>>2)-1), w);
+ memcpy(pbottom, pdata_tmp, w);
+ ptop += (w>>2);
+ pbottom -= (w>>2);
+ }
+YuvData_Mirror_Flip_end:
+ return err;
+}
+extern "C" int YUV420_rotate(const unsigned char* srcy, int src_stride, unsigned char* srcuv,
+ unsigned char* dsty, int dst_stride, unsigned char* dstuv,
+ int width, int height,int rotate_angle){
+ int i = 0,j = 0;
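+	/* Rotate a semi-planar frame: the Y plane is transposed and interleaved U/V pairs are moved as units; 90/270 degrees are realized by negating the source or destination stride. */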
+ // 90 , y plane
+ if(rotate_angle == 90){
+ srcy += src_stride * (height - 1);
+ srcuv += src_stride * ((height >> 1)- 1);
+ src_stride = -src_stride;
+ }else if(rotate_angle == 270){
+ dsty += dst_stride * (width - 1);
+ dstuv += dst_stride * ((width>>1) - 1);
+ dst_stride = -dst_stride;
+ }
+
+ for (i = 0; i < width; ++i)
+ for (j = 0; j < height; ++j)
+ *(dsty+i * dst_stride + j) = *(srcy+j * src_stride + i);
+
+ //uv
+ unsigned char av_u0,av_v0;
+ for (i = 0; i < width; i += 2)
+ for (j = 0; j < (height>>1); ++j) {
+ av_u0 = *(srcuv+i + (j * src_stride));
+ av_v0 = *(srcuv+i + (j * src_stride)+1);
+ *(dstuv+((j<<1) + ((i >> 1) * dst_stride)))= av_u0;
+ *(dstuv+((j<<1) + ((i >> 1) * dst_stride)+1)) = av_v0;
+ }
+
+ return 0;
+ }
+
+ extern "C" int cameraFormatConvert(int v4l2_fmt_src, int v4l2_fmt_dst, const char *android_fmt_dst,
+ char *srcbuf, char *dstbuf,long srcphy,long dstphy,int src_size,
+ int src_w, int src_h, int srcbuf_w,
+ int dst_w, int dst_h, int dstbuf_w,
+ bool mirror)
+ {
+ int y_size,i,j;
+ int ret = -1;
+ /*
+ if (v4l2_fmt_dst) {
+ LOGD("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x,%dx%d)->'%c%c%c%c'@(0x%x,0x%x,%dx%d) ",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF,
+ (int)srcbuf, srcphy,src_w,src_h,
+ v4l2_fmt_dst & 0xFF, (v4l2_fmt_dst >> 8) & 0xFF,
+ (v4l2_fmt_dst >> 16) & 0xFF, (v4l2_fmt_dst >> 24) & 0xFF,
+ (int)dstbuf,dstphy,dst_w,dst_h);
+ } else if (android_fmt_dst) {
+ LOGD("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x,%dx%d)->%s@(0x%x,0x%x,%dx%d)",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF
+ , (int)srcbuf, srcphy,src_w,src_h,android_fmt_dst, (int)dstbuf,dstphy,
+ dst_w,dst_h);
+ }
+ */
+
+ y_size = src_w*src_h;
+ switch (v4l2_fmt_src)
+ {
+ case V4L2_PIX_FMT_YUV420:
+ {
+// if (CAMERA_IS_UVC_CAMERA()
+// || (CAMERA_IS_RKSOC_CAMERA() && (mCamDriverCapability.version != KERNEL_VERSION(0, 0, 1)))) {
+// goto cameraFormatConvert_default;
+// }
+ }
+ case V4L2_PIX_FMT_NV12:
+ {
+ int *dst_vu, *src_uv;
+
+ if ((v4l2_fmt_dst == V4L2_PIX_FMT_NV12) ||
+ (android_fmt_dst && (strcmp(android_fmt_dst,CAMERA_DISPLAY_FORMAT_NV12)==0))) {
+ if (dstbuf && (dstbuf != srcbuf)) {
+ if (dstbuf_w == dst_w) {
+ memcpy(dstbuf,srcbuf, y_size*3/2);
+ } else { /* ddl@rock-chips.com: v0.4.1 */
+ for (i=0;i<(dst_h*3/2);i++) {
+ memcpy(dstbuf,srcbuf, dst_w);
+ dstbuf += dstbuf_w;
+ srcbuf += srcbuf_w;
+ }
+ }
+ ret = 0;
+ }
+ } else if ((v4l2_fmt_dst == V4L2_PIX_FMT_NV21) ||
+ (android_fmt_dst && (strcmp(android_fmt_dst,android::CameraParameters::PIXEL_FORMAT_YUV420SP)==0))) {
+ if ((src_w == dst_w) && (src_h == dst_h)) {
+ if (mirror == false) {
+ if (dstbuf != srcbuf)
+ memcpy(dstbuf,srcbuf, y_size);
+ src_uv = (int*)(srcbuf + y_size);
+ dst_vu = (int*)(dstbuf+y_size);
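+					/* Y was copied above; swap each interleaved U/V byte pair (two pairs per 32-bit word) to turn NV12 into NV21. */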
+ for (i=0; i<(y_size>>3); i++) {
+ *dst_vu = ((*src_uv&0x00ff00ff)<<8) | ((*src_uv&0xff00ff00)>>8);
+ dst_vu++;
+ src_uv++;
+ }
+ } else {
+ char *psrc,*pdst;
+ psrc = srcbuf;
+ pdst = dstbuf + dst_w-1;
+ for (i=0; i<src_h; i++) {
+ for (j=0; j<src_w; j++) {
+ *pdst-- = *psrc++;
+ }
+ pdst += 2*dst_w;
+ }
+
+ psrc = srcbuf + y_size;
+ pdst = dstbuf + y_size + dst_w-1;
+ for (i=0; i<src_h/2; i++) {
+ for (j=0; j<src_w; j++) {
+ *pdst-- = *psrc++;
+ }
+ pdst += 2*dst_w;
+ }
+ }
+ ret = 0;
+ } else {
+ if ((v4l2_fmt_dst == V4L2_PIX_FMT_NV21) ||
+ (android_fmt_dst && (strcmp(android_fmt_dst,android::CameraParameters::PIXEL_FORMAT_YUV420SP)==0))) {
+ int *dst_uv,*src_uv;
+ unsigned *dst_y,*src_y,*src_y1;
+ int a, b, c, d;
+ if ((src_w == dst_w*4) && (src_h == dst_h*4)) {
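+						/* Fast 4:1 downscale: each output Y approximates the average of four corner samples of the corresponding 4x4 source block; U/V are decimated and byte-swapped for the NV21 output. */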
+ dst_y = (unsigned int*)dstbuf;
+ src_y = (unsigned int*)srcbuf;
+ src_y1= src_y + (src_w*3)/4;
+ for (i=0; i<dst_h; i++) {
+ for(j=0; j<dst_w/4; j++) {
+ a = (*src_y>>24) + (*src_y&0xff) + (*src_y1>>24) + (*src_y1&0xff);
+ a >>= 2;
+ src_y++;
+ src_y1++;
+ b = (*src_y>>24) + (*src_y&0xff) + (*src_y1>>24) + (*src_y1&0xff);
+ b >>= 2;
+ src_y++;
+ src_y1++;
+ c = (*src_y>>24) + (*src_y&0xff) + (*src_y1>>24) + (*src_y1&0xff);
+ c >>= 2;
+ src_y++;
+ src_y1++;
+ d = (*src_y>>24) + (*src_y&0xff) + (*src_y1>>24) + (*src_y1&0xff);
+ d >>= 2;
+ src_y++;
+ src_y1++;
+ *dst_y++ = a | (b<<8) | (c<<16) | (d<<24);
+ }
+ //dst_y = (int*)(srcbuf+src_w*(i+1));
+ src_y += (src_w*3)/4;
+ src_y1= src_y + (src_w*3)/4;
+ }
+ dst_uv = (int*)(dstbuf+dst_w*dst_h);
+ //dst_uv = (int*)(srcbuf+y_size);
+ src_uv = (int*)(srcbuf+y_size);
+ for (i=0; i<dst_h/2; i++) {
+ for(j=0; j<dst_w/4; j++) {
+ *dst_uv = (*src_uv&0xffff0000)|((*(src_uv+2)&0xffff0000)>>16);
+ *dst_uv = ((*dst_uv&0x00ff00ff)<<8)|((*dst_uv&0xff00ff00)>>8);
+ dst_uv++;
+ src_uv += 4;
+ }
+ //dst_uv = (int*)(srcbuf+y_size+src_w*(i+1));
+ src_uv += src_w*3/4;
+ }
+ }
+ ret = 0;
+ } else {
+ if (v4l2_fmt_dst) {
+ LOGE("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x)->'%c%c%c%c'@(0x%lx,0x%lx), %dx%d->%dx%d "
+					"scaling isn't supported",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF,
+ v4l2_fmt_dst & 0xFF, (v4l2_fmt_dst >> 8) & 0xFF,
+ (v4l2_fmt_dst >> 16) & 0xFF, (v4l2_fmt_dst >> 24) & 0xFF,
+ (long)srcbuf, srcphy, (long)dstbuf,dstphy,src_w,src_h,dst_w,dst_h);
+ } else if (android_fmt_dst) {
+ LOGD("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x)->%s@(0x%lx,0x%lx) %dx%d->%dx%d "
+					"scaling isn't supported",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF
+ , (long)srcbuf, srcphy,android_fmt_dst, (long)dstbuf,dstphy,
+ src_w,src_h,dst_w,dst_h);
+ }
+ }
+ }
+ } else if (android_fmt_dst && (strcmp(android_fmt_dst,android::CameraParameters::PIXEL_FORMAT_RGB565)==0)) {
+/*
+ if (srcphy && dstphy) {
+ #ifdef TARGET_RK29
+ YUV2RGBParams para;
+
+ memset(&para, 0x00, sizeof(YUV2RGBParams));
+ para.yuvAddr = srcphy;
+ para.outAddr = dstphy;
+ para.inwidth = (src_w + 15)&(~15);
+ para.inheight = (src_h + 15)&(~15);
+ para.outwidth = (dst_w + 15)&(~15);
+ para.outheight = (dst_h + 15)&(~15);
+ para.inColor = PP_IN_YUV420sp;
+ para.outColor = PP_OUT_RGB565;
+
+ ret = doYuvToRgb(&para);
+ #else
+			LOGE("%s(%d): Converting nv12 to rgb565 via physical addresses isn't supported on the current platform",__FUNCTION__,__LINE__);
+ #endif
+ } else if (srcbuf && dstbuf) {
+ if(mRGAFd > 0) {
+ ret = rga_nv12torgb565(mRGAFd,src_w,src_h,srcbuf, (short int*)dstbuf,dstbuf_w);
+ } else {
+ ret = arm_nv12torgb565(src_w,src_h,srcbuf, (short int*)dstbuf,dstbuf_w);
+ }
+ }
+*/
+		} else if (android_fmt_dst && (strcmp(android_fmt_dst,android::CameraParameters::PIXEL_FORMAT_YUV420P)==0)) {
+ char *dst_u,*dst_v,*src_y,*dst_y,*srcuv;
+ int dstc_size,dsty_size, align_dstw,align_dsthalfw;
+
+ if ((src_w == dst_w) && (src_h == dst_h)) {
+ align_dstw = ((dst_w+15)&0xfffffff0);
+ align_dsthalfw = ((dst_w/2+15)&0xfffffff0);
+ dsty_size = align_dstw*dst_h;
+ dstc_size = align_dsthalfw*dst_h/2;
+ src_y = srcbuf;
+ dst_y = dstbuf;
+
+ if (mirror == false) {
+ for (j=0; j<src_h; j++) {
+ for (i=0; i<src_w; i++) {
+ *dst_y++ = *src_y++;
+ }
+ dst_y += align_dstw-src_w;
+ }
+
+ srcuv = (char*)(srcbuf + y_size);
+ dst_u = (char*)(dstbuf+dsty_size);
+ dst_v = dst_u + dstc_size;
+
+ for (j=0; j<src_h/2; j++) {
+ for (i=0; i<src_w/2; i++) {
+ *dst_v++ = *srcuv++;
+ *dst_u++ = *srcuv++;
+ }
+ dst_u += align_dsthalfw-src_w/2;
+ dst_v += align_dsthalfw-src_w/2;
+ }
+
+ } else {
+ char *psrc,*pdst;
+ psrc = srcbuf;
+ pdst = dstbuf + dst_w-1;
+ for (i=0; i<src_h; i++) {
+ for (j=0; j<src_w; j++) {
+ *pdst-- = *psrc++;
+ }
+ pdst += 2*align_dstw - (align_dstw - dst_w);
+ }
+
+ psrc = srcbuf + y_size;
+ dst_u = dstbuf + dsty_size + dst_w/2-1;
+ dst_v = dst_u + dstc_size;
+ for (i=0; i<src_h/2; i++) {
+ for (j=0; j<src_w/2; j++) {
+ *dst_v-- = *psrc++;
+ *dst_u-- = *psrc++;
+ }
+ dst_u += align_dsthalfw*2 - (align_dsthalfw - dst_w/2);
+ dst_v += align_dsthalfw*2 - (align_dsthalfw - dst_w/2);
+ }
+ }
+
+ ret = 0;
+ }
+ }
+ break;
+ }
+ case V4L2_PIX_FMT_YUV422P:
+ {
+// if (CAMERA_IS_UVC_CAMERA()
+// || (mCamDriverCapability.version != KERNEL_VERSION(0, 0, 1))) {
+// goto cameraFormatConvert_default;
+// }
+ }
+ case V4L2_PIX_FMT_NV16:
+ {
+ break;
+ }
+ case V4L2_PIX_FMT_YUYV:
+ {
+ char *srcbuf_begin;
+ int *dstint_y, *dstint_uv, *srcint;
+
+ if ((v4l2_fmt_dst == V4L2_PIX_FMT_NV12) ||
+ ((v4l2_fmt_dst == V4L2_PIX_FMT_YUV420)/* && CAMERA_IS_RKSOC_CAMERA()
+ && (mCamDriverCapability.version == KERNEL_VERSION(0, 0, 1))*/)) {
+ if ((src_w == dst_w) && (src_h == dst_h)) {
+ dstint_y = (int*)dstbuf;
+ srcint = (int*)srcbuf;
+ dstint_uv = (int*)(dstbuf + y_size);
+ /*
+ * author :zyh
+ * neon code for YUYV to YUV420
+ */
+#ifdef HAVE_ARM_NEON
+ for(i=0;i<src_h;i++) {
+ int n = src_w;
+ char tmp = i%2;//get uv only when in even row
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 5f \n\t"
+ "0: @ 16 pixel swap \n\t"
+ " vld2.8 {q0,q1} , [%[src]]! @ q0 = y q1 = uv \n\t"
+ " vst1.16 {q0},[%[dst_y]]! @ now q0 -> dst \n\t"
+ " cmp %[tmp], #1 \n\t"
+ " bge 1f \n\t"
+ " vst1.16 {q1},[%[dst_uv]]! @ now q1 -> dst \n\t"
+ "1: @ don't need get uv in odd row \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " cmp %[n], #16 \n\t"
+ " bge 0b \n\t"
+ "5: @ end \n\t"
+ : [dst_y] "+r" (dstint_y), [dst_uv] "+r" (dstint_uv),[src] "+r" (srcint), [n] "+r" (n),[tmp] "+r" (tmp)
+ : [src_stride] "r" (src_w)
+ : "cc", "memory", "q0", "q1", "q2"
+ );
+ }
+ //LOGE("---------------neon code YUY to YUV420-----------------------------");
+ /*
+ * C code YUYV to YUV420
+ */
+#else
+ for(i=0;i<src_h; i++) {
+ for (j=0; j<(src_w>>2); j++) {
+ if(i%2 == 0){
+ *dstint_uv++ = (*(srcint+1)&0xff000000)|((*(srcint+1)&0x0000ff00)<<8)
+ |((*srcint&0xff000000)>>16)|((*srcint&0x0000ff00)>>8);
+ }
+ *dstint_y++ = ((*(srcint+1)&0x00ff0000)<<8)|((*(srcint+1)&0x000000ff)<<16)
+ |((*srcint&0x00ff0000)>>8)|(*srcint&0x000000ff);
+ srcint += 2;
+ }
+ }
+ //LOGE("---------------c code YUY to YUV420-----------------------------");
+#endif
+ ret = 0;
+ } else {
+ if (v4l2_fmt_dst) {
+ LOGE("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x)->'%c%c%c%c'@(0x%x,0x%x), %dx%d->%dx%d "
+						"scaling isn't supported",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF,
+ v4l2_fmt_dst & 0xFF, (v4l2_fmt_dst >> 8) & 0xFF,
+ (v4l2_fmt_dst >> 16) & 0xFF, (v4l2_fmt_dst >> 24) & 0xFF,
+ (long)srcbuf, srcphy, (long)dstbuf,dstphy,src_w,src_h,dst_w,dst_h);
+ } else if (android_fmt_dst) {
+ LOGD("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x)->%s@(0x%x,0x%x) %dx%d->%dx%d "
+						"scaling isn't supported",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF
+ , (long)srcbuf, srcphy,android_fmt_dst, (long)dstbuf,dstphy,
+ src_w,src_h,dst_w,dst_h);
+ }
+ }
+
+ } else if ((v4l2_fmt_dst == V4L2_PIX_FMT_NV21)||
+ (android_fmt_dst && (strcmp(android_fmt_dst,android::CameraParameters::PIXEL_FORMAT_YUV420SP)==0))) {
+ if ((src_w==dst_w) && (src_h==dst_h)) {
+ dstint_y = (int*)dstbuf;
+ srcint = (int*)srcbuf;
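+					/* YUYV -> NV21: pack the Y bytes from each pair of source words, then take U/V from every other row and store them V-first. */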
+ for(i=0;i<(y_size>>2);i++) {
+ *dstint_y++ = ((*(srcint+1)&0x00ff0000)<<8)|((*(srcint+1)&0x000000ff)<<16)
+ |((*srcint&0x00ff0000)>>8)|(*srcint&0x000000ff);
+ srcint += 2;
+ }
+ dstint_uv = (int*)(dstbuf + y_size);
+ srcint = (int*)srcbuf;
+ for(i=0;i<src_h/2; i++) {
+ for (j=0; j<(src_w>>2); j++) {
+ *dstint_uv++ = ((*(srcint+1)&0xff000000)>>8)|((*(srcint+1)&0x0000ff00)<<16)
+ |((*srcint&0xff000000)>>24)|(*srcint&0x0000ff00);
+ srcint += 2;
+ }
+ srcint += (src_w>>1);
+ }
+ ret = 0;
+ } else {
+ if (v4l2_fmt_dst) {
+ LOGE("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x)->'%c%c%c%c'@(0x%x,0x%x), %dx%d->%dx%d "
+						"scaling isn't supported",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF,
+ v4l2_fmt_dst & 0xFF, (v4l2_fmt_dst >> 8) & 0xFF,
+ (v4l2_fmt_dst >> 16) & 0xFF, (v4l2_fmt_dst >> 24) & 0xFF,
+ (long)srcbuf, srcphy, (long)dstbuf,dstphy,src_w,src_h,dst_w,dst_h);
+ } else if (android_fmt_dst) {
+ LOGD("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x)->%s@(0x%x,0x%x) %dx%d->%dx%d "
+						"scaling isn't supported",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF
+ , (long)srcbuf, srcphy,android_fmt_dst, (long)dstbuf,dstphy,
+ src_w,src_h,dst_w,dst_h);
+ }
+ }
+ }
+ break;
+ }
+ case V4L2_PIX_FMT_RGB565:
+ {
+ if (android_fmt_dst && (strcmp(android_fmt_dst,android::CameraParameters::PIXEL_FORMAT_RGB565)==0)){
+ if (srcbuf && dstbuf && (srcbuf != dstbuf)){
+ /* if(mRGAFd > 0) {
+ ret = rga_rgb565_cp(mRGAFd,src_w,src_h,srcbuf, (short int*)dstbuf);
+ } else {
+ memcpy(dstbuf,srcbuf,src_w*src_h*2);
+ ret = 0;
+ }*/
+ }
+ }
+ break;
+ }
+
+ case V4L2_PIX_FMT_MJPEG:
+ {
+ #if 0
+ VPU_FRAME outbuf;
+ unsigned int output_len;
+ unsigned int input_len,i,j,w,h;
+ FILE *fp;
+ char filename[50];
+ unsigned int *psrc,*pdst;
+
+ output_len = 0;
+ input_len = src_size;
+ if (v4l2_fmt_dst == V4L2_PIX_FMT_NV12) {
+
+ ret = mMjpegDecoder.decode(mMjpegDecoder.decoder,(unsigned char*)&outbuf, &output_len, (unsigned char*)srcbuf, &input_len);
+ if ((ret >= 0) && (output_len == sizeof(VPU_FRAME))) {
+ VPUMemLink(&outbuf.vpumem);
+ /* following codes are used to get yuv data after decoder */
+ VPUMemInvalidate(&outbuf.vpumem);
+ w = ((outbuf.DisplayWidth+15)&(~15));
+ h = ((outbuf.DisplayHeight+15)&(~15));
+ if (mirror == false) {
+ memcpy(dstbuf, outbuf.vpumem.vir_addr,w*h*3/2);
+ } else {
+ pdst = (unsigned int*)(dstbuf);
+ psrc = (unsigned int*)outbuf.vpumem.vir_addr;
+ pdst += ((w>>2)-1);
+ for (j=0; j<h; j++) {
+ for (i=0; i<(w>>2); i++) {
+ *pdst = ((*psrc>>24)&0x000000ff) | ((*psrc>>8)&0x0000ff00)
+ | ((*psrc<<8)&0x00ff0000) | ((*psrc<<24)&0xff000000);
+ psrc++;
+ pdst--;
+ }
+ pdst += (w>>1);
+ }
+
+ pdst = (unsigned int*)dstbuf;
+ psrc = (unsigned int*)outbuf.vpumem.vir_addr;
+
+ pdst += (w*h/4);
+ psrc += (w*h/4);
+ pdst += ((w>>2)-1);
+ for (j=0; j<(h/2); j++) {
+ for (i=0; i<(w>>2); i++) {
+ *pdst = ((*psrc>>16)&0x0000ffff) | ((*psrc<<16)&0xffff0000);
+ psrc++;
+ pdst--;
+ }
+ pdst += (w>>1);
+ }
+ }
+ VPUFreeLinear(&outbuf.vpumem);
+ } else {
+ LOGE("%s(%d): mjpeg decode failed! ret:%d output_len: 0x%x, src_buf: %p, input_len: %d",__FUNCTION__,__LINE__,
+ ret,output_len,srcbuf, input_len);
+ }
+ }
+ #endif
+ break;
+ }
+
+ cameraFormatConvert_default:
+ default:
+ if (android_fmt_dst) {
+				LOGE("%s(%d): CameraHal does not support (%c%c%c%c -> %s)",__FUNCTION__,__LINE__,
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF, android_fmt_dst);
+ } else if (v4l2_fmt_dst) {
+				LOGE("%s(%d): CameraHal does not support (%c%c%c%c -> %c%c%c%c)",__FUNCTION__,__LINE__,
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF,
+ v4l2_fmt_dst & 0xFF, (v4l2_fmt_dst >> 8) & 0xFF,
+ (v4l2_fmt_dst >> 16) & 0xFF, (v4l2_fmt_dst >> 24) & 0xFF);
+ }
+ break;
+ }
+ return ret;
+
+ }
+
+
diff --git a/CameraHal/CameraHalUtil.cpp~ b/CameraHal/CameraHalUtil.cpp~
new file mode 100755
index 0000000..0ce5f1b
--- /dev/null
+++ b/CameraHal/CameraHalUtil.cpp~
@@ -0,0 +1,1697 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <sys/ioctl.h>
+#include <utils/Log.h>
+#include <linux/videodev2.h>
+#include <binder/IPCThreadState.h>
+#include "CameraHal.h"
+
+
+#include <camera/CameraParameters.h>
+
+#if defined(TARGET_RK30) && (defined(TARGET_BOARD_PLATFORM_RK30XX) || (defined(TARGET_BOARD_PLATFORM_RK2928)))
+#include "../libgralloc/gralloc_priv.h"
+#if (CONFIG_CAMERA_INVALIDATE_RGA==0)
+#include <hardware/rga.h>
+#endif
+#elif defined(TARGET_RK30) && defined(TARGET_BOARD_PLATFORM_RK30XXB)
+#include <hardware/hal_public.h>
+#include <hardware/rga.h>
+#elif defined(TARGET_RK3368) || defined(TARGET_RK3288)
+#include <hardware/img_gralloc_public.h>
+#include <hardware/rga.h>
+#elif defined(TARGET_RK29)
+#include "../libgralloc/gralloc_priv.h"
+#endif
+
+#include "../jpeghw/release/encode_release/rk29-ipp.h"
+#include <utils/CallStack.h>
+#if defined(RK_DRM_GRALLOC)
+#if defined(TARGET_RK3368)
+#include <hardware/gralloc.h>
+#else
+#include <gralloc_drm.h>
+#endif
+#endif
+
+#define MIN(x,y) ((x<y) ? x: y)
+#define MAX(x,y) ((x>y) ? x: y)
+
+#ifdef ALOGD
+#define LOGD ALOGD
+#endif
+#ifdef ALOGV
+#define LOGV ALOGV
+#endif
+#ifdef ALOGE
+#define LOGE ALOGE
+#endif
+#ifdef ALOGI
+#define LOGI ALOGI
+#endif
+
+#define CAMERA_DISPLAY_FORMAT_NV12 "nv12"
+
+
+extern "C" int cameraFormatConvert(int v4l2_fmt_src, int v4l2_fmt_dst, const char *android_fmt_dst,
+ char *srcbuf, char *dstbuf,long srcphy,long dstphy,int src_size,
+ int src_w, int src_h, int srcbuf_w,
+ int dst_w, int dst_h, int dstbuf_w,
+ bool mirror);
+
+
+extern "C" int getCallingPid() {
+ return android::IPCThreadState::self()->getCallingPid();
+}
+
+
+extern "C" void callStack(){
+ android::CallStack stack;
+ stack.update();
+ stack.log("CameraHal");
+
+}
+extern "C" char* getCallingProcess()
+{
+ int fp = -1;
+ char cameraCallProcess[100];
+ memset(cameraCallProcess,0x00,sizeof(cameraCallProcess));
+ sprintf(cameraCallProcess,"/proc/%d/cmdline",getCallingPid());
+
+ fp = open(cameraCallProcess, O_RDONLY);
+
+ if (fp < 0) {
+ char value[PROPERTY_VALUE_MAX];
+ property_get("sys.camera.callprocess", value, "none");
+ memset(cameraCallProcess,0x00,sizeof(cameraCallProcess));
+ strcpy(cameraCallProcess, value);
+ if (!strcmp("none", value))
+ LOGE("Obtain calling process info failed");
+ else
+ LOGD("Calling process from sys.camera.callprocess is: %s", cameraCallProcess);
+ }
+ else {
+ memset(cameraCallProcess,0x00,sizeof(cameraCallProcess));
+ read(fp, cameraCallProcess, 99);
+ close(fp);
+ fp = -1;
+ LOGD("Calling process is: %s",cameraCallProcess);
+ }
+ return cameraCallProcess;
+
+}
+
+static float hwcGetBytesPerPixelForRga(int fmt)
+{
+ float bpp = 4;
+ switch(fmt){
+ case RK_FORMAT_RGB_565:
+ bpp = 2;
+ break;
+ case RK_FORMAT_RGB_888:
+ bpp = 3;
+ break;
+ case RK_FORMAT_RGBA_8888:
+ case RK_FORMAT_RGBX_8888:
+ case RK_FORMAT_BGRA_8888:
+ bpp = 4;
+ break;
+ case RK_FORMAT_YCbCr_420_SP:
+ case RK_FORMAT_YCrCb_420_SP:
+ bpp = 1.5;
+ break;
+ default:
+ break;
+ }
+ return bpp;
+}
+
+extern "C" int cameraPixFmt2HalPixFmt(const char *fmt)
+{
+ int hal_pixel_format=HAL_PIXEL_FORMAT_YCrCb_NV12;
+
+ if (strcmp(fmt,android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ hal_pixel_format = HAL_PIXEL_FORMAT_RGB_565;
+ } else if (strcmp(fmt,android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ hal_pixel_format = HAL_PIXEL_FORMAT_YCrCb_NV12;
+ }else if(strcmp(fmt,android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0){
+ hal_pixel_format = HAL_PIXEL_FORMAT_YV12;
+ }else if (strcmp(fmt,CAMERA_DISPLAY_FORMAT_NV12) == 0) {
+ hal_pixel_format = HAL_PIXEL_FORMAT_YCrCb_NV12;
+ } else if (strcmp(fmt,android::CameraParameters::PIXEL_FORMAT_YUV422SP) == 0) {
+ hal_pixel_format = HAL_PIXEL_FORMAT_YCbCr_422_SP;
+ } else {
+ hal_pixel_format = -EINVAL;
+        LOGE("%s(%d): pixel format %s is unknown!",__FUNCTION__,__LINE__,fmt);
+ }
+
+ return hal_pixel_format;
+}
+extern "C" void arm__scale_crop_nv12torgb565(int srcW, int srcH,int dstW,int dstH, char *srcbuf, short int *dstbuf)
+{
+#if 0
+ int line, col;
+ int uv_src_w = srcW/2;
+ int uv_src_h = srcH/2;
+ int src_w = srcW,src_h = srcH;
+ int dst_w = dstW,dst_h = dstH;
+ int ratio,cropW,cropH,left_offset,top_offset;
+ char* src,*psY,*psUV,*dst,*pdUV,*pdY;
+ long zoomindstxIntInv,zoomindstyIntInv;
+ long sX,sY,sY_UV;
+
+ long yCoeff00_y,yCoeff01_y,xCoeff00_y,xCoeff01_y;
+ long yCoeff00_uv,yCoeff01_uv,xCoeff00_uv,xCoeff01_uv;
+ long r0,r1,a,b,c,d,ry,ru,rv;
+
+ int y, u, v, yy, vr, ug, vg, ub,r,g,b1;
+
+
+
+ src = psY = (unsigned char*)(srcbuf)+top_offset*src_w+left_offset;
+ //psUV = psY +src_w*src_h+top_offset*src_w/2+left_offset;
+ psUV = (unsigned char*)(srcbuf) +src_w*src_h+top_offset*src_w/2+left_offset;
+
+
+
+
+ dst = pdY = (unsigned char*)dstbuf;
+ pdUV = pdY + dst_w*dst_h;
+
+ //need crop ?
+ if((src_w*100/src_h) != (dst_w*100/dst_h)){
+ ratio = ((src_w*100/dst_w) >= (src_h*100/dst_h))?(src_h*100/dst_h):(src_w*100/dst_w);
+ cropW = ratio*dst_w/100;
+ cropH = ratio*dst_h/100;
+
+ left_offset=((src_w-cropW)>>1) & (~0x01);
+ top_offset=((src_h-cropH)>>1) & (~0x01);
+ }else{
+ cropW = src_w;
+ cropH = src_h;
+ top_offset=0;
+ left_offset=0;
+ }
+
+ zoomindstxIntInv = ((unsigned long)(cropW)<<16)/dstW + 1;
+ zoomindstyIntInv = ((unsigned long)(cropH)<<16)/dstH + 1;
+
+ for (line = 0; line < dstH; line++) {
+ yCoeff00_y = (line*zoomindstyIntInv)&0xffff;
+ yCoeff01_y = 0xffff - yCoeff00_y;
+ sY = (line*zoomindstyIntInv >> 16);
+ sY = (sY >= srcH - 1)? (srcH - 2) : sY;
+
+		sY_UV = ((line/2)*zoomindstyIntInv >> 16);
+ sY_UV = (sY_UV >= uv_src_h - 1)? (uv_src_h - 2) : sY_UV;
+ for (col = 0; col < dstW; col++) {
+
+
+ //get y
+ xCoeff00_y = (col*zoomindstxIntInv)&0xffff;
+ xCoeff01_y = 0xffff - xCoeff00_y;
+ sX = (col*zoomindstxIntInv >> 16);
+ sX = (sX >= srcW -1)?(srcW- 2) : sX;
+ a = psY[sY*srcW + sX];
+ b = psY[sY*srcW + sX + 1];
+ c = psY[(sY+1)*srcW + sX];
+ d = psY[(sY+1)*srcW + sX + 1];
+
+ r0 = (a * xCoeff01_y + b * xCoeff00_y)>>16 ;
+ r1 = (c * xCoeff01_y + d * xCoeff00_y)>>16 ;
+ ry = (r0 * yCoeff01_y + r1 * yCoeff00_y)>>16;
+
+ //get u & v
+ xCoeff00_uv = ((col/2)*zoomindstxIntInv)&0xffff;
+ xCoeff01_uv = 0xffff - xCoeff00_uv;
+ sX = ((col/2)*zoomindstxIntInv >> 16);
+ sX = (sX >= uv_src_w -1)?(uv_src_w- 2) : sX;
+ //U
+ a = psUV[(sY*uv_src_w + sX)*2];
+ b = psUV[(sY*uv_src_w + sX + 1)*2];
+ c = psUV[((sY+1)*uv_src_w + sX)*2];
+ d = psUV[((sY+1)*uv_src_w + sX + 1)*2];
+
+ r0 = (a * xCoeff01_uv + b * xCoeff00_uv)>>16 ;
+ r1 = (c * xCoeff01_uv + d * xCoeff00_uv)>>16 ;
+ ru = (r0 * yCoeff01_uv + r1 * yCoeff00_uv)>>16;
+
+ //v
+
+ a = psUV[(sY*uv_src_w + sX)*2 + 1];
+ b = psUV[(sY*uv_src_w + sX + 1)*2 + 1];
+ c = psUV[((sY+1)*uv_src_w + sX)*2 + 1];
+ d = psUV[((sY+1)*uv_src_w + sX + 1)*2 + 1];
+
+ r0 = (a * xCoeff01_uv + b * xCoeff00_uv)>>16 ;
+ r1 = (c * xCoeff01_uv + d * xCoeff00_uv)>>16 ;
+ rv = (r0 * yCoeff01_uv + r1 * yCoeff00_uv)>>16;
+
+
+
+ yy = ry << 8;
+ u = ru - 128;
+ ug = 88 * u;
+ ub = 454 * u;
+ v = rv - 128;
+ vg = 183 * v;
+ vr = 359 * v;
+
+ r = (yy + vr) >> 8;
+ g = (yy - ug - vg) >> 8;
+ b = (yy + ub ) >> 8;
+ if (r < 0) r = 0;
+ if (r > 255) r = 255;
+ if (g < 0) g = 0;
+ if (g > 255) g = 255;
+ if (b < 0) b = 0;
+ if (b > 255) b = 255;
+
+
+ }
+ }
+#endif
+}
+
+
+extern "C" void arm_nv12torgb565(int width, int height, char *src, short int *dst,int dstbuf_w)
+{
+ int line, col;
+ int y, u, v, yy, vr, ug, vg, ub;
+ int r, g, b;
+ char *py, *pu, *pv;
+
+ py = src;
+ pu = py + (width * height);
+ pv = pu + 1;
+ y = *py++;
+ yy = y << 8;
+ u = *pu - 128;
+ ug = 88 * u;
+ ub = 454 * u;
+ v = *pv - 128;
+ vg = 183 * v;
+ vr = 359 * v;
+
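+	/* Integer YUV -> RGB conversion with BT.601-style coefficients scaled by 256 (e.g. 359/256 ~ 1.402), packed into RGB565. */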
+ for (line = 0; line < height; line++) {
+ for (col = 0; col < width; col++) {
+ r = (yy + vr) >> 8;
+ g = (yy - ug - vg) >> 8;
+ b = (yy + ub ) >> 8;
+ if (r < 0) r = 0;
+ if (r > 255) r = 255;
+ if (g < 0) g = 0;
+ if (g > 255) g = 255;
+ if (b < 0) b = 0;
+ if (b > 255) b = 255;
+
+ *dst++ = (((__u16)r>>3)<<11) | (((__u16)g>>2)<<5) | (((__u16)b>>3)<<0);
+
+ y = *py++;
+ yy = y << 8;
+ if (col & 1) {
+ pu += 2;
+ pv = pu+1;
+ u = *pu - 128;
+ ug = 88 * u;
+ ub = 454 * u;
+ v = *pv - 128;
+ vg = 183 * v;
+ vr = 359 * v;
+ }
+ }
+ dst += dstbuf_w - width;
+ if ((line & 1) == 0) {
+ //even line: rewind
+ pu -= width;
+ pv = pu+1;
+ }
+ }
+}
+
+
+
+
+extern "C" int rga_nv12torgb565(int src_width, int src_height, char *src, short int *dst, int dstbuf_width,int dst_width,int dst_height)
+{
+ int rgafd = -1,ret = -1;
+
+ //LOGD("rga_nv12torgb565: (%dx%d)->(%dx%d),src_buf = 0x%x,dst_buf = 0x%x",src_width,src_height,dst_width,dst_height,src,dst);
+
+#ifdef TARGET_RK30
+
+ if((rgafd = open("/dev/rga",O_RDWR)) < 0) {
+ LOGE("%s(%d):open rga device failed ,err: %s",__FUNCTION__,__LINE__,strerror(errno));
+ ret = -1;
+ return ret;
+ }
+
+#if (CONFIG_CAMERA_INVALIDATE_RGA==0)
+ struct rga_req Rga_Request;
+ int err = 0;
+
+ memset(&Rga_Request,0x0,sizeof(Rga_Request));
+
+ unsigned char *psY, *psUV;
+ int srcW,srcH,cropW,cropH;
+ int ratio = 0;
+ int top_offset=0,left_offset=0;
+//need crop ?
+ if((src_width*100/src_height) != (dst_width*100/dst_height)){
+ ratio = ((src_width*100/dst_width) >= (src_height*100/dst_height))?(src_height*100/dst_height):(src_width*100/dst_width);
+ cropW = ratio*dst_width/100;
+ cropH = ratio*dst_height/100;
+
+ left_offset=((src_width-cropW)>>1) & (~0x01);
+ top_offset=((src_height-cropH)>>1) & (~0x01);
+ }else{
+ cropW = src_width;
+ cropH = src_height;
+ top_offset=0;
+ left_offset=0;
+ }
+
+ psY = (unsigned char*)(src)+top_offset*src_width+left_offset;
+ psUV = (unsigned char*)(src) +src_width*src_height+top_offset*src_width/2+left_offset;
+
+ Rga_Request.src.yrgb_addr = (long)psY;
+ Rga_Request.src.uv_addr = (long)psUV;
+ Rga_Request.src.v_addr = Rga_Request.src.uv_addr;
+ Rga_Request.src.vir_w = src_width;
+ Rga_Request.src.vir_h = src_height;
+ Rga_Request.src.format = RK_FORMAT_YCbCr_420_SP;
+ Rga_Request.src.act_w = cropW;
+ Rga_Request.src.act_h = cropH;
+ Rga_Request.src.x_offset = 0;
+ Rga_Request.src.y_offset = 0;
+
+ Rga_Request.dst.yrgb_addr = (long)dst;
+ Rga_Request.dst.uv_addr = 0;
+ Rga_Request.dst.v_addr = 0;
+ Rga_Request.dst.vir_w = dstbuf_width;
+ Rga_Request.dst.vir_h = dst_height;
+ Rga_Request.dst.format = RK_FORMAT_RGB_565;
+ Rga_Request.clip.xmin = 0;
+ Rga_Request.clip.xmax = dst_width - 1;
+ Rga_Request.clip.ymin = 0;
+ Rga_Request.clip.ymax = dst_height - 1;
+ Rga_Request.dst.act_w = dst_width;
+ Rga_Request.dst.act_h = dst_height;
+ Rga_Request.dst.x_offset = 0;
+ Rga_Request.dst.y_offset = 0;
+ Rga_Request.mmu_info.mmu_en = 1;
+ Rga_Request.mmu_info.mmu_flag = ((2 & 0x3) << 4) | 1;
+ if (hwcGetBytesPerPixelForRga(Rga_Request.src.format) !=
+ hwcGetBytesPerPixelForRga(Rga_Request.dst.format)){/*avoid dither: dalon.zhang@rock-chips.com*/
+ Rga_Request.alpha_rop_flag |= (1 << 5); /* ddl@rock-chips.com: v0.4.3 */
+ }
+
+ if((src_width!=dst_width) || ( src_height!=dst_height)){
+ Rga_Request.sina = 0;
+ Rga_Request.cosa = 0x10000;
+ Rga_Request.rotate_mode = 1;
+ Rga_Request.scale_mode = 1;
+ }else{
+ Rga_Request.sina = 0;
+ Rga_Request.cosa = 0;
+ Rga_Request.rotate_mode = 0;
+ Rga_Request.scale_mode = 0;
+ }
+
+ if(ioctl(rgafd, RGA_BLIT_SYNC, &Rga_Request) != 0) {
+		LOGE("%s(%d): RGA_BLIT_SYNC failed, err: %s", __FUNCTION__, __LINE__,strerror(errno));
+ err = -1;
+ }
+ return err;
+#else
+	LOGE("%s(%d): RGA is invalidated by CONFIG_CAMERA_INVALIDATE_RGA!",__FUNCTION__, __LINE__);
+ return 0;
+#endif
+#else
+	LOGE("%s(%d): rk29 has no RGA device in the chip!",__FUNCTION__, __LINE__);
+ return -1;
+#endif
+}
+
+extern "C" int arm_camera_yuv420_scale_arm(int v4l2_fmt_src, int v4l2_fmt_dst,
+ char *srcbuf, char *dstbuf,int src_w, int src_h,int dst_w, int dst_h,bool mirror,int zoom_val);
+
+extern "C" int util_get_gralloc_buf_fd(buffer_handle_t handle,int* fd){
+ int err = 0;
+#if defined(RK_DRM_GRALLOC)
+ gralloc_module_t *gralloc_module = NULL;
+ hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&gralloc_module);
+ if (!gralloc_module)
+		ALOGE("failed to get gralloc module!");
+ int mem_fd = -1;
+ err = gralloc_module->perform(
+ gralloc_module,
+ GRALLOC_MODULE_PERFORM_GET_HADNLE_PRIME_FD,
+ handle, fd);
+ if (err)
+ LOGE("%s:get buffer fd error %d!",__func__,err);
+#else
+ err = -1;
+ LOGE("%s:not support !",__func__);
+#endif
+ return err;
+}
+
+
+#if defined(RK_DRM_GRALLOC)
+#include <RockchipRga.h>
+
+extern "C" int rga_nv12_scale_crop(
+ int src_width, int src_height, char *src_fd, short int *dst_fd,
+ int dst_width,int dst_height,int zoom_val,bool mirror,
+ bool isNeedCrop,bool isDstNV21,int dst_stride,bool vir_addr)
+{
+ int ret = 0;
+ rga_info_t src,dst;
+ int src_cropW,src_cropH,dst_cropW,dst_cropH,zoom_cropW,zoom_cropH;
+ int ratio = 0;
+ int src_top_offset=0,src_left_offset=0,dst_top_offset=0,dst_left_offset=0,zoom_top_offset=0,zoom_left_offset=0;
+
+ RockchipRga& rkRga(RockchipRga::get());
+
+ memset(&src, 0, sizeof(rga_info_t));
+ if (vir_addr) {
+ src.fd = -1;
+ src.virAddr = (void*)src_fd;
+ } else
+ src.fd = (unsigned long)src_fd;
+ src.mmuFlag = ((2 & 0x3) << 4) | 1 | (1 << 8) | (1 << 10);
+ memset(&dst, 0, sizeof(rga_info_t));
+ if (vir_addr) {
+ dst.fd = -1;
+ dst.virAddr = (void*)dst_fd;
+ } else
+ dst.fd = (unsigned long)dst_fd;
+ dst.mmuFlag = ((2 & 0x3) << 4) | 1 | (1 << 8) | (1 << 10);
+
+ //src.hnd = NULL;
+ //dst.hnd = NULL;
+
+	/* the RGA mirror operation misbehaves on rk312x */
+#if defined(TARGET_RK312x)
+ if(mirror){
+		LOGE("%s:rk312x RGA does not support mirror",__func__);
+ ret = -1;
+ goto failed;
+ }
+#endif
+	/* rk3188 RGA does not support yuv-to-yuv scaling */
+#if defined(TARGET_RK3188)
+ {
+		LOGE("%s:rk3188 RGA does not support yuv scaling",__func__);
+ ret = -1;
+ goto failed;
+ }
+#endif
+
+ if((dst_width > RGA_VIRTUAL_W) || (dst_height > RGA_VIRTUAL_H)){
+ LOGE("%s(%d):(dst_width > RGA_VIRTUAL_W) || (dst_height > RGA_VIRTUAL_H), switch to arm ",__FUNCTION__,__LINE__);
+ ret = -1;
+ goto failed;
+ }
+
+ //2) use rga
+
+	//need crop? don't crop for the CTS FOV test
+ if(isNeedCrop && (src_width*100/src_height) != (dst_width*100/dst_height)){
+ ratio = ((src_width*100/dst_width) >= (src_height*100/dst_height))?(src_height*100/dst_height):(src_width*100/dst_width);
+ zoom_cropW = ratio*dst_width/100;
+ zoom_cropH = ratio*dst_height/100;
+
+ zoom_left_offset=((src_width-zoom_cropW)>>1) & (~0x01);
+ zoom_top_offset=((src_height-zoom_cropH)>>1) & (~0x01);
+ }else{
+ zoom_cropW = src_width;
+ zoom_cropH = src_height;
+ zoom_left_offset=0;
+ zoom_top_offset=0;
+ }
+
+ if(zoom_val > 100){
+ zoom_cropW = zoom_cropW*100/zoom_val;
+ zoom_cropH = zoom_cropH*100/zoom_val;
+ zoom_left_offset = ((src_width-zoom_cropW)>>1) & (~0x01);
+ zoom_top_offset= ((src_height-zoom_cropH)>>1) & (~0x01);
+ }
+
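+	/* Describe the (possibly zoomed/cropped) source window and the destination rect, then let RGA do the scale, NV12/NV21 conversion and optional horizontal flip in a single blit. */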
+ rga_set_rect(&src.rect, zoom_left_offset,zoom_top_offset,
+ zoom_cropW,zoom_cropH,src_width,src_height,HAL_PIXEL_FORMAT_YCrCb_NV12);
+ if (isDstNV21)
+ rga_set_rect(&dst.rect, 0,0,dst_width,dst_height,
+ dst_stride ? dst_stride : dst_width,
+ dst_height,HAL_PIXEL_FORMAT_YCrCb_420_SP);
+ else
+ rga_set_rect(&dst.rect, 0,0,dst_width,dst_height,
+ dst_stride ? dst_stride : dst_width,
+ dst_height,HAL_PIXEL_FORMAT_YCrCb_NV12);
+ if (mirror)
+ src.rotation = DRM_RGA_TRANSFORM_FLIP_H;
+ //TODO:sina,cosa,scale_mode,render_mode
+ ret = rkRga.RkRgaBlit(&src, &dst, NULL);
+ if (ret) {
+ LOGE("%s:rga blit failed",__func__);
+ goto failed;
+ }
+
+ return ret;
+failed:
+ return ret;
+}
+#else
+extern "C" int rga_nv12_scale_crop(int src_width, int src_height, char *src, short int *dst,
+ int dst_width,int dst_height,int zoom_val,bool mirror,bool isNeedCrop,bool isDstNV21)
+{
+ int rgafd = -1,ret = -1;
+ int scale_times_w = 0,scale_times_h = 0,h = 0,w = 0;
+ struct rga_req Rga_Request;
+ int err = 0;
+ unsigned char *psY, *psUV;
+ int src_cropW,src_cropH,dst_cropW,dst_cropH,zoom_cropW,zoom_cropH;
+ int ratio = 0;
+ int src_top_offset=0,src_left_offset=0,dst_top_offset=0,dst_left_offset=0,zoom_top_offset=0,zoom_left_offset=0;
+
+	/* the RGA mirror operation misbehaves on rk312x */
+ #if defined(TARGET_RK312x)
+ if(mirror){
+ return arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, (isDstNV21 ? V4L2_PIX_FMT_NV21:V4L2_PIX_FMT_NV12),
+ src, (char *)dst,src_width, src_height,dst_width, dst_height,mirror,zoom_val);
+ }
+ #endif
+	/* rk3188 RGA does not support yuv-to-yuv scaling */
+ #if defined(TARGET_RK3188)
+ return arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, (isDstNV21 ? V4L2_PIX_FMT_NV21:V4L2_PIX_FMT_NV12),
+ src, (char *)dst,src_width, src_height,dst_width, dst_height,mirror,zoom_val);
+ #endif
+
+ if((dst_width > RGA_VIRTUAL_W) || (dst_height > RGA_VIRTUAL_H)){
+ LOGE("%s(%d):(dst_width > RGA_VIRTUAL_W) || (dst_height > RGA_VIRTUAL_H), switch to arm ",__FUNCTION__,__LINE__);
+
+ return arm_camera_yuv420_scale_arm(V4L2_PIX_FMT_NV12, (isDstNV21 ? V4L2_PIX_FMT_NV21:V4L2_PIX_FMT_NV12),
+ src, (char *)dst,src_width, src_height,dst_width, dst_height,mirror,zoom_val);
+ }
+
+	//need crop? don't crop for the CTS FOV test
+ if(isNeedCrop && (src_width*100/src_height) != (dst_width*100/dst_height)){
+ ratio = ((src_width*100/dst_width) >= (src_height*100/dst_height))?(src_height*100/dst_height):(src_width*100/dst_width);
+ zoom_cropW = ratio*dst_width/100;
+ zoom_cropH = ratio*dst_height/100;
+
+ zoom_left_offset=((src_width-zoom_cropW)>>1) & (~0x01);
+ zoom_top_offset=((src_height-zoom_cropH)>>1) & (~0x01);
+ }else{
+ zoom_cropW = src_width;
+ zoom_cropH = src_height;
+ zoom_left_offset=0;
+ zoom_top_offset=0;
+ }
+
+ if(zoom_val > 100){
+ zoom_cropW = zoom_cropW*100/zoom_val;
+ zoom_cropH = zoom_cropH*100/zoom_val;
+ zoom_left_offset = ((src_width-zoom_cropW)>>1) & (~0x01);
+ zoom_top_offset= ((src_height-zoom_cropH)>>1) & (~0x01);
+ }
+
+
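+	/* RGA's active window is limited to RGA_ACTIVE_W x RGA_ACTIVE_H, so larger destinations are processed as a grid of tiles. */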
+ if(dst_width > RGA_ACTIVE_W){
+ scale_times_w = (dst_width/RGA_ACTIVE_W);
+ scale_times_w++;
+ }else{
+ scale_times_w = 1;
+ }
+ if(dst_height > RGA_ACTIVE_H){
+ scale_times_h = (dst_height/RGA_ACTIVE_H);
+ scale_times_h++;
+ } else {
+ scale_times_h = 1;
+ }
+ if((rgafd = open("/dev/rga",O_RDWR)) < 0) {
+ LOGE("%s(%d):open rga device failed ,err: %s",__FUNCTION__,__LINE__,strerror(errno));
+ ret = -1;
+ return ret;
+ }
+
+ src_cropW = zoom_cropW/scale_times_w;
+ src_cropH = zoom_cropH/scale_times_h;
+
+ dst_cropW = dst_width/scale_times_w;
+ dst_cropH = dst_height/scale_times_h;
+
+ for(h = 0; h< scale_times_h; h++){
+ for(w = 0; w< scale_times_w; w++){
+ memset(&Rga_Request,0x0,sizeof(Rga_Request));
+
+ src_left_offset = zoom_left_offset + w*src_cropW;
+ src_top_offset = zoom_top_offset + h*src_cropH;
+
+ dst_left_offset = w*dst_cropW;
+ dst_top_offset = h*dst_cropH;
+
+ psY = (unsigned char*)(src);
+
+ #if defined(TARGET_RK3188)
+ Rga_Request.src.yrgb_addr = (long)psY;
+ Rga_Request.src.uv_addr = (long)psY + src_width * src_height;
+ #else
+ Rga_Request.src.yrgb_addr = 0;
+ Rga_Request.src.uv_addr = (long)psY;
+ #endif
+ Rga_Request.src.v_addr = 0;
+ Rga_Request.src.vir_w = src_width;
+ Rga_Request.src.vir_h = src_height;
+ Rga_Request.src.format = RK_FORMAT_YCbCr_420_SP;
+ Rga_Request.src.act_w = src_cropW & (~0x01);
+ Rga_Request.src.act_h = src_cropH & (~0x01);
+#if defined(TARGET_RK3368)
+			Rga_Request.src.x_offset = src_left_offset & (~0x1f);//32-pixel alignment required to work around an RGA bug
+ Rga_Request.src.y_offset = src_top_offset & (~0xf);
+#else
+ Rga_Request.src.x_offset = src_left_offset & (~0x01);
+ Rga_Request.src.y_offset = src_top_offset & (~0x01);
+#endif
+ #if defined(TARGET_RK3188)
+ Rga_Request.dst.yrgb_addr = (long)dst;
+ Rga_Request.dst.uv_addr = (long)dst + dst_width*dst_height;
+ #else
+ Rga_Request.dst.yrgb_addr = 0;
+ Rga_Request.dst.uv_addr = (long)dst;
+ #endif
+ Rga_Request.dst.v_addr = 0;
+ Rga_Request.dst.vir_w = dst_width;
+ Rga_Request.dst.vir_h = dst_height;
+ if(isDstNV21)
+ Rga_Request.dst.format = RK_FORMAT_YCrCb_420_SP;
+ else
+ Rga_Request.dst.format = RK_FORMAT_YCbCr_420_SP;
+ Rga_Request.clip.xmin = 0;
+ Rga_Request.clip.xmax = dst_width - 1;
+ Rga_Request.clip.ymin = 0;
+ Rga_Request.clip.ymax = dst_height - 1;
+ Rga_Request.dst.act_w = dst_cropW;
+ Rga_Request.dst.act_h = dst_cropH;
+ Rga_Request.dst.x_offset = dst_left_offset;
+ Rga_Request.dst.y_offset = dst_top_offset;
+
+ Rga_Request.mmu_info.mmu_en = 1;
+ Rga_Request.mmu_info.mmu_flag = ((2 & 0x3) << 4) | 1 | (1 << 8) | (1 << 10);
+ if (hwcGetBytesPerPixelForRga(Rga_Request.src.format) !=
+ hwcGetBytesPerPixelForRga(Rga_Request.dst.format)){/*avoid dither: dalon.zhang@rock-chips.com*/
+ Rga_Request.alpha_rop_flag |= (1 << 5); /* ddl@rock-chips.com: v0.4.3 */
+ }
+
+ #if defined(TARGET_RK312x)
+			/* plain nv12-to-nv21 conversion (no scaling) misbehaves on rk312x, so always take the scale path */
+ if(1/*(cropW != dst_width) || ( cropH != dst_height)*/){
+ #else
+ if((src_cropW != dst_width) || ( src_cropH != dst_height)){
+ #endif
+ Rga_Request.sina = 0;
+ Rga_Request.cosa = 0x10000;
+ Rga_Request.scale_mode = 1;
+ Rga_Request.rotate_mode = mirror ? 2:1;
+ }else{
+ Rga_Request.sina = 0;
+ Rga_Request.cosa = 0;
+ Rga_Request.scale_mode = 0;
+ Rga_Request.rotate_mode = mirror ? 2:0;
+ Rga_Request.render_mode = pre_scaling_mode;
+ }
+
+
+ if(ioctl(rgafd, RGA_BLIT_SYNC, &Rga_Request) != 0) {
+				LOGE("%s(%d): RGA_BLIT_SYNC failed, err: %s", __FUNCTION__, __LINE__,strerror(errno));
+ err = -1;
+ }
+ }
+ }
+ close(rgafd);
+
+ return err;
+}
+#endif
+
+extern "C" int arm_camera_yuv420_scale_arm(int v4l2_fmt_src, int v4l2_fmt_dst,
+ char *srcbuf, char *dstbuf,int src_w, int src_h,int dst_w, int dst_h,bool mirror,int zoom_val)
+{
+ unsigned char *psY,*pdY,*psUV,*pdUV;
+ unsigned char *src,*dst;
+ int srcW,srcH,cropW,cropH,dstW,dstH;
+ long zoomindstxIntInv,zoomindstyIntInv;
+ long x,y;
+ long yCoeff00,yCoeff01,xCoeff00,xCoeff01;
+ long sX,sY;
+ long r0,r1,a,b,c,d;
+ int ret = 0;
+ bool nv21DstFmt = false;
+ int ratio = 0;
+ int top_offset=0,left_offset=0;
+ if((v4l2_fmt_src != V4L2_PIX_FMT_NV12) ||
+ ((v4l2_fmt_dst != V4L2_PIX_FMT_NV12) && (v4l2_fmt_dst != V4L2_PIX_FMT_NV21) )){
+		LOGE("%s:%d: unsupported format",__FUNCTION__,__LINE__);
+ return -1;
+ }
+
+ //just copy ?
+ if((v4l2_fmt_src == v4l2_fmt_dst) && (mirror == false)
+ &&(src_w == dst_w) && (src_h == dst_h) && (zoom_val == 100)){
+ memcpy(dstbuf,srcbuf,src_w*src_h*3/2);
+ return 0;
+ }else if((v4l2_fmt_dst == V4L2_PIX_FMT_NV21)
+ && (src_w == dst_w) && (src_h == dst_h)
+ && (mirror == false) && (zoom_val == 100)){
+ //just convert fmt
+
+ cameraFormatConvert(V4L2_PIX_FMT_NV12, V4L2_PIX_FMT_NV21, NULL,
+ srcbuf, dstbuf,0,0,src_w*src_h*3/2,
+ src_w, src_h,src_w,
+ dst_w, dst_h,dst_w,
+ mirror);
+ return 0;
+
+ }
+
+ if ((v4l2_fmt_dst == V4L2_PIX_FMT_NV21)){
+ nv21DstFmt = true;
+
+ }
+
+ //need crop ?
+ if((src_w*100/src_h) != (dst_w*100/dst_h)){
+ ratio = ((src_w*100/dst_w) >= (src_h*100/dst_h))?(src_h*100/dst_h):(src_w*100/dst_w);
+ cropW = ratio*dst_w/100;
+ cropH = ratio*dst_h/100;
+
+ left_offset=((src_w-cropW)>>1) & (~0x01);
+ top_offset=((src_h-cropH)>>1) & (~0x01);
+ }else{
+ cropW = src_w;
+ cropH = src_h;
+ top_offset=0;
+ left_offset=0;
+ }
+
+ //zoom ?
+ if(zoom_val > 100){
+ cropW = cropW*100/zoom_val;
+ cropH = cropH*100/zoom_val;
+ left_offset=((src_w-cropW)>>1) & (~0x01);
+ top_offset=((src_h-cropH)>>1) & (~0x01);
+ }
+
+ src = psY = (unsigned char*)(srcbuf)+top_offset*src_w+left_offset;
+ //psUV = psY +src_w*src_h+top_offset*src_w/2+left_offset;
+ psUV = (unsigned char*)(srcbuf) +src_w*src_h+top_offset*src_w/2+left_offset;
+
+
+ srcW =src_w;
+ srcH = src_h;
+// cropW = src_w;
+// cropH = src_h;
+
+
+ dst = pdY = (unsigned char*)dstbuf;
+ pdUV = pdY + dst_w*dst_h;
+ dstW = dst_w;
+ dstH = dst_h;
+
+ zoomindstxIntInv = ((unsigned long)(cropW)<<16)/dstW + 1;
+ zoomindstyIntInv = ((unsigned long)(cropH)<<16)/dstH + 1;
+ //y
+ //for(y = 0; y<dstH - 1 ; y++ ) {
+ for(y = 0; y<dstH; y++ ) {
+ yCoeff00 = (y*zoomindstyIntInv)&0xffff;
+ yCoeff01 = 0xffff - yCoeff00;
+ sY = (y*zoomindstyIntInv >> 16);
+ sY = (sY >= srcH - 1)? (srcH - 2) : sY;
+ for(x = 0; x<dstW; x++ ) {
+ xCoeff00 = (x*zoomindstxIntInv)&0xffff;
+ xCoeff01 = 0xffff - xCoeff00;
+ sX = (x*zoomindstxIntInv >> 16);
+ sX = (sX >= srcW -1)?(srcW- 2) : sX;
+ a = psY[sY*srcW + sX];
+ b = psY[sY*srcW + sX + 1];
+ c = psY[(sY+1)*srcW + sX];
+ d = psY[(sY+1)*srcW + sX + 1];
+
+ r0 = (a * xCoeff01 + b * xCoeff00)>>16 ;
+ r1 = (c * xCoeff01 + d * xCoeff00)>>16 ;
+ r0 = (r0 * yCoeff01 + r1 * yCoeff00)>>16;
+
+ if(mirror)
+ pdY[dstW -1 - x] = r0;
+ else
+ pdY[x] = r0;
+ }
+ pdY += dstW;
+ }
+
+ dstW /= 2;
+ dstH /= 2;
+ srcW /= 2;
+ srcH /= 2;
+
+ //UV
+ //for(y = 0; y<dstH - 1 ; y++ ) {
+ for(y = 0; y<dstH; y++ ) {
+ yCoeff00 = (y*zoomindstyIntInv)&0xffff;
+ yCoeff01 = 0xffff - yCoeff00;
+ sY = (y*zoomindstyIntInv >> 16);
+ sY = (sY >= srcH -1)? (srcH - 2) : sY;
+ for(x = 0; x<dstW; x++ ) {
+ xCoeff00 = (x*zoomindstxIntInv)&0xffff;
+ xCoeff01 = 0xffff - xCoeff00;
+ sX = (x*zoomindstxIntInv >> 16);
+ sX = (sX >= srcW -1)?(srcW- 2) : sX;
+ //U
+ a = psUV[(sY*srcW + sX)*2];
+ b = psUV[(sY*srcW + sX + 1)*2];
+ c = psUV[((sY+1)*srcW + sX)*2];
+ d = psUV[((sY+1)*srcW + sX + 1)*2];
+
+ r0 = (a * xCoeff01 + b * xCoeff00)>>16 ;
+ r1 = (c * xCoeff01 + d * xCoeff00)>>16 ;
+ r0 = (r0 * yCoeff01 + r1 * yCoeff00)>>16;
+
+ if(mirror && nv21DstFmt)
+ pdUV[dstW*2-1- (x*2)] = r0;
+ else if(mirror)
+ pdUV[dstW*2-1-(x*2+1)] = r0;
+ else if(nv21DstFmt)
+ pdUV[x*2 + 1] = r0;
+ else
+ pdUV[x*2] = r0;
+ //V
+ a = psUV[(sY*srcW + sX)*2 + 1];
+ b = psUV[(sY*srcW + sX + 1)*2 + 1];
+ c = psUV[((sY+1)*srcW + sX)*2 + 1];
+ d = psUV[((sY+1)*srcW + sX + 1)*2 + 1];
+
+ r0 = (a * xCoeff01 + b * xCoeff00)>>16 ;
+ r1 = (c * xCoeff01 + d * xCoeff00)>>16 ;
+ r0 = (r0 * yCoeff01 + r1 * yCoeff00)>>16;
+
+ if(mirror && nv21DstFmt)
+ pdUV[dstW*2-1- (x*2+1) ] = r0;
+ else if(mirror)
+ pdUV[dstW*2-1-(x*2)] = r0;
+ else if(nv21DstFmt)
+ pdUV[x*2] = r0;
+ else
+ pdUV[x*2 + 1] = r0;
+ }
+ pdUV += dstW*2;
+ }
+ return 0;
+}
+
+extern "C" int rk_camera_zoom_ipp(int v4l2_fmt_src, int srcbuf, int src_w, int src_h,int dstbuf,int zoom_value)
+{
+ int vipdata_base;
+
+ struct rk29_ipp_req ipp_req;
+ int src_y_offset,src_uv_offset,dst_y_offset,dst_uv_offset,src_y_size,dst_y_size;
+ int scale_w_times =0,scale_h_times = 0,w,h;
+ int ret = 0;
+ int ippFd = -1;
+ int ratio = 0;
+ int top_offset=0,left_offset=0;
+ int cropW,cropH;
+
+
+ if((ippFd = open("/dev/rk29-ipp",O_RDWR)) < 0) {
+ LOGE("%s(%d):open /dev/rk29-ipp device failed ,err: %s",__FUNCTION__,__LINE__,strerror(errno));
+ ret = -1;
+ goto do_ipp_err;
+ }
+
+ /*
+ *ddl@rock-chips.com:
+	 * The IPP destination resolution is limited to 2047x1088, so the scale operation is broken up into multiple passes
+ */
+ if ((src_w > 0x7f0) || (src_h > 0x430)) {
+ scale_w_times = ((src_w/0x7f0)>(src_h/0x430))?(src_w/0x7f0):(src_h/0x430);
+ scale_h_times = scale_w_times;
+ scale_w_times++;
+ scale_h_times++;
+ } else {
+ scale_w_times = 1;
+ scale_h_times = 1;
+ }
+ memset(&ipp_req, 0, sizeof(struct rk29_ipp_req));
+
+
+ //compute zoom
+ cropW = (src_w*100/zoom_value)& (~0x03);
+ cropH = (src_h*100/zoom_value)& (~0x03);
+ left_offset=MAX((((src_w-cropW)>>1)-1),0);
+ top_offset=MAX((((src_h-cropH)>>1)-1),0);
+ left_offset &= ~0x01;
+ top_offset &=~0x01;
+
+ ipp_req.timeout = 3000;
+ ipp_req.flag = IPP_ROT_0;
+ ipp_req.store_clip_mode =1;
+ ipp_req.src0.w = cropW/scale_w_times;
+ ipp_req.src0.h = cropH/scale_h_times;
+ ipp_req.src_vir_w = src_w;
+ ipp_req.src0.fmt = IPP_Y_CBCR_H2V2;
+ ipp_req.dst0.w = src_w/scale_w_times;
+ ipp_req.dst0.h = src_h/scale_h_times;
+ ipp_req.dst_vir_w = src_w;
+ ipp_req.dst0.fmt = IPP_Y_CBCR_H2V2;
+ vipdata_base = srcbuf;
+ src_y_size = src_w*src_h;
+ dst_y_size = src_w*src_h;
+
+ for (h=0; h<scale_h_times; h++) {
+ for (w=0; w<scale_w_times; w++) {
+ int ipp_times = 3;
+ src_y_offset = (top_offset + h*cropH/scale_h_times)* src_w
+ + left_offset + w*cropW/scale_w_times;
+ src_uv_offset = (top_offset + h*cropH/scale_h_times)* src_w/2
+ + left_offset + w*cropW/scale_w_times;
+
+ dst_y_offset = src_w*src_h*h/scale_h_times + src_w*w/scale_w_times;
+ dst_uv_offset = src_w*src_h*h/scale_h_times/2 + src_w*w/scale_w_times;
+
+ ipp_req.src0.YrgbMst = vipdata_base + src_y_offset;
+ ipp_req.src0.CbrMst = vipdata_base + src_y_size + src_uv_offset;
+ ipp_req.dst0.YrgbMst = dstbuf + dst_y_offset;
+ ipp_req.dst0.CbrMst = dstbuf + dst_y_size + dst_uv_offset;
+ while(ipp_times-- > 0) {
+ if (ioctl(ippFd,IPP_BLIT_SYNC,&ipp_req)){
+				LOGE("ipp blit failed, retrying, ipp_times = %d, err: %s \n",ipp_times,strerror(errno));
+ } else {
+ break;
+ }
+ }
+ if (ipp_times <= 0) {
+ ret = -1;
+ goto do_ipp_err;
+ }
+ }
+ }
+
+do_ipp_err:
+ if(ippFd > 0)
+ close(ippFd);
+
+ return ret;
+}
+
+extern "C" int rk_camera_yuv_scale_crop_ipp(int v4l2_fmt_src, int v4l2_fmt_dst,
+ long srcbuf, long dstbuf,int src_w, int src_h,int dst_w, int dst_h,bool rotation_180)
+{
+ long vipdata_base;
+
+ struct rk29_ipp_req ipp_req;
+ int src_y_offset,src_uv_offset,dst_y_offset,dst_uv_offset,src_y_size,dst_y_size;
+ int scale_w_times =0,scale_h_times = 0,w,h;
+ int ret = 0;
+ int ippFd = -1;
+ int ratio = 0;
+ int top_offset=0,left_offset=0;
+ int cropW,cropH;
+
+
+ if((ippFd = open("/dev/rk29-ipp",O_RDWR)) < 0) {
+ LOGE("%s(%d):open /dev/rk29-ipp device failed ,err: %s",__FUNCTION__,__LINE__,strerror(errno));
+ ret = -1;
+ goto do_ipp_err;
+ }
+
+
+ /*
+ *ddl@rock-chips.com:
+	 * The IPP destination resolution is limited to 2047x1088, so the scale operation is broken up into multiple passes
+ */
+ if ((dst_w > 0x7f0) || (dst_h > 0x430)) {
+ scale_w_times = ((dst_w/0x7f0)>(dst_h/0x430))?(dst_w/0x7f0):(dst_h/0x430);
+ scale_h_times = scale_w_times;
+ scale_w_times++;
+ scale_h_times++;
+ } else {
+ scale_w_times = 1;
+ scale_h_times = 1;
+ }
+ memset(&ipp_req, 0, sizeof(struct rk29_ipp_req));
+
+ //need crop ?
+ if((src_w*100/src_h) != (dst_w*100/dst_h)){
+ ratio = ((src_w*100/dst_w) >= (src_h*100/dst_h))?(src_h*100/dst_h):(src_w*100/dst_w);
+ cropW = (ratio*dst_w/100)& (~0x03);
+ cropH = (ratio*dst_h/100)& (~0x03);
+
+ left_offset=MAX((((src_w-cropW)>>1)-1),0);
+ top_offset=MAX((((src_h-cropH)>>1)-1),0);
+ left_offset &= ~0x01;
+ top_offset &=~0x01;
+ }else{
+ cropW = src_w;
+ cropH = src_h;
+ top_offset=0;
+ left_offset=0;
+ }
+#if 1
+ if((src_w == 2592) && (src_h == 1944) && (dst_w == 2592) && (dst_h == 1458)){
+ scale_w_times= 2;
+ scale_h_times = 3;
+ cropW = dst_w;
+ cropH = dst_h;
+
+ left_offset=0;
+ top_offset=242;
+ }
+#endif
+ ipp_req.timeout = 3000;
+ if(rotation_180)
+ ipp_req.flag = IPP_ROT_180;
+ else
+ ipp_req.flag = IPP_ROT_0;
+ ipp_req.store_clip_mode =1;
+ ipp_req.src0.w = cropW/scale_w_times;
+ ipp_req.src0.h = cropH/scale_h_times;
+ ipp_req.src_vir_w = src_w;
+ if(v4l2_fmt_src == V4L2_PIX_FMT_NV12)
+ ipp_req.src0.fmt = IPP_Y_CBCR_H2V2;
+ else if(v4l2_fmt_src == V4L2_PIX_FMT_NV21)
+ ipp_req.src0.fmt = IPP_Y_CBCR_H2V1;
+ ipp_req.dst0.w = dst_w/scale_w_times;
+ ipp_req.dst0.h = dst_h/scale_h_times;
+ ipp_req.dst_vir_w = dst_w;
+ if(v4l2_fmt_dst == V4L2_PIX_FMT_NV12)
+ ipp_req.dst0.fmt = IPP_Y_CBCR_H2V2;
+ else if(v4l2_fmt_dst == V4L2_PIX_FMT_NV21)
+ ipp_req.dst0.fmt = IPP_Y_CBCR_H2V1;
+ vipdata_base = srcbuf;
+ src_y_size = src_w*src_h;
+ dst_y_size = dst_w*dst_h;
+ for (h=0; h<scale_h_times; h++) {
+ for (w=0; w<scale_w_times; w++) {
+ int ipp_times = 3;
+ src_y_offset = (top_offset + h*cropH/scale_h_times)* src_w
+ + left_offset + w*cropW/scale_w_times;
+ src_uv_offset = (top_offset + h*cropH/scale_h_times)* src_w/2
+ + left_offset + w*cropW/scale_w_times;
+
+ if(rotation_180){
+ dst_y_offset = dst_w*dst_h*(scale_h_times-1-h)/scale_h_times + dst_w*(scale_w_times-1-w)/scale_w_times;
+ dst_uv_offset = dst_w*dst_h*(scale_h_times-1-h)/scale_h_times/2 + dst_w*(scale_w_times-1-w)/scale_w_times;
+ }
+ else{
+ dst_y_offset = dst_w*dst_h*h/scale_h_times + dst_w*w/scale_w_times;
+ dst_uv_offset = dst_w*dst_h*h/scale_h_times/2 + dst_w*w/scale_w_times;
+ }
+
+ ipp_req.src0.YrgbMst = vipdata_base + src_y_offset;
+ ipp_req.src0.CbrMst = vipdata_base + src_y_size + src_uv_offset;
+ ipp_req.dst0.YrgbMst = dstbuf + dst_y_offset;
+ ipp_req.dst0.CbrMst = dstbuf + dst_y_size + dst_uv_offset;
+ while(ipp_times-- > 0) {
+ if (ioctl(ippFd,IPP_BLIT_SYNC,&ipp_req)){
+				LOGE("ipp blit failed, retrying, ipp_times = %d, err: %s \n",ipp_times,strerror(errno));
+ } else {
+ break;
+ }
+ }
+ if (ipp_times <= 0) {
+ ret = -1;
+ goto do_ipp_err;
+ }
+ }
+ }
+
+do_ipp_err:
+ if(ippFd > 0)
+ close(ippFd);
+ return ret;
+}
+
+extern "C" int YData_Mirror_Line(int v4l2_fmt_src, int *psrc, int *pdst, int w)
+{
+ int i;
+
+ for (i=0; i<(w>>2); i++) {
+ *pdst = ((*psrc>>24)&0x000000ff) | ((*psrc>>8)&0x0000ff00)
+ | ((*psrc<<8)&0x00ff0000) | ((*psrc<<24)&0xff000000);
+ psrc++;
+ pdst--;
+ }
+
+ return 0;
+}
+extern "C" int UVData_Mirror_Line(int v4l2_fmt_src, int *psrc, int *pdst, int w)
+{
+ int i;
+
+ for (i=0; i<(w>>2); i++) {
+ *pdst = ((*psrc>>16)&0x0000ffff) | ((*psrc<<16)&0xffff0000);
+ psrc++;
+ pdst--;
+ }
+
+ return 0;
+}
+extern "C" int YuvData_Mirror_Flip(int v4l2_fmt_src, char *pdata, char *pline_tmp, int w, int h)
+{
+ int *pdata_tmp = NULL;
+ int *ptop, *pbottom;
+ int err = 0,i,j;
+
+ pdata_tmp = (int*)pline_tmp;
+
+ // Y mirror and flip
+ ptop = (int*)pdata;
+ pbottom = (int*)(pdata+w*(h-1));
+ for (j=0; j<(h>>1); j++) {
+ YData_Mirror_Line(v4l2_fmt_src, ptop, pdata_tmp+((w>>2)-1),w);
+ YData_Mirror_Line(v4l2_fmt_src, pbottom, ptop+((w>>2)-1), w);
+ memcpy(pbottom, pdata_tmp, w);
+ ptop += (w>>2);
+ pbottom -= (w>>2);
+ }
+ // UV mirror and flip
+ ptop = (int*)(pdata+w*h);
+ pbottom = (int*)(pdata+w*(h*3/2-1));
+ for (j=0; j<(h>>2); j++) {
+ UVData_Mirror_Line(v4l2_fmt_src, ptop, pdata_tmp+((w>>2)-1),w);
+ UVData_Mirror_Line(v4l2_fmt_src, pbottom, ptop+((w>>2)-1), w);
+ memcpy(pbottom, pdata_tmp, w);
+ ptop += (w>>2);
+ pbottom -= (w>>2);
+ }
+YuvData_Mirror_Flip_end:
+ return err;
+}
+extern "C" int YUV420_rotate(const unsigned char* srcy, int src_stride, unsigned char* srcuv,
+ unsigned char* dsty, int dst_stride, unsigned char* dstuv,
+ int width, int height,int rotate_angle){
+ int i = 0,j = 0;
+ // 90 , y plane
+ if(rotate_angle == 90){
+ srcy += src_stride * (height - 1);
+ srcuv += src_stride * ((height >> 1)- 1);
+ src_stride = -src_stride;
+ }else if(rotate_angle == 270){
+ dsty += dst_stride * (width - 1);
+ dstuv += dst_stride * ((width>>1) - 1);
+ dst_stride = -dst_stride;
+ }
+
+ for (i = 0; i < width; ++i)
+ for (j = 0; j < height; ++j)
+ *(dsty+i * dst_stride + j) = *(srcy+j * src_stride + i);
+
+ //uv
+ unsigned char av_u0,av_v0;
+ for (i = 0; i < width; i += 2)
+ for (j = 0; j < (height>>1); ++j) {
+ av_u0 = *(srcuv+i + (j * src_stride));
+ av_v0 = *(srcuv+i + (j * src_stride)+1);
+ *(dstuv+((j<<1) + ((i >> 1) * dst_stride)))= av_u0;
+ *(dstuv+((j<<1) + ((i >> 1) * dst_stride)+1)) = av_v0;
+ }
+
+ return 0;
+ }
+
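+/*
+ * cameraFormatConvert dispatches on the source V4L2 pixel format and converts
+ * the frame into the requested V4L2 or Android display format (NV12, NV21,
+ * YUV420P, RGB565, ...), optionally mirroring it horizontally.  Only
+ * same-size conversions are implemented, apart from a 4:1 NV12->NV21
+ * downscale; unsupported combinations are logged and return -1.
+ */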
+ extern "C" int cameraFormatConvert(int v4l2_fmt_src, int v4l2_fmt_dst, const char *android_fmt_dst,
+ char *srcbuf, char *dstbuf,long srcphy,long dstphy,int src_size,
+ int src_w, int src_h, int srcbuf_w,
+ int dst_w, int dst_h, int dstbuf_w,
+ bool mirror)
+ {
+ int y_size,i,j;
+ int ret = -1;
+ /*
+ if (v4l2_fmt_dst) {
+ LOGD("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x,%dx%d)->'%c%c%c%c'@(0x%x,0x%x,%dx%d) ",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF,
+ (int)srcbuf, srcphy,src_w,src_h,
+ v4l2_fmt_dst & 0xFF, (v4l2_fmt_dst >> 8) & 0xFF,
+ (v4l2_fmt_dst >> 16) & 0xFF, (v4l2_fmt_dst >> 24) & 0xFF,
+ (int)dstbuf,dstphy,dst_w,dst_h);
+ } else if (android_fmt_dst) {
+ LOGD("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x,%dx%d)->%s@(0x%x,0x%x,%dx%d)",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF
+ , (int)srcbuf, srcphy,src_w,src_h,android_fmt_dst, (int)dstbuf,dstphy,
+ dst_w,dst_h);
+ }
+ */
+
+ y_size = src_w*src_h;
+ switch (v4l2_fmt_src)
+ {
+ case V4L2_PIX_FMT_YUV420:
+ {
+// if (CAMERA_IS_UVC_CAMERA()
+// || (CAMERA_IS_RKSOC_CAMERA() && (mCamDriverCapability.version != KERNEL_VERSION(0, 0, 1)))) {
+// goto cameraFormatConvert_default;
+// }
+ }
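+            /* no break: V4L2_PIX_FMT_YUV420 intentionally falls through to the NV12 path below */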
+ case V4L2_PIX_FMT_NV12:
+ {
+ int *dst_vu, *src_uv;
+
+ if ((v4l2_fmt_dst == V4L2_PIX_FMT_NV12) ||
+ (android_fmt_dst && (strcmp(android_fmt_dst,CAMERA_DISPLAY_FORMAT_NV12)==0))) {
+ if (dstbuf && (dstbuf != srcbuf)) {
+ if (dstbuf_w == dst_w) {
+ memcpy(dstbuf,srcbuf, y_size*3/2);
+ } else { /* ddl@rock-chips.com: v0.4.1 */
+ for (i=0;i<(dst_h*3/2);i++) {
+ memcpy(dstbuf,srcbuf, dst_w);
+ dstbuf += dstbuf_w;
+ srcbuf += srcbuf_w;
+ }
+ }
+ ret = 0;
+ }
+ } else if ((v4l2_fmt_dst == V4L2_PIX_FMT_NV21) ||
+ (android_fmt_dst && (strcmp(android_fmt_dst,android::CameraParameters::PIXEL_FORMAT_YUV420SP)==0))) {
+ if ((src_w == dst_w) && (src_h == dst_h)) {
+ if (mirror == false) {
+ if (dstbuf != srcbuf)
+ memcpy(dstbuf,srcbuf, y_size);
+ src_uv = (int*)(srcbuf + y_size);
+ dst_vu = (int*)(dstbuf+y_size);
+ for (i=0; i<(y_size>>3); i++) {
+ *dst_vu = ((*src_uv&0x00ff00ff)<<8) | ((*src_uv&0xff00ff00)>>8);
+ dst_vu++;
+ src_uv++;
+ }
+ } else {
+ char *psrc,*pdst;
+ psrc = srcbuf;
+ pdst = dstbuf + dst_w-1;
+ for (i=0; i<src_h; i++) {
+ for (j=0; j<src_w; j++) {
+ *pdst-- = *psrc++;
+ }
+ pdst += 2*dst_w;
+ }
+
+ psrc = srcbuf + y_size;
+ pdst = dstbuf + y_size + dst_w-1;
+ for (i=0; i<src_h/2; i++) {
+ for (j=0; j<src_w; j++) {
+ *pdst-- = *psrc++;
+ }
+ pdst += 2*dst_w;
+ }
+ }
+ ret = 0;
+ } else {
+ if ((v4l2_fmt_dst == V4L2_PIX_FMT_NV21) ||
+ (android_fmt_dst && (strcmp(android_fmt_dst,android::CameraParameters::PIXEL_FORMAT_YUV420SP)==0))) {
+ int *dst_uv,*src_uv;
+ unsigned *dst_y,*src_y,*src_y1;
+ int a, b, c, d;
+ if ((src_w == dst_w*4) && (src_h == dst_h*4)) {
+ dst_y = (unsigned int*)dstbuf;
+ src_y = (unsigned int*)srcbuf;
+ src_y1= src_y + (src_w*3)/4;
+ for (i=0; i<dst_h; i++) {
+ for(j=0; j<dst_w/4; j++) {
+ a = (*src_y>>24) + (*src_y&0xff) + (*src_y1>>24) + (*src_y1&0xff);
+ a >>= 2;
+ src_y++;
+ src_y1++;
+ b = (*src_y>>24) + (*src_y&0xff) + (*src_y1>>24) + (*src_y1&0xff);
+ b >>= 2;
+ src_y++;
+ src_y1++;
+ c = (*src_y>>24) + (*src_y&0xff) + (*src_y1>>24) + (*src_y1&0xff);
+ c >>= 2;
+ src_y++;
+ src_y1++;
+ d = (*src_y>>24) + (*src_y&0xff) + (*src_y1>>24) + (*src_y1&0xff);
+ d >>= 2;
+ src_y++;
+ src_y1++;
+ *dst_y++ = a | (b<<8) | (c<<16) | (d<<24);
+ }
+ //dst_y = (int*)(srcbuf+src_w*(i+1));
+ src_y += (src_w*3)/4;
+ src_y1= src_y + (src_w*3)/4;
+ }
+ dst_uv = (int*)(dstbuf+dst_w*dst_h);
+ //dst_uv = (int*)(srcbuf+y_size);
+ src_uv = (int*)(srcbuf+y_size);
+ for (i=0; i<dst_h/2; i++) {
+ for(j=0; j<dst_w/4; j++) {
+ *dst_uv = (*src_uv&0xffff0000)|((*(src_uv+2)&0xffff0000)>>16);
+ *dst_uv = ((*dst_uv&0x00ff00ff)<<8)|((*dst_uv&0xff00ff00)>>8);
+ dst_uv++;
+ src_uv += 4;
+ }
+ //dst_uv = (int*)(srcbuf+y_size+src_w*(i+1));
+ src_uv += src_w*3/4;
+ }
+ }
+ ret = 0;
+ } else {
+ if (v4l2_fmt_dst) {
+ LOGE("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x)->'%c%c%c%c'@(0x%x,0x%x), %dx%d->%dx%d "
+                                "scale isn't supported",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF,
+ v4l2_fmt_dst & 0xFF, (v4l2_fmt_dst >> 8) & 0xFF,
+ (v4l2_fmt_dst >> 16) & 0xFF, (v4l2_fmt_dst >> 24) & 0xFF,
+ (long)srcbuf, srcphy, (long)dstbuf,dstphy,src_w,src_h,dst_w,dst_h);
+ } else if (android_fmt_dst) {
+ LOGD("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x)->%s@(0x%x,0x%x) %dx%d->%dx%d "
+                                "scale isn't supported",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF
+ , (long)srcbuf, srcphy,android_fmt_dst, (long)dstbuf,dstphy,
+ src_w,src_h,dst_w,dst_h);
+ }
+ }
+ }
+ } else if (android_fmt_dst && (strcmp(android_fmt_dst,android::CameraParameters::PIXEL_FORMAT_RGB565)==0)) {
+/*
+ if (srcphy && dstphy) {
+ #ifdef TARGET_RK29
+ YUV2RGBParams para;
+
+ memset(&para, 0x00, sizeof(YUV2RGBParams));
+ para.yuvAddr = srcphy;
+ para.outAddr = dstphy;
+ para.inwidth = (src_w + 15)&(~15);
+ para.inheight = (src_h + 15)&(~15);
+ para.outwidth = (dst_w + 15)&(~15);
+ para.outheight = (dst_h + 15)&(~15);
+ para.inColor = PP_IN_YUV420sp;
+ para.outColor = PP_OUT_RGB565;
+
+ ret = doYuvToRgb(&para);
+ #else
+                LOGE("%s(%d): Converting nv12 to rgb565 via physical address isn't supported on the current platform",__FUNCTION__,__LINE__);
+ #endif
+ } else if (srcbuf && dstbuf) {
+ if(mRGAFd > 0) {
+ ret = rga_nv12torgb565(mRGAFd,src_w,src_h,srcbuf, (short int*)dstbuf,dstbuf_w);
+ } else {
+ ret = arm_nv12torgb565(src_w,src_h,srcbuf, (short int*)dstbuf,dstbuf_w);
+ }
+ }
+*/ } else if (android_fmt_dst && (strcmp(android_fmt_dst,android::CameraParameters::PIXEL_FORMAT_YUV420P)==0)) {
+ char *dst_u,*dst_v,*src_y,*dst_y,*srcuv;
+ int dstc_size,dsty_size, align_dstw,align_dsthalfw;
+
+ if ((src_w == dst_w) && (src_h == dst_h)) {
+ align_dstw = ((dst_w+15)&0xfffffff0);
+ align_dsthalfw = ((dst_w/2+15)&0xfffffff0);
+ dsty_size = align_dstw*dst_h;
+ dstc_size = align_dsthalfw*dst_h/2;
+ src_y = srcbuf;
+ dst_y = dstbuf;
+
+ if (mirror == false) {
+ for (j=0; j<src_h; j++) {
+ for (i=0; i<src_w; i++) {
+ *dst_y++ = *src_y++;
+ }
+ dst_y += align_dstw-src_w;
+ }
+
+ srcuv = (char*)(srcbuf + y_size);
+ dst_u = (char*)(dstbuf+dsty_size);
+ dst_v = dst_u + dstc_size;
+
+ for (j=0; j<src_h/2; j++) {
+ for (i=0; i<src_w/2; i++) {
+ *dst_v++ = *srcuv++;
+ *dst_u++ = *srcuv++;
+ }
+ dst_u += align_dsthalfw-src_w/2;
+ dst_v += align_dsthalfw-src_w/2;
+ }
+
+ } else {
+ char *psrc,*pdst;
+ psrc = srcbuf;
+ pdst = dstbuf + dst_w-1;
+ for (i=0; i<src_h; i++) {
+ for (j=0; j<src_w; j++) {
+ *pdst-- = *psrc++;
+ }
+ pdst += 2*align_dstw - (align_dstw - dst_w);
+ }
+
+ psrc = srcbuf + y_size;
+ dst_u = dstbuf + dsty_size + dst_w/2-1;
+ dst_v = dst_u + dstc_size;
+ for (i=0; i<src_h/2; i++) {
+ for (j=0; j<src_w/2; j++) {
+ *dst_v-- = *psrc++;
+ *dst_u-- = *psrc++;
+ }
+ dst_u += align_dsthalfw*2 - (align_dsthalfw - dst_w/2);
+ dst_v += align_dsthalfw*2 - (align_dsthalfw - dst_w/2);
+ }
+ }
+
+ ret = 0;
+ }
+ }
+ break;
+ }
+ case V4L2_PIX_FMT_YUV422P:
+ {
+// if (CAMERA_IS_UVC_CAMERA()
+// || (mCamDriverCapability.version != KERNEL_VERSION(0, 0, 1))) {
+// goto cameraFormatConvert_default;
+// }
+ }
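+            /* no break: V4L2_PIX_FMT_YUV422P falls through to NV16, which performs no conversion */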
+ case V4L2_PIX_FMT_NV16:
+ {
+ break;
+ }
+ case V4L2_PIX_FMT_YUYV:
+ {
+ char *srcbuf_begin;
+ int *dstint_y, *dstint_uv, *srcint;
+
+ if ((v4l2_fmt_dst == V4L2_PIX_FMT_NV12) ||
+ ((v4l2_fmt_dst == V4L2_PIX_FMT_YUV420)/* && CAMERA_IS_RKSOC_CAMERA()
+ && (mCamDriverCapability.version == KERNEL_VERSION(0, 0, 1))*/)) {
+ if ((src_w == dst_w) && (src_h == dst_h)) {
+ dstint_y = (int*)dstbuf;
+ srcint = (int*)srcbuf;
+ dstint_uv = (int*)(dstbuf + y_size);
+ /*
+ * author :zyh
+ * neon code for YUYV to YUV420
+ */
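+                    /*
+                     * Each NEON iteration de-interleaves 16 YUYV pixels (32 bytes):
+                     * q0 receives the 16 Y bytes (always stored), q1 the 16 U/V bytes
+                     * (stored only on even rows, producing 4:2:0 output).  The row
+                     * width is assumed to be a multiple of 16 pixels.
+                     */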
+#ifdef HAVE_ARM_NEON
+ for(i=0;i<src_h;i++) {
+ int n = src_w;
+                        char tmp = i%2;    // store UV only on even rows
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 5f \n\t"
+ "0: @ 16 pixel swap \n\t"
+ " vld2.8 {q0,q1} , [%[src]]! @ q0 = y q1 = uv \n\t"
+ " vst1.16 {q0},[%[dst_y]]! @ now q0 -> dst \n\t"
+ " cmp %[tmp], #1 \n\t"
+ " bge 1f \n\t"
+ " vst1.16 {q1},[%[dst_uv]]! @ now q1 -> dst \n\t"
+                        "1:                                               @ skip UV store on odd rows      \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " cmp %[n], #16 \n\t"
+ " bge 0b \n\t"
+ "5: @ end \n\t"
+ : [dst_y] "+r" (dstint_y), [dst_uv] "+r" (dstint_uv),[src] "+r" (srcint), [n] "+r" (n),[tmp] "+r" (tmp)
+ : [src_stride] "r" (src_w)
+ : "cc", "memory", "q0", "q1", "q2"
+ );
+ }
+ //LOGE("---------------neon code YUY to YUV420-----------------------------");
+ /*
+ * C code YUYV to YUV420
+ */
+#else
+ for(i=0;i<src_h; i++) {
+ for (j=0; j<(src_w>>2); j++) {
+ if(i%2 == 0){
+ *dstint_uv++ = (*(srcint+1)&0xff000000)|((*(srcint+1)&0x0000ff00)<<8)
+ |((*srcint&0xff000000)>>16)|((*srcint&0x0000ff00)>>8);
+ }
+ *dstint_y++ = ((*(srcint+1)&0x00ff0000)<<8)|((*(srcint+1)&0x000000ff)<<16)
+ |((*srcint&0x00ff0000)>>8)|(*srcint&0x000000ff);
+ srcint += 2;
+ }
+ }
+ //LOGE("---------------c code YUY to YUV420-----------------------------");
+#endif
+ ret = 0;
+ } else {
+ if (v4l2_fmt_dst) {
+ LOGE("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x)->'%c%c%c%c'@(0x%x,0x%x), %dx%d->%dx%d "
+                            "scale isn't supported",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF,
+ v4l2_fmt_dst & 0xFF, (v4l2_fmt_dst >> 8) & 0xFF,
+ (v4l2_fmt_dst >> 16) & 0xFF, (v4l2_fmt_dst >> 24) & 0xFF,
+ (long)srcbuf, srcphy, (long)dstbuf,dstphy,src_w,src_h,dst_w,dst_h);
+ } else if (android_fmt_dst) {
+ LOGD("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x)->%s@(0x%x,0x%x) %dx%d->%dx%d "
+                            "scale isn't supported",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF
+ , (long)srcbuf, srcphy,android_fmt_dst, (long)dstbuf,dstphy,
+ src_w,src_h,dst_w,dst_h);
+ }
+ }
+
+ } else if ((v4l2_fmt_dst == V4L2_PIX_FMT_NV21)||
+ (android_fmt_dst && (strcmp(android_fmt_dst,android::CameraParameters::PIXEL_FORMAT_YUV420SP)==0))) {
+ if ((src_w==dst_w) && (src_h==dst_h)) {
+ dstint_y = (int*)dstbuf;
+ srcint = (int*)srcbuf;
+ for(i=0;i<(y_size>>2);i++) {
+ *dstint_y++ = ((*(srcint+1)&0x00ff0000)<<8)|((*(srcint+1)&0x000000ff)<<16)
+ |((*srcint&0x00ff0000)>>8)|(*srcint&0x000000ff);
+ srcint += 2;
+ }
+ dstint_uv = (int*)(dstbuf + y_size);
+ srcint = (int*)srcbuf;
+ for(i=0;i<src_h/2; i++) {
+ for (j=0; j<(src_w>>2); j++) {
+ *dstint_uv++ = ((*(srcint+1)&0xff000000)>>8)|((*(srcint+1)&0x0000ff00)<<16)
+ |((*srcint&0xff000000)>>24)|(*srcint&0x0000ff00);
+ srcint += 2;
+ }
+ srcint += (src_w>>1);
+ }
+ ret = 0;
+ } else {
+ if (v4l2_fmt_dst) {
+ LOGE("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x)->'%c%c%c%c'@(0x%x,0x%x), %dx%d->%dx%d "
+                            "scale isn't supported",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF,
+ v4l2_fmt_dst & 0xFF, (v4l2_fmt_dst >> 8) & 0xFF,
+ (v4l2_fmt_dst >> 16) & 0xFF, (v4l2_fmt_dst >> 24) & 0xFF,
+ (long)srcbuf, srcphy, (long)dstbuf,dstphy,src_w,src_h,dst_w,dst_h);
+ } else if (android_fmt_dst) {
+ LOGD("cameraFormatConvert '%c%c%c%c'@(0x%x,0x%x)->%s@(0x%x,0x%x) %dx%d->%dx%d "
+                            "scale isn't supported",
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF
+ , (long)srcbuf, srcphy,android_fmt_dst, (long)dstbuf,dstphy,
+ src_w,src_h,dst_w,dst_h);
+ }
+ }
+ }
+ break;
+ }
+ case V4L2_PIX_FMT_RGB565:
+ {
+ if (android_fmt_dst && (strcmp(android_fmt_dst,android::CameraParameters::PIXEL_FORMAT_RGB565)==0)){
+ if (srcbuf && dstbuf && (srcbuf != dstbuf)){
+ /* if(mRGAFd > 0) {
+ ret = rga_rgb565_cp(mRGAFd,src_w,src_h,srcbuf, (short int*)dstbuf);
+ } else {
+ memcpy(dstbuf,srcbuf,src_w*src_h*2);
+ ret = 0;
+ }*/
+ }
+ }
+ break;
+ }
+
+ case V4L2_PIX_FMT_MJPEG:
+ {
+ #if 0
+ VPU_FRAME outbuf;
+ unsigned int output_len;
+ unsigned int input_len,i,j,w,h;
+ FILE *fp;
+ char filename[50];
+ unsigned int *psrc,*pdst;
+
+ output_len = 0;
+ input_len = src_size;
+ if (v4l2_fmt_dst == V4L2_PIX_FMT_NV12) {
+
+ ret = mMjpegDecoder.decode(mMjpegDecoder.decoder,(unsigned char*)&outbuf, &output_len, (unsigned char*)srcbuf, &input_len);
+ if ((ret >= 0) && (output_len == sizeof(VPU_FRAME))) {
+ VPUMemLink(&outbuf.vpumem);
+ /* following codes are used to get yuv data after decoder */
+ VPUMemInvalidate(&outbuf.vpumem);
+ w = ((outbuf.DisplayWidth+15)&(~15));
+ h = ((outbuf.DisplayHeight+15)&(~15));
+ if (mirror == false) {
+ memcpy(dstbuf, outbuf.vpumem.vir_addr,w*h*3/2);
+ } else {
+ pdst = (unsigned int*)(dstbuf);
+ psrc = (unsigned int*)outbuf.vpumem.vir_addr;
+ pdst += ((w>>2)-1);
+ for (j=0; j<h; j++) {
+ for (i=0; i<(w>>2); i++) {
+ *pdst = ((*psrc>>24)&0x000000ff) | ((*psrc>>8)&0x0000ff00)
+ | ((*psrc<<8)&0x00ff0000) | ((*psrc<<24)&0xff000000);
+ psrc++;
+ pdst--;
+ }
+ pdst += (w>>1);
+ }
+
+ pdst = (unsigned int*)dstbuf;
+ psrc = (unsigned int*)outbuf.vpumem.vir_addr;
+
+ pdst += (w*h/4);
+ psrc += (w*h/4);
+ pdst += ((w>>2)-1);
+ for (j=0; j<(h/2); j++) {
+ for (i=0; i<(w>>2); i++) {
+ *pdst = ((*psrc>>16)&0x0000ffff) | ((*psrc<<16)&0xffff0000);
+ psrc++;
+ pdst--;
+ }
+ pdst += (w>>1);
+ }
+ }
+ VPUFreeLinear(&outbuf.vpumem);
+ } else {
+ LOGE("%s(%d): mjpeg decode failed! ret:%d output_len: 0x%x, src_buf: %p, input_len: %d",__FUNCTION__,__LINE__,
+ ret,output_len,srcbuf, input_len);
+ }
+ }
+ #endif
+ break;
+ }
+
+ cameraFormatConvert_default:
+ default:
+ if (android_fmt_dst) {
+            LOGE("%s(%d): CameraHal does not support (%c%c%c%c -> %s)",__FUNCTION__,__LINE__,
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF, android_fmt_dst);
+ } else if (v4l2_fmt_dst) {
+            LOGE("%s(%d): CameraHal does not support (%c%c%c%c -> %c%c%c%c)",__FUNCTION__,__LINE__,
+ v4l2_fmt_src & 0xFF, (v4l2_fmt_src >> 8) & 0xFF,
+ (v4l2_fmt_src >> 16) & 0xFF, (v4l2_fmt_src >> 24) & 0xFF,
+ v4l2_fmt_dst & 0xFF, (v4l2_fmt_dst >> 8) & 0xFF,
+ (v4l2_fmt_dst >> 16) & 0xFF, (v4l2_fmt_dst >> 24) & 0xFF);
+ }
+ break;
+ }
+ return ret;
+
+ }
+
+
diff --git a/CameraHal/CameraHal_MediaProfile.cpp b/CameraHal/CameraHal_MediaProfile.cpp
new file mode 100755
index 0000000..8cc6da0
--- /dev/null
+++ b/CameraHal/CameraHal_MediaProfile.cpp
@@ -0,0 +1,852 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+#include <time.h>
+#include <fcntl.h>
+#include <dlfcn.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <sys/file.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <linux/videodev2.h>
+#include <linux/version.h>
+
+#include "CameraHal.h"
+#include "CameraHal_Module.h"
+
+namespace android {
+
+#define VIDEO_DEV_NAME "/dev/video0"
+#define VIDEO_DEV_NAME_1 "/dev/video1"
+
+#define RK29_CAM_VERSION_CODE_1 KERNEL_VERSION(0, 0, 1)
+#define RK29_CAM_VERSION_CODE_2 KERNEL_VERSION(0, 0, 2)
+
+static uint resolution[][2] = {{176,144},{240,160},{320,240},{352,288},
+ {640,480},{720,480},{800,600}, {1280,720},{1920,1080},
+ {0,0}};
+
+#define RK30_PLAT 1
+#define RK29_PLAT 0
+
+#define XML_FILE_DEFAULT "/etc/media_profiles_default.xml"
+#define XML_FILE_TMP "/data/media_profiles_tmp.xml"
+
+
+
+static char fmt_name[][10]={
+{"qcif"},
+{"160p"},
+{"qvga"},
+{"cif"},
+{"480p"},
+{"480p"},
+{"svga"},
+{"720p"},
+{"1080p"}
+};
+
+struct xml_video_element{
+ int n_cameraId;
+ char str_quality[10];
+ int n_width;
+ int n_height;
+ int n_frameRate;
+ int isAddMark;
+};
+
+struct xml_fp_position{
+ int camid;
+ long camid_start;
+ long camid_end;
+};
+
+struct xml_video_name{
+ int camid;
+ char camera_name[30];
+};
+
+
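+/*
+ * CameraGroupFound_new resolves the /sys/dev/char/81:N/device symlinks of
+ * /dev/video0 and /dev/video1 to discover the sensor names, extracts the
+ * numeric suffix of "front_<n>"/"back_<n>", and packs them into a single id
+ * ((back<<8)|front) that later selects the media_profiles xml variant.
+ */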
+int CameraGroupFound_new(struct xml_video_name* video_name)
+{
+ int media_profiles_id=0,i,len,front_id = 0,back_id = 0;
+ struct xml_video_name* pst_video_name = video_name;
+ char camera_link[30];
+ char *camera_name = pst_video_name->camera_name;
+ char *end;
+
+ for (i=0; i<2; i++) {
+ pst_video_name->camid = i;
+ sprintf(camera_link,"/sys/dev/char/81:%d/device",i);
+ memset(camera_name,0x00,sizeof(pst_video_name->camera_name));
+		len = readlink(camera_link,camera_name, sizeof(pst_video_name->camera_name)-1);
+		if (len > 0) {
+			camera_name[len] = 0x00;	/* readlink() does not null-terminate */
+ if (strstr(camera_name,"front")) {
+ camera_name = strstr(camera_name,"front");
+ camera_name = strstr(camera_name,"_");
+ if (camera_name != NULL)
+ front_id = strtol(camera_name+1,&end,10);
+ else
+ front_id = 0;
+ } else if (strstr(camera_name,"back")) {
+ camera_name = strstr(camera_name,"back");
+ camera_name = strstr(camera_name,"_");
+ if (camera_name != NULL)
+ back_id = strtol(camera_name+1,&end,10);
+ else
+ back_id = 0;
+ }
+
+ media_profiles_id = ((back_id<<8) | front_id);
+ }
+ pst_video_name++;
+ camera_name = pst_video_name->camera_name;
+ }
+ return media_profiles_id;
+}
+
+int isSame_camera(struct xml_video_name* device_videoname, struct xml_video_name* xml_videoname)
+{
+ int ret = -1;
+ struct xml_video_name* pst_device_name = device_videoname;
+ struct xml_video_name* pst_xml_name = xml_videoname;
+ int i;
+
+ ret = strcmp(pst_device_name->camera_name, pst_xml_name->camera_name);
+ pst_device_name++;
+ pst_xml_name++;
+ ret |= strcmp(pst_device_name->camera_name, pst_xml_name->camera_name);
+
+ return ret;
+}
+
+
+int find_resolution_index(int w, int h)
+{
+ int list_w, list_h;
+ int i=0;
+ int index=-1;
+
+ do{
+ list_w = resolution[i][0];
+ list_h = resolution[i][1];
+
+ if(list_w==w && list_h==h){
+ index = i;
+ break;
+ }
+ i++;
+ }while(list_w!=0 && list_h!=0);
+
+ return index;
+}
+
+int find_frameRate(struct xml_video_element* pst_element, int element_count, struct xml_video_element* find_element)
+{
+ int i;
+ int ret = -1;
+ struct xml_video_element* element = pst_element;
+
+ for(i=0; i<element_count; i++, element++){
+ if(find_element->n_cameraId != element->n_cameraId){
+ continue;
+ }
+ ret = strcmp(element->str_quality, find_element->str_quality);
+ //printf("strcmp(%d) %d==%d\n", ret, element->n_width, find_element->n_width);
+ if(!ret && (element->n_width==find_element->n_width))
+ {
+ find_element->n_height = element->n_height;
+ find_element->n_frameRate = element->n_frameRate;
+ find_element->isAddMark = element->isAddMark;
+ break;
+ }
+ }
+
+ if(i==element_count)
+ return -1;
+ else
+ return 0;
+}
+
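+/*
+ * find_camId_fseek scans the xml for the <CamcorderProfiles cameraId=...>
+ * section matching fp_pos->camid and records the file offsets of its start
+ * and of the closing </CamcorderProfiles>, so later passes can tell which
+ * camera a given line belongs to.
+ */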
+int find_camId_fseek(FILE* fp, struct xml_fp_position* fp_pos){
+ char one_line_buf[256];
+ int find_fmt_sign=0;
+ char *leave_line, *leave_line1, *leave_line2;
+ char str_camId[4];
+ const char *equal_sign = "=";
+ int count=0;
+
+ if(fp==NULL)
+ return -1;
+
+ if(fp_pos==NULL)
+ return -2;
+
+ memset(str_camId, 0x00, sizeof(str_camId));
+ sprintf(str_camId, "%d", fp_pos->camid);
+ fseek(fp,0,SEEK_SET);
+ while(fgets(one_line_buf,256,fp) != NULL)
+ {
+ if(strlen(one_line_buf) < 3) //line is NULL
+ {
+ continue;
+ }
+
+ if(find_fmt_sign==0)
+ {
+ leave_line = NULL;
+ leave_line = strstr(one_line_buf, "<CamcorderProfiles");
+ if(leave_line == NULL) //no "<CamcorderProfiles"
+ {
+ continue;
+ }
+
+ leave_line1 = NULL;
+ leave_line1 = strstr(leave_line,equal_sign);
+ if(leave_line1 == NULL) //no "="
+ {
+ continue;
+ }
+
+ leave_line2 = NULL;
+ leave_line2 = strstr(leave_line1,str_camId);
+ if(leave_line2 == NULL) //no "0/1"
+ {
+ continue;
+ }else{
+ fp_pos->camid_start = ftell(fp);
+ find_fmt_sign=1;
+ continue;
+ }
+ }else{
+ leave_line = NULL;
+ leave_line = strstr(one_line_buf, "</CamcorderProfiles>");
+			if(leave_line == NULL) //no "</CamcorderProfiles>"
+ {
+ continue;
+ }else{
+ fp_pos->camid_end = ftell(fp);
+ break;
+ }
+ }
+
+ if(fgetc(fp)==EOF)
+ {
+ break;
+ }
+ fseek(fp,-1,SEEK_CUR);
+ memset(one_line_buf,0,sizeof(one_line_buf));
+ }
+
+ return 0;
+}
+int xml_version_check(struct v4l2_capability* CamDriverCapability)
+{
+ return (CamDriverCapability->version > 0x000300)?1:0;
+}
+
+int xml_read_camname(FILE* fp, struct xml_video_name* video_name, int video_count)
+{
+ char one_line_buf[256];
+ char *leave_line0, *leave_line1, *leave_line2;
+ int leave_num;
+ const char* equal_sign = "=";
+ const char* mark_sign_start = "<!--";
+ const char* mark_sign_end = "-->";
+ const char* videoname_sign = "videoname";
+ struct xml_video_name* pst_video_name = video_name;
+ int count = 0;
+
+ fseek(fp,0,SEEK_SET);
+
+ while(fgets(one_line_buf,256,fp) != NULL)
+ {
+ if(strlen(one_line_buf) < 3) //line is NULL
+ {
+ continue;
+ }
+ leave_line0 = NULL;
+ leave_line0 = strstr(one_line_buf, mark_sign_start);
+ if(leave_line0==NULL)
+ {
+ continue;
+ }
+ leave_line1 = NULL;
+ leave_line1 = strstr(one_line_buf, mark_sign_end);
+ if(leave_line1==NULL)
+ {
+ continue;
+ }
+
+ leave_line0 = NULL;
+ leave_line0 = strstr(one_line_buf, videoname_sign);
+ if(leave_line0==NULL)
+ {
+ continue;
+ }
+
+ leave_line1 = NULL;
+ leave_line1 = strstr(leave_line0, equal_sign);
+ if(leave_line1==NULL)
+ {
+ continue;
+ }
+
+ sscanf(leave_line0, "videoname%d=\"%[^\"]", &pst_video_name->camid, pst_video_name->camera_name);
+ count++;
+ if(count==video_count){
+ break;
+ }
+ pst_video_name++;
+
+ if(fgetc(fp)==EOF)
+ {
+ break;
+ }
+ fseek(fp,-1,SEEK_CUR);
+ }
+
+ return count;
+}
+
+
+int xml_copy_and_write_camname(const char* src_file, char* dst_file, struct xml_video_name* video_name, int video_count)
+{
+ FILE *fpsrc, *fpdst;
+ char one_line_buf[256];
+ char *leave_line0, *leave_line1, *leave_line2;
+ struct xml_video_name* pst_video_name = video_name;
+ int isWrite=0;
+ int i;
+
+ fpsrc = fopen(src_file,"r");
+ if(fpsrc == NULL)
+ {
+		LOGE("%s open '%s' failed (r)\n", __FUNCTION__, src_file);
+ return -1;
+ }
+
+ fpdst = fopen(dst_file,"w");
+ if(fpdst == NULL)
+ {
+		LOGE("%s open temp file '%s' failed (w)\n",__FUNCTION__, dst_file);
+ return -2;
+ }
+
+ fseek(fpsrc,0,SEEK_SET);
+ fseek(fpdst,0,SEEK_SET);
+ while(fgets(one_line_buf,256,fpsrc) != NULL)
+ {
+
+ fputs(one_line_buf, fpdst);
+
+ if(isWrite==0){
+ leave_line0 = NULL;
+ leave_line0 = strstr(one_line_buf, "<?");
+ if(leave_line0==NULL){
+ continue;
+ }
+
+ leave_line0 = NULL;
+ leave_line0 = strstr(one_line_buf, "?>");
+ if(leave_line0==NULL){
+ continue;
+ }
+
+ for(i=0; i<video_count; i++){
+ fprintf(fpdst, "<!-- videoname%d=\"%s\" --> \n", pst_video_name->camid, pst_video_name->camera_name);
+ pst_video_name++;
+ }
+ isWrite=1;
+ }
+
+ if(fgetc(fpsrc)==EOF)
+ {
+ break;
+ }
+ fseek(fpsrc,-1,SEEK_CUR);
+ }
+
+ memset(one_line_buf,0,sizeof(one_line_buf));
+ fclose(fpsrc);
+ fclose(fpdst);
+
+ return 0;
+}
+
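+/*
+ * xml_alter copies src_xml_file to dst_xml_file line by line, rewriting the
+ * frameRate attribute of every <EncoderProfile> from the probed
+ * xml_video_element list and wrapping profiles the sensor cannot deliver in
+ * <!-- --> comments.  On success the temporary file replaces the original.
+ */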
+int xml_alter(char* src_xml_file, const char* dst_xml_file, struct xml_video_element* pst_element, int element_count)
+{
+ int ret = 0, err=0;
+ int alter_sign = 0;
+ int find_fmt_sign=0;
+ int leave_num=0;
+ long now_fp_pos;
+ const char *equal_sign = "=";
+ FILE *src_fp = NULL, *dst_fp = NULL;
+ char one_line_buf[256];
+ char frontpart_line[50];
+ long front_fptmp = 0,back_fptmp = 0;
+ struct xml_fp_position fp_pos[2];
+ struct xml_video_element find_element;
+ char *leave_line, *leave_line1, *leave_line2;
+ struct xml_video_element* element =pst_element ;
+
+ memset(&find_element, 0x00, sizeof(struct xml_video_element));
+ src_fp = fopen(src_xml_file, "r");
+ if(src_fp==NULL){
+ err = -1;
+ LOGD("open file '%s' failed!!! (r)\n", src_xml_file);
+ goto alter_exit;
+ }
+
+ dst_fp = fopen(dst_xml_file, "w");
+ if(dst_fp==NULL){
+ err = -2;
+		LOGD("open file '%s' failed!!! (w)\n", dst_xml_file);
+ goto alter_exit;
+ }
+
+ memset(&fp_pos, 0x00, 2*sizeof(struct xml_fp_position));
+ fp_pos[0].camid = 0;
+ ret = find_camId_fseek(src_fp, &fp_pos[0]);
+ if(ret < 0 || fp_pos[0].camid_end <= fp_pos[0].camid_start){
+ LOGD("find camid(%d) failed\n", fp_pos[0].camid);
+ err = -3;
+ goto alter_exit;
+ }
+
+ fp_pos[1].camid = 1;
+ ret = find_camId_fseek(src_fp, &fp_pos[1]);
+ if(ret < 0 || fp_pos[1].camid_end <= fp_pos[1].camid_start){
+ LOGD("find camid(%d) failed\n", fp_pos[1].camid);
+ err = -3;
+ goto alter_exit;
+ }
+
+ if(fp_pos[0].camid_end>0 && fp_pos[0].camid_start>0 && fp_pos[1].camid_end>0 && fp_pos[1].camid_start>0){
+ fseek(src_fp,0,SEEK_SET);
+ fseek(dst_fp,0,SEEK_SET);
+
+ while(fgets(one_line_buf,256,src_fp) != NULL)
+ {
+ if(strlen(one_line_buf) < 3) //line is NULL
+ {
+ fputs(one_line_buf, dst_fp);
+ continue;
+ }
+
+ if(find_fmt_sign==0)
+ {
+ leave_line = NULL;
+ leave_line = strstr(one_line_buf,equal_sign);
+ if(leave_line == NULL) //no "="
+ {
+ fputs(one_line_buf, dst_fp);
+ continue;
+ }
+ leave_line1 = NULL;
+ leave_line1 = strstr(one_line_buf, "<EncoderProfile");
+ if(leave_line1 == NULL)
+ {
+ fputs(one_line_buf, dst_fp);
+ continue;
+ }
+
+ leave_line2 = NULL;
+ leave_line2 = strstr(leave_line1, "timelapse");
+ if(leave_line2 == NULL)
+ {
+ memset(find_element.str_quality, 0x00, sizeof(find_element.str_quality));
+ sscanf(leave_line, "%*[^\"]\"%[^\"]", find_element.str_quality);
+ }else{
+ memset(find_element.str_quality, 0x00, sizeof(find_element.str_quality));
+ sscanf(leave_line, "%*[^\"]\"timelapse%[^\"]", find_element.str_quality);
+ }
+
+ //printf("quality %s\n", find_element.str_quality);
+ find_fmt_sign = 1;
+ front_fptmp = ftell(dst_fp);
+ fprintf(dst_fp, " \n");
+ fputs(one_line_buf, dst_fp);
+ continue;
+ }
+ else if(find_fmt_sign==1)
+ {
+ leave_line = NULL;
+ leave_line = strstr(one_line_buf,equal_sign);
+ if(leave_line == NULL) //no "="
+ {
+ fputs(one_line_buf, dst_fp);
+ continue;
+ }
+
+ leave_line1 = NULL;
+ leave_line1 = strstr(one_line_buf,"width");
+ if(leave_line1 == NULL) //no "width"
+ {
+ fputs(one_line_buf, dst_fp);
+ continue;
+ }
+ sscanf(leave_line, "%*[^1-9]%d\"", &find_element.n_width);
+ //printf("%d\n", find_element.n_width);
+ find_fmt_sign=2;
+ fputs(one_line_buf, dst_fp);
+ continue;
+ }
+ else if(find_fmt_sign==2)
+ {
+ leave_line = NULL;
+ leave_line = strstr(one_line_buf,equal_sign);
+ if(leave_line == NULL) //no "="
+ {
+ fputs(one_line_buf, dst_fp);
+ continue;
+ }
+ leave_line1 = NULL;
+ leave_line1 = strstr(one_line_buf, "frameRate");
+				if(leave_line1 == NULL) //no "frameRate"
+ {
+ fputs(one_line_buf, dst_fp);
+ continue;
+ }
+
+ now_fp_pos = ftell(src_fp);
+ if(now_fp_pos>fp_pos[0].camid_start && now_fp_pos<fp_pos[0].camid_end)
+ find_element.n_cameraId = 0;
+ else if(now_fp_pos>fp_pos[1].camid_start && now_fp_pos<fp_pos[1].camid_end)
+ find_element.n_cameraId = 1;
+ else
+ find_element.n_cameraId = -1;
+
+ if(find_element.n_cameraId != -1){
+ ret = find_frameRate(element, element_count, &find_element);
+ if(ret==0){
+ leave_num = leave_line - one_line_buf;
+ memset(frontpart_line,0,sizeof(frontpart_line));
+ strncpy(frontpart_line,one_line_buf,leave_num);
+ fputs(frontpart_line,dst_fp);
+
+ //printf("new frameRate %d isaddmark(%d)\n", find_element.n_frameRate, find_element.isAddMark);
+ fprintf(dst_fp,"=\"%d\" /> \n", (find_element.n_frameRate));
+ alter_sign++;
+ find_fmt_sign = 3;
+ LOGD("XML modify: camID(%d) resolution:%s(%dx%d) fps(%d) isaddmark(%d)\n",find_element.n_cameraId,find_element.str_quality,
+ find_element.n_width, find_element.n_height, find_element.n_frameRate, find_element.isAddMark);
+ }else{
+						LOGD("WARNING: can't find camID(%d) resolution:%s(width %d), adding comment mark\n", find_element.n_cameraId,find_element.str_quality, find_element.n_width);
+ find_element.isAddMark=1;
+ find_fmt_sign = 3;
+ fputs(one_line_buf, dst_fp);
+ //continue;
+ }
+ }else{
+ find_fmt_sign = 3;
+ fputs(one_line_buf, dst_fp);
+ continue;
+ }
+ }else if(find_fmt_sign==3){
+ leave_line = NULL;
+ leave_line = strstr(one_line_buf,"</EncoderProfile>");
+				if(leave_line == NULL) //no "</EncoderProfile>"
+ {
+ fputs(one_line_buf, dst_fp);
+ continue;
+ }
+ fputs(one_line_buf, dst_fp);
+ if(find_element.isAddMark){
+ back_fptmp = ftell(dst_fp);
+ fseek(dst_fp,front_fptmp,SEEK_SET);
+ fprintf(dst_fp, "<!-- \n");
+ fseek(dst_fp,back_fptmp,SEEK_SET);
+ fprintf(dst_fp, "--> \n");
+ find_element.isAddMark=0;
+ }
+ find_fmt_sign=0;
+ }
+
+ if(fgetc(src_fp)==EOF)
+ {
+ break;
+ }
+ fseek(src_fp,-1,SEEK_CUR);
+ memset(one_line_buf,0,sizeof(one_line_buf));
+ }
+ }
+
+alter_exit:
+	if(src_fp)
+		fclose(src_fp);
+	if(dst_fp)
+		fclose(dst_fp);
+
+ if(err==0){
+ remove(src_xml_file);
+ rename(dst_xml_file,src_xml_file);
+ chmod(src_xml_file, S_IRUSR|S_IWUSR|S_IRGRP|S_IROTH);
+ }
+ return ret;
+}
+
+
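+/*
+ * camera_request_framerate opens the given video node and fills one
+ * xml_video_element per entry of resolution[]: UVC cameras are capped at
+ * 15fps/VGA, SoC sensors are probed with VIDIOC_TRY_FMT (maximum size) and
+ * VIDIOC_ENUM_FRAMEINTERVALS; resolutions the sensor cannot cover are marked
+ * with isAddMark so xml_alter will comment them out.
+ */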
+int camera_request_framerate(const char* dev_path, int camid, struct xml_video_element* pst_element, int element_count)
+{
+ int err,i;
+ int nSizeBytes;
+ int ret;
+ int crop_w, crop_h;
+ int fps;
+ int width, height;
+ int iCamFd=-1;
+ int ver=0;
+ unsigned int pix_format_tmp;
+ struct v4l2_format format;
+ struct v4l2_frmivalenum fival;
+ struct v4l2_capability CamDriverCapability;
+ struct xml_video_element* element=pst_element;
+ struct xml_video_element* element_tmp;
+ int resolution_index;
+ struct v4l2_format fmt;
+ int sensor_resolution_w=0,sensor_resolution_h=0;
+
+ iCamFd = open(dev_path, O_RDWR);
+
+ if (iCamFd < 0) {
+ LOGE("%s.%d Could not open the camera device %s \n",
+ __FUNCTION__,__LINE__,dev_path);
+ err = -1;
+ goto exit;
+ }
+
+ memset(&CamDriverCapability, 0, sizeof(struct v4l2_capability));
+ err = ioctl(iCamFd, VIDIOC_QUERYCAP, &CamDriverCapability);
+ if (err < 0) {
+		LOGE("%s.%d Error opening device: unable to query device capabilities.\n",__FUNCTION__,__LINE__);
+ goto exit;
+ }
+
+ if (CamDriverCapability.version == RK29_CAM_VERSION_CODE_1) {
+ pix_format_tmp = V4L2_PIX_FMT_YUV420;
+ LOGD("%s.%d Current camera driver version: 0.0.1 \n",__FUNCTION__,__LINE__);
+ } else {
+ pix_format_tmp = V4L2_PIX_FMT_NV12;
+ }
+ /* oyyf@rock-chips.com: v0.4.0x15 */
+ if(strcmp((char*)&CamDriverCapability.driver[0],"uvcvideo") == 0){
+ i=0;
+ sensor_resolution_w = 640; /* ddl@rock-chips.com: uvc camera resolution fix vga */
+ sensor_resolution_h = 480;
+
+ for(i=0; i<element_count; i++, element++){
+ width = resolution[i][0];
+ height = resolution[i][1];
+ element->n_cameraId = camid;
+ element->n_width = width;
+ element->n_height = height;
+ strcat(element->str_quality, fmt_name[i]);
+ element->n_frameRate = 15;
+ element->isAddMark = 0;
+ if ((width>sensor_resolution_w) || (height>sensor_resolution_h)) {
+ element->isAddMark = 1;
+ LOGD("USB-CAMERA: CameraId:%d %dx%d fps: %d isAddMark(%d)\n",camid,width,height,element->n_frameRate,element->isAddMark);
+ continue;
+ }
+ LOGD("USB-CAMERA: CameraId:%d %dx%d fps: %d isAddMark(%d)\n",camid,width,height,element->n_frameRate,element->isAddMark);
+ }
+
+ goto exit;
+ }
+
+ ver = xml_version_check(&CamDriverCapability);
+ if(ver){
+ i=0;
+
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ fmt.fmt.pix.pixelformat= pix_format_tmp;
+ fmt.fmt.pix.field = V4L2_FIELD_NONE;
+
+ /*picture size setting*/
+ fmt.fmt.pix.width = 10000;
+ fmt.fmt.pix.height = 10000;
+ ret = ioctl(iCamFd, VIDIOC_TRY_FMT, &fmt);
+
+ sensor_resolution_w = fmt.fmt.pix.width;
+ sensor_resolution_h = fmt.fmt.pix.height; /* ddl@rock-chips.com: v0.4.e */
+
+ for(i=0; i<element_count; i++, element++){
+ width = resolution[i][0];
+ height = resolution[i][1];
+ memset(&fival, 0, sizeof(fival));
+ fival.index = 0;
+ fival.pixel_format = pix_format_tmp;
+ fival.width = width;
+ fival.height = height;
+ fival.reserved[1] = 0x00;
+ element->n_cameraId = camid;
+ element->n_width = width;
+ element->n_height = height;
+ strcat(element->str_quality, fmt_name[i]);
+ element->n_frameRate = 0;
+ element->isAddMark = 0;
+
+ /* ddl@rock-chips.com: v0.4.e */
+ if ((width>sensor_resolution_w) || (height>sensor_resolution_h)) {
+ element->isAddMark = 1;
+ continue;
+ }
+
+ if ((ret = ioctl(iCamFd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
+ if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
+ fps = (fival.discrete.denominator/fival.discrete.numerator);
+ crop_w = (fival.reserved[1]&0xffff0000)>>16;
+ crop_h = (fival.reserved[1]&0xffff);
+
+ element->n_frameRate = fps;
+
+ if ((crop_w!= width) || (crop_h != height)) {
+ if(width==1280 && height==720 ) {
+ element->isAddMark = 1;
+ }
+ }
+
+ if(width==720 && height==480){
+
+ if ((crop_w>800) || (crop_h>600)) { /* ddl@rock-chips.com: v0.4.e */
+ element->isAddMark = 1;
+ } else {
+ resolution_index = find_resolution_index(640,480);
+ element_tmp = pst_element + resolution_index;
+ element_tmp->isAddMark = 1;
+ }
+ }
+
+ LOGD("CameraId:%d %dx%d(%dx%d) fps: %d\n",camid,width,height,crop_w,crop_h, fps);
+
+ } else {
+ LOGE("fival.type != V4L2_FRMIVAL_TYPE_DISCRETE\n");
+ }
+ } else {
+ element->isAddMark = 1;
+ LOGE("find frame intervals failed ret(%d)\n", ret);
+ }
+ }
+ } else {
+		LOGE("%s.%d Current camera driver version: %d.%d.%d, it does not support CameraHal_MediaProfile\n",
+ __FUNCTION__,__LINE__,
+ (CamDriverCapability.version>>16) & 0xff,(CamDriverCapability.version>>8) & 0xff,
+ CamDriverCapability.version & 0xff);
+ err = -1;
+ }
+
+exit:
+ if(iCamFd>0){
+ close(iCamFd);
+ iCamFd=-1;
+ }
+ return err;
+}
+
+
+
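+/*
+ * media_profiles_xml_control is the entry point: if a vendor-provided
+ * /etc/media_profiles*.xml matching the detected camera pair exists it is
+ * used as-is; otherwise /data/media_profiles.xml is regenerated from
+ * /etc/media_profiles_default.xml and adjusted with the frame rates probed
+ * from /dev/video0 and /dev/video1.
+ */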
+int media_profiles_xml_control(void)
+{
+ int ver=0;
+ int ret=0,ret1,ret2;
+ int i;
+ int element_count;
+ struct xml_video_element *pst_element, *pst_element1;
+ struct timeval t0, t1, t2, t3;
+ int media_profiles_id;
+ struct xml_video_name *pst_device_video_name;
+ struct xml_video_name *pst_xml_video_name;
+ FILE* client_fp;
+ char clientXmlFile[30];
+ char defaultXmlFile[30];
+ int count;
+ FILE* fp=NULL;
+
+ pst_xml_video_name = (struct xml_video_name*)malloc(2*sizeof(struct xml_video_name));
+ memset(pst_xml_video_name, 0x00, 2*sizeof(struct xml_video_name));
+ pst_device_video_name = (struct xml_video_name*)malloc(2*sizeof(struct xml_video_name));
+ memset(pst_device_video_name, 0x00, 2*sizeof(struct xml_video_name));
+
+ media_profiles_id = CameraGroupFound_new(pst_device_video_name);
+ if(media_profiles_id==0){
+ clientXmlFile[0]=0x00;
+ strcat(clientXmlFile,"/etc/media_profiles.xml");
+ printf("client media_profiles.xml file name %s\n", clientXmlFile);
+ }else{
+ sprintf(clientXmlFile,"/etc/media_profiles%d%d.xml",(media_profiles_id&0xff00)>>8,media_profiles_id&0xff);
+ printf("client media_profiles.xml file name %s\n", clientXmlFile);
+ }
+
+ client_fp = fopen(clientXmlFile, "r");
+ if(client_fp != NULL){
+		LOGD("client has %s file, so we use the client file first!\n", clientXmlFile);
+ fclose(client_fp);
+ goto xml_exit;
+ }
+
+ defaultXmlFile[0] = 0x00;
+ strcat(defaultXmlFile,"/data/media_profiles.xml");
+ printf("media_profiles.xml file name %s\n", defaultXmlFile);
+
+ fp = fopen(defaultXmlFile, "r");
+ if(fp!=NULL){
+ count = xml_read_camname(fp, pst_xml_video_name, 2);
+		//if the camera device names are the same, we don't need to create a new xml file
+ ret = isSame_camera(pst_device_video_name, pst_xml_video_name);
+ fclose(fp);
+ if(ret==0){
+			LOGE("%s exists, and the camera devices are the same!\n", defaultXmlFile);
+ goto xml_exit;
+ }
+		LOGD("%s exists, but the camera devices are not the same!\n", defaultXmlFile);
+ remove(defaultXmlFile);
+ }
+ ret = xml_copy_and_write_camname(XML_FILE_DEFAULT, defaultXmlFile, pst_device_video_name, 2);
+ if(ret<0){
+ LOGD("copy file failed\n");
+ goto xml_exit;
+ } else {
+ LOGD("copy file %s from %s, and alter its configuration\n", defaultXmlFile,XML_FILE_DEFAULT);
+ }
+
+ i=0;
+ element_count=0;
+ while(resolution[i][0]>0 && resolution[i][1]>0){
+ element_count++;
+ i++;
+ }
+ pst_element = (struct xml_video_element*)malloc(2*element_count*sizeof(struct xml_video_element));
+ memset(pst_element, 0x00, 2*element_count*sizeof(struct xml_video_element));
+ pst_element1 = pst_element + element_count;
+
+ //dev: /dev/video0
+ ret1 = camera_request_framerate(VIDEO_DEV_NAME, 0, pst_element, element_count);
+ //dev: /dev/video1
+ ret2 = camera_request_framerate(VIDEO_DEV_NAME_1, 1, pst_element1, element_count);
+
+ if ((ret1>=0) || (ret2>=0)) {
+ ret = xml_alter(defaultXmlFile, XML_FILE_TMP, pst_element, 2*element_count);
+		LOGD("/data/media_profiles.xml is valid!");
+ } else {
+		LOGD("/etc/media_profiles.xml is valid!");
+ }
+
+ free(pst_element);
+ pst_element=NULL;
+
+xml_exit:
+ free(pst_xml_video_name);
+ pst_xml_video_name=NULL;
+ free(pst_device_video_name);
+ pst_device_video_name=NULL;
+
+ return ret;
+}
+
+}
+
diff --git a/CameraHal/CameraHal_Mem.cpp b/CameraHal/CameraHal_Mem.cpp
new file mode 100644
index 0000000..fa90be1
--- /dev/null
+++ b/CameraHal/CameraHal_Mem.cpp
@@ -0,0 +1,1583 @@
+/*
+*Author: zyc@rock-chips.co
+*/
+#include <sys/stat.h>
+#include <unistd.h>
+#include <utils/CallStack.h>
+#include "CameraHal.h"
+
+
+namespace android {
+
+
+/******************ION BUFFER START*******************/
+MemManagerBase::MemManagerBase()
+{
+	mPreviewBufferInfo = NULL;
+	mRawBufferInfo = NULL;
+	mJpegBufferInfo = NULL;
+	mVideoEncBufferInfo = NULL;
+}
+MemManagerBase::~MemManagerBase()
+{
+	mPreviewBufferInfo = NULL;
+	mRawBufferInfo = NULL;
+	mJpegBufferInfo = NULL;
+	mVideoEncBufferInfo = NULL;
+}
+
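+/*
+ * getBufferAddr returns the virtual address, physical address or share fd of
+ * buffer buf_idx in the requested pool (preview/raw/jpeg/video-encode).
+ */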
+unsigned int MemManagerBase::getBufferAddr(enum buffer_type_enum buf_type, unsigned int buf_idx, buffer_addr_t addr_type)
+{
+ unsigned long addr = 0x00;
+ struct bufferinfo_s *buf_info;
+
+ switch(buf_type)
+ {
+ case PREVIEWBUFFER:
+ buf_info = mPreviewBufferInfo;
+ break;
+ case RAWBUFFER:
+ buf_info = mRawBufferInfo;
+ break;
+ case JPEGBUFFER:
+ buf_info = mJpegBufferInfo;
+ break;
+ case VIDEOENCBUFFER:
+ buf_info = mVideoEncBufferInfo;
+ break;
+ default:
+			LOGE("Buffer type(0x%x) is invalid",buf_type);
+ goto getVirAddr_end;
+ }
+
+	if (buf_idx >= buf_info->mNumBffers) {
+		LOGE("Buffer index(0x%x) is invalid, total buffer count is 0x%x",
+			buf_idx,buf_info->mNumBffers);
+ goto getVirAddr_end;
+ }
+
+ if (addr_type == buffer_addr_vir) {
+ addr = (buf_info+buf_idx)->mVirBaseAddr;
+ } else if (addr_type == buffer_addr_phy) {
+ addr = (buf_info+buf_idx)->mPhyBaseAddr;
+ } else if(addr_type == buffer_sharre_fd){
+ addr = (buf_info+buf_idx)->mShareFd;
+ }
+getVirAddr_end:
+ return addr;
+}
+
+int MemManagerBase::dump()
+{
+
+ return 0;
+}
+
+#if (CONFIG_CAMERA_MEM == CAMERA_MEM_ION)
+
+IonMemManager::IonMemManager()
+ :MemManagerBase(),
+ mPreviewData(NULL),
+ mRawData(NULL),
+ mJpegData(NULL),
+ mVideoEncData(NULL),
+ mIonMemMgr(NULL)
+{
+ mIonMemMgr = new IonAlloc(PAGE_SIZE, ION_MODULE_CAM);
+}
+
+IonMemManager::~IonMemManager()
+{
+ if (mPreviewData) {
+ destroyPreviewBuffer();
+ free(mPreviewData);
+ mPreviewData = NULL;
+ }
+ if(mRawData) {
+ destroyRawBuffer();
+ free(mRawData);
+ mRawData = NULL;
+ }
+ if(mJpegData) {
+ destroyJpegBuffer();
+ free(mJpegData);
+ mJpegData = NULL;
+ }
+ if(mVideoEncData) {
+ destroyVideoEncBuffer();
+ free(mVideoEncData);
+ mVideoEncData = NULL;
+ }
+ if(mIonMemMgr)
+ delete mIonMemMgr;
+ mIonMemMgr = NULL;
+
+}
+
+int IonMemManager::createIonBuffer(struct bufferinfo_s* ionbuf)
+{
+ int ret =0;
+ int numBufs;
+ int frame_size;
+ struct ion_buffer_t* tmpalloc = NULL;
+ struct bufferinfo_s* tmp_buf = NULL;
+
+	if (!ionbuf || !mIonMemMgr) {
+		LOGE("ion_alloc malloc buffer failed");
+		ret = -1;
+		goto null_fail;
+	}
+
+ numBufs = ionbuf->mNumBffers;
+ frame_size = ionbuf->mPerBuffersize;
+ ionbuf->mBufferSizes = numBufs*PAGE_ALIGN(frame_size);
+ switch(ionbuf->mBufType)
+ {
+ case PREVIEWBUFFER:
+ tmpalloc = mPreviewData;
+ tmp_buf = mPreviewBufferInfo;
+ break;
+ case RAWBUFFER:
+ tmpalloc = mRawData;
+ tmp_buf = mRawBufferInfo;
+ break;
+ case JPEGBUFFER:
+ tmpalloc = mJpegData;
+ tmp_buf = mJpegBufferInfo;
+ break;
+ case VIDEOENCBUFFER:
+ tmpalloc = mVideoEncData;
+ tmp_buf = mVideoEncBufferInfo;
+ break;
+ default:
+ goto null_fail;
+ }
+
+	memset(tmpalloc,0,sizeof(ion_buffer_t));
+ ret = mIonMemMgr->alloc(ionbuf->mBufferSizes, _ION_HEAP_RESERVE, tmpalloc);
+ if(ret != 0) {
+ LOGE("ion_alloc malloc buffer failed , type is = %d ",ionbuf->mBufType);
+ goto alloc_fail;
+ }
+
+ ionbuf->mPhyBaseAddr = (unsigned long)tmpalloc->phys;
+ ionbuf->mVirBaseAddr = (unsigned long)tmpalloc->virt;
+ ionbuf->mPerBuffersize = PAGE_ALIGN(frame_size);
+ *tmp_buf = *ionbuf;
+
+ return 0;
+
+alloc_fail:
+null_fail:
+	if (tmp_buf)
+		memset(tmp_buf,0,sizeof(struct bufferinfo_s));
+	if (tmpalloc)
+		memset(tmpalloc,0,sizeof(ion_buffer_t));
+	return ret;
+}
+
+void IonMemManager::destroyIonBuffer(buffer_type_enum buftype)
+{
+ struct ion_buffer_t* tmpalloc = NULL;
+
+ switch(buftype)
+ {
+ case PREVIEWBUFFER:
+ tmpalloc = mPreviewData;
+ if(tmpalloc && tmpalloc->virt) {
+ LOG1("free preview buffer success!");
+ mIonMemMgr->free(*tmpalloc);
+ memset(tmpalloc,0,sizeof(ion_buffer_t));
+ } else {
+ if (mPreviewData == NULL) {
+ // LOGE("%s(%d): mPreviewData is NULL",__FUNCTION__,__LINE__);
+ } else {
+ LOGE("mPreviewData->virt:0x%x mPreviewBufferInfo->mVirBaseAddr:0x%x",(long)mPreviewData->virt,mPreviewBufferInfo->mVirBaseAddr);
+ }
+ }
+ memset(&mPreviewBufferInfo,0,sizeof(mPreviewBufferInfo));
+ break;
+ case RAWBUFFER:
+ tmpalloc = mRawData;
+ if(tmpalloc && tmpalloc->virt) {
+ LOG1("free RAWBUFFER buffer success!");
+ mIonMemMgr->free(*tmpalloc);
+ memset(tmpalloc,0,sizeof(ion_buffer_t));
+ } else {
+ if (mRawData == NULL) {
+ // LOGE("%s(%d): mRawData is NULL",__FUNCTION__,__LINE__);
+ } else {
+ LOGE("mRawData->virt:0x%x mRawBufferInfo->mVirBaseAddr:0x%x",(long)mRawData->virt,mRawBufferInfo->mVirBaseAddr);
+ }
+ }
+ memset(&mRawBufferInfo,0,sizeof(mRawBufferInfo));
+ break;
+ case JPEGBUFFER:
+ tmpalloc = mJpegData;
+ if(tmpalloc && tmpalloc->virt) {
+ LOG1("free RAWBUFFER buffer success!");
+ mIonMemMgr->free(*tmpalloc);
+ memset(tmpalloc,0,sizeof(ion_buffer_t));
+ } else {
+ if (mJpegData == NULL) {
+ // LOGE("%s(%d): mJpegData is NULL",__FUNCTION__,__LINE__);
+ } else {
+				LOGE("mJpegData->virt:0x%x mJpegBufferInfo->mVirBaseAddr:0x%x",(long)mJpegData->virt,mJpegBufferInfo->mVirBaseAddr);
+ }
+ }
+ memset(&mJpegBufferInfo,0,sizeof(mJpegBufferInfo));
+ break;
+ case VIDEOENCBUFFER:
+ tmpalloc = mVideoEncData;
+ if(tmpalloc && tmpalloc->virt) {
+ LOG1("free VIDEOENCBUFFER buffer success!");
+ mIonMemMgr->free(*tmpalloc);
+ memset(tmpalloc,0,sizeof(ion_buffer_t));
+ } else {
+ if (mVideoEncData == NULL) {
+ // LOGE("%s(%d): mVideoEncData is NULL",__FUNCTION__,__LINE__);
+ } else {
+ LOGE("mVideoEncData->virt:0x%x mVideoEncBufferInfo->mVirBaseAddr:0x%x",(long)mVideoEncData->virt,mVideoEncBufferInfo->mVirBaseAddr);
+ }
+ }
+ memset(&mVideoEncBufferInfo,0,sizeof(mVideoEncBufferInfo));
+ break;
+
+ default:
+ LOGE("buffer type is wrong !");
+ break;
+ }
+}
+
+int IonMemManager::createVideoEncBuffer(struct bufferinfo_s* videoencbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret;
+ Mutex::Autolock lock(mLock);
+
+ if(videoencbuf->mBufType != VIDEOENCBUFFER)
+ LOGE("the type is not VIDEOENCBUFFER");
+
+ if(!mVideoEncData) {
+ mVideoEncData = (ion_buffer_t*)malloc(sizeof(ion_buffer_t));
+ } else if(mVideoEncData->virt) {
+		LOG1("free the previously allocated video buffer first");
+ destroyVideoEncBuffer();
+ }
+
+ memset(mVideoEncData,0,sizeof(ion_buffer_t));
+
+ ret = createIonBuffer(videoencbuf);
+ if (ret == 0) {
+ LOG1("Video buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mVideoEncBufferInfo->mPhyBaseAddr,mVideoEncBufferInfo->mVirBaseAddr,mVideoEncBufferInfo->mBufferSizes);
+ } else {
+ LOGE("Video buffer alloc failed");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+}
+int IonMemManager::destroyVideoEncBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+
+ destroyIonBuffer(VIDEOENCBUFFER);
+
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+
+}
+
+int IonMemManager::createPreviewBuffer(struct bufferinfo_s* previewbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret;
+ Mutex::Autolock lock(mLock);
+
+ if(previewbuf->mBufType != PREVIEWBUFFER)
+ LOGE("the type is not PREVIEWBUFFER");
+
+ if(!mPreviewData) {
+ mPreviewData = (ion_buffer_t*)malloc(sizeof(ion_buffer_t));
+ } else if(mPreviewData->virt) {
+		LOG1("free the previously allocated preview buffer first");
+ destroyPreviewBuffer();
+ }
+
+ memset(mPreviewData,0,sizeof(ion_buffer_t));
+
+ ret = createIonBuffer(previewbuf);
+ if (ret == 0) {
+ LOG1("Preview buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mPreviewBufferInfo->mPhyBaseAddr,mPreviewBufferInfo->mVirBaseAddr,mPreviewBufferInfo->mBufferSizes);
+ } else {
+ LOGE("%s(%d): Preview buffer alloc failed",__FUNCTION__,__LINE__);
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+}
+int IonMemManager::destroyPreviewBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+
+ destroyIonBuffer(PREVIEWBUFFER);
+
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+
+}
+int IonMemManager::createRawBuffer(struct bufferinfo_s* rawbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret;
+ Mutex::Autolock lock(mLock);
+
+ if (rawbuf->mBufType != RAWBUFFER)
+ LOGE("%s(%d): the type is not RAWBUFFER",__FUNCTION__,__LINE__);
+
+ if (!mRawData) {
+ mRawData = (ion_buffer_t*)malloc(sizeof(ion_buffer_t));
+ } else if(mRawData->virt) {
+		LOG1("free the previously allocated raw buffer first");
+ destroyRawBuffer();
+ }
+ memset(mRawData,0,sizeof(ion_buffer_t));
+
+ ret = createIonBuffer(rawbuf);
+ if (ret == 0) {
+ LOG1("Raw buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mRawBufferInfo->mPhyBaseAddr,mRawBufferInfo->mVirBaseAddr,mRawBufferInfo->mBufferSizes);
+ } else {
+ LOGE("Raw buffer alloc failed");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+
+}
+int IonMemManager::destroyRawBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+ destroyIonBuffer(RAWBUFFER);
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+ int IonMemManager::createJpegBuffer(struct bufferinfo_s* jpegbuf)
+ {
+ LOG_FUNCTION_NAME
+ int ret;
+ Mutex::Autolock lock(mLock);
+
+ if(jpegbuf->mBufType != JPEGBUFFER)
+ LOGE("the type is not JPEGBUFFER");
+
+ if(!mJpegData) {
+ mJpegData = (ion_buffer_t*)malloc(sizeof(ion_buffer_t));
+ } else if(mJpegData->virt) {
+		LOG1("free the previously allocated jpeg buffer first");
+ destroyJpegBuffer();
+ }
+ memset(mJpegData,0,sizeof(ion_buffer_t));
+
+ ret = createIonBuffer(jpegbuf);
+ if (ret == 0) {
+ LOG1("Jpeg buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mJpegBufferInfo->mPhyBaseAddr,mJpegBufferInfo->mVirBaseAddr,mJpegBufferInfo->mBufferSizes);
+ } else {
+ LOGE("Jpeg buffer alloc failed");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+
+ }
+int IonMemManager::destroyJpegBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+ destroyIonBuffer(JPEGBUFFER);
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+
+}
+int IonMemManager::flushCacheMem(buffer_type_enum buftype,unsigned int offset, unsigned int len)
+{
+ Mutex::Autolock lock(mLock);
+ ion_buffer_t data;
+
+ switch(buftype)
+ {
+ case PREVIEWBUFFER:
+ data = *mPreviewData;
+ break;
+ case RAWBUFFER:
+ data = *mRawData;
+
+ break;
+ case JPEGBUFFER:
+ data = *mJpegData;
+ break;
+ case VIDEOENCBUFFER:
+ data = *mVideoEncData;
+ break;
+ default:
+ break;
+ }
+
+ mIonMemMgr->cache_op(data, ION_FLUSH_CACHE);
+
+ return 0;
+}
+#endif
+
+#if (CONFIG_CAMERA_MEM == CAMERA_MEM_IONDMA)
+IonDmaMemManager::IonDmaMemManager(bool iommuEnabled)
+ :MemManagerBase(),
+ mPreviewData(NULL),
+ mRawData(NULL),
+ mJpegData(NULL),
+ mVideoEncData(NULL),
+ client_fd(-1),
+ mIommuEnabled(iommuEnabled)
+{
+ client_fd = ion_open();
+}
+
+IonDmaMemManager::~IonDmaMemManager()
+{
+ if (mPreviewData) {
+ destroyPreviewBuffer();
+ free(mPreviewData);
+ mPreviewData = NULL;
+ }
+ if(mRawData) {
+ destroyRawBuffer();
+ free(mRawData);
+ mRawData = NULL;
+ }
+ if(mJpegData) {
+ destroyJpegBuffer();
+ free(mJpegData);
+ mJpegData = NULL;
+ }
+ if(mVideoEncData) {
+ destroyVideoEncBuffer();
+ free(mVideoEncData);
+ mVideoEncData = NULL;
+ }
+ if(client_fd != -1)
+ ion_close(client_fd);
+
+
+}
+
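+/*
+ * IonDmaMemManager::createIonBuffer allocates mNumBffers ion buffers and maps
+ * them into this process.  Without an IOMMU (or when mIsForceIommuBuf is not
+ * set) the buffers come from the CMA/carveout heap and their physical address
+ * is queried with ion_get_phys(); on the IOMMU path the vmalloc heap is used
+ * and the shared fd stands in for the physical address.
+ */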
+int IonDmaMemManager::createIonBuffer(struct bufferinfo_s* ionbuf)
+{
+ int ret =0,i = 0;
+ int numBufs;
+ int frame_size;
+ camera_ionbuf_t* tmpalloc = NULL;
+ struct bufferinfo_s* tmp_buf = NULL;
+ #ifdef ROCKCHIP_ION_VERSION
+ ion_user_handle_t handle = 0;
+ #else
+ struct ion_handle* handle = NULL;
+ #endif
+ int map_fd;
+ long temp_handle = 0;
+ unsigned long vir_addr = 0;
+
+
+ if (!ionbuf) {
+ LOGE("ion_alloc malloc buffer failed");
+ return -1;
+ }
+
+ numBufs = ionbuf->mNumBffers;
+ frame_size = ionbuf->mPerBuffersize;
+ ionbuf->mBufferSizes = numBufs*PAGE_ALIGN(frame_size);
+ switch(ionbuf->mBufType)
+ {
+ case PREVIEWBUFFER:
+ tmpalloc = mPreviewData ;
+ if((tmp_buf = (struct bufferinfo_s*)malloc(numBufs*sizeof(struct bufferinfo_s))) != NULL){
+ mPreviewBufferInfo = tmp_buf;
+ }else{
+ LOGE("ion_alloc malloc buffer failed");
+ return -1;
+ }
+ break;
+ case RAWBUFFER:
+ tmpalloc = mRawData;
+
+ if((tmp_buf = (struct bufferinfo_s*)malloc(numBufs*sizeof(struct bufferinfo_s))) != NULL){
+ mRawBufferInfo = tmp_buf;
+ }else{
+ LOGE("ion_alloc malloc buffer failed");
+ return -1;
+ }
+ break;
+ case JPEGBUFFER:
+ tmpalloc = mJpegData;
+
+ if((tmp_buf = (struct bufferinfo_s*)malloc(numBufs*sizeof(struct bufferinfo_s))) != NULL ){
+ mJpegBufferInfo = tmp_buf;
+ }else{
+ LOGE("ion_alloc malloc buffer failed");
+ return -1;
+ }
+ break;
+ case VIDEOENCBUFFER:
+ tmpalloc = mVideoEncData ;
+
+ if((tmp_buf = (struct bufferinfo_s*)malloc(numBufs*sizeof(struct bufferinfo_s))) != NULL){
+ mVideoEncBufferInfo = tmp_buf;
+ }else{
+ LOGE("ion_alloc malloc buffer failed");
+ return -1;
+ }
+ break;
+ default:
+ return -1;
+ }
+
+ for(i = 0;i < numBufs;i++){
+ memset(tmpalloc,0,sizeof(struct camera_ionbuf_s));
+
+ if((!mIommuEnabled) || (!ionbuf->mIsForceIommuBuf)){
+ #if defined(TARGET_RK3188)
+ ret = ion_alloc(client_fd, ionbuf->mPerBuffersize, PAGE_SIZE, ION_HEAP(ION_CARVEOUT_HEAP_ID), 0, &handle);
+ #else
+ ret = ion_alloc(client_fd, ionbuf->mPerBuffersize, PAGE_SIZE, ION_HEAP(ION_CMA_HEAP_ID), 0, &handle);
+ #endif
+ }else{
+ ret = ion_alloc(client_fd, ionbuf->mPerBuffersize, PAGE_SIZE, ION_HEAP(ION_VMALLOC_HEAP_ID), 0, &handle);
+ }
+ if (ret) {
+ LOGE("ion alloc failed\n");
+ break;
+ }
+
+ LOG1("handle %d\n", handle);
+
+ ret = ion_share(client_fd,handle,&map_fd);
+ if (ret) {
+ LOGE("ion map failed\n");
+ ion_free(client_fd,handle);
+ break;
+ }
+
+		vir_addr = (unsigned long )mmap(NULL, ionbuf->mPerBuffersize, PROT_READ | PROT_WRITE, MAP_SHARED, map_fd, 0);
+		if (vir_addr == (unsigned long)MAP_FAILED) {	/* mmap returns MAP_FAILED, not NULL, on error */
+			LOGE("ion mmap failed\n");
+ ret = -1;
+ ion_free(client_fd,handle);
+ break;
+ }
+
+ if((!mIommuEnabled) || (!ionbuf->mIsForceIommuBuf)){
+ ret=ion_get_phys(client_fd,handle,&(tmpalloc->phy_addr));
+ if(ret<0)
+ LOGE("ion_get_phys failed\n");
+ }else{
+ tmpalloc->phy_addr = map_fd;
+ }
+ tmpalloc->size = ionbuf->mPerBuffersize;
+ tmpalloc->vir_addr = vir_addr;
+ temp_handle = handle;
+ tmpalloc->ion_hdl = (void*)temp_handle;
+ tmpalloc->map_fd = map_fd;
+
+
+ ionbuf->mPhyBaseAddr = (unsigned long)tmpalloc->phy_addr;
+ ionbuf->mVirBaseAddr = (unsigned long)tmpalloc->vir_addr;
+ ionbuf->mPerBuffersize = PAGE_ALIGN(frame_size);
+ ionbuf->mShareFd = (unsigned int)tmpalloc->map_fd;
+ *tmp_buf = *ionbuf;
+ tmp_buf++;
+ tmpalloc++;
+
+ }
+ if(ret < 0){
+		LOGE("ion buffer alloc failed, rolling back already-allocated buffers");
+ while(--i >= 0){
+ --tmpalloc;
+ --tmp_buf;
+ munmap((void *)tmpalloc->vir_addr, tmpalloc->size);
+ temp_handle = (long)tmpalloc->ion_hdl;
+ ion_free(client_fd, (ion_user_handle_t)temp_handle);
+ }
+ free(tmpalloc);
+ free(tmp_buf);
+ }
+ return ret;
+}
+
+void IonDmaMemManager::destroyIonBuffer(buffer_type_enum buftype)
+{
+ camera_ionbuf_t* tmpalloc = NULL;
+ int err = 0;
+ struct bufferinfo_s* tmp_buf = NULL;
+ long temp_handle = 0;
+
+ switch(buftype)
+ {
+ case PREVIEWBUFFER:
+ tmpalloc = mPreviewData;
+ tmp_buf = mPreviewBufferInfo;
+ break;
+ case RAWBUFFER:
+ tmpalloc = mRawData;
+ tmp_buf = mRawBufferInfo;
+ break;
+ case JPEGBUFFER:
+ tmpalloc = mJpegData;
+ tmp_buf = mJpegBufferInfo;
+ break;
+ case VIDEOENCBUFFER:
+ tmpalloc = mVideoEncData;
+ tmp_buf = mVideoEncBufferInfo;
+ break;
+
+ default:
+ LOGE("buffer type is wrong !");
+ break;
+ }
+
+
+ for(unsigned int i = 0;(tmp_buf && (i < tmp_buf->mNumBffers));i++){
+ if(tmpalloc && tmpalloc->vir_addr) {
+ err = munmap((void *)tmpalloc->vir_addr, tmpalloc->size);
+ if (err) {
+ LOGE("munmap failed\n");
+ return;
+ }
+
+ close(tmpalloc->map_fd);
+ temp_handle = (long)tmpalloc->ion_hdl;
+ err = ion_free(client_fd, (ion_user_handle_t)temp_handle);
+ }
+ tmpalloc++;
+ }
+
+ switch(buftype)
+ {
+ case PREVIEWBUFFER:
+ free(mPreviewData);
+ mPreviewData = NULL;
+ free(mPreviewBufferInfo);
+ mPreviewBufferInfo = NULL;
+ break;
+ case RAWBUFFER:
+ free(mRawData);
+ mRawData = NULL;
+ free(mRawBufferInfo);
+ mRawBufferInfo = NULL;
+ break;
+ case JPEGBUFFER:
+ free(mJpegData);
+ mJpegData = NULL;
+ free(mJpegBufferInfo);
+ mJpegBufferInfo = NULL;
+ break;
+ case VIDEOENCBUFFER:
+ free(mVideoEncData);
+ mVideoEncData = NULL;
+ free(mVideoEncBufferInfo);
+ mVideoEncBufferInfo = NULL;
+ break;
+
+ default:
+ LOGE("buffer type is wrong !");
+ break;
+ }
+
+
+}
+
+
+int IonDmaMemManager::createVideoEncBuffer(struct bufferinfo_s* videoencbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret;
+ Mutex::Autolock lock(mLock);
+
+ if(videoencbuf->mBufType != VIDEOENCBUFFER)
+ LOGE("the type is not VIDEOENCBUFFER");
+
+ if(!mVideoEncData) {
+ mVideoEncData = (camera_ionbuf_t*)malloc(sizeof(camera_ionbuf_t) * videoencbuf->mNumBffers);
+ } else if(mVideoEncData->vir_addr) {
+		LOG1("free the previously allocated video buffer first");
+ destroyVideoEncBuffer();
+ }
+
+ memset(mVideoEncData,0,sizeof(camera_ionbuf_t)* videoencbuf->mNumBffers);
+
+ ret = createIonBuffer(videoencbuf);
+ if (ret == 0) {
+ LOG1("Video buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mVideoEncBufferInfo->mPhyBaseAddr,mVideoEncBufferInfo->mVirBaseAddr,mVideoEncBufferInfo->mBufferSizes);
+ } else {
+ LOGE("Video buffer alloc failed");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+}
+int IonDmaMemManager::destroyVideoEncBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+
+ destroyIonBuffer(VIDEOENCBUFFER);
+
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+
+}
+
+int IonDmaMemManager::createPreviewBuffer(struct bufferinfo_s* previewbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret;
+ Mutex::Autolock lock(mLock);
+
+ if(previewbuf->mBufType != PREVIEWBUFFER)
+ LOGE("the type is not PREVIEWBUFFER");
+
+ if(!mPreviewData) {
+ mPreviewData = (camera_ionbuf_t*)malloc(sizeof(camera_ionbuf_t) * previewbuf->mNumBffers);
+ if(!mPreviewData){
+ LOGE("malloc mPreviewData failed!");
+ ret = -1;
+ return ret;
+ }
+ } else if(mPreviewData->vir_addr) {
+		LOG1("free the previously allocated preview buffer first");
+ destroyPreviewBuffer();
+ }
+
+ memset(mPreviewData,0,sizeof(camera_ionbuf_t)* previewbuf->mNumBffers);
+
+ ret = createIonBuffer(previewbuf);
+ if (ret == 0) {
+ LOG1("Preview buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mPreviewBufferInfo->mPhyBaseAddr,mPreviewBufferInfo->mVirBaseAddr,mPreviewBufferInfo->mBufferSizes);
+ } else {
+ LOGE("Preview buffer alloc failed");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+}
+int IonDmaMemManager::destroyPreviewBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+
+ destroyIonBuffer(PREVIEWBUFFER);
+
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+
+}
+int IonDmaMemManager::createRawBuffer(struct bufferinfo_s* rawbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret;
+ Mutex::Autolock lock(mLock);
+
+ if (rawbuf->mBufType != RAWBUFFER)
+ LOGE("the type is not RAWBUFFER");
+
+ if (!mRawData) {
+ mRawData = (camera_ionbuf_t*)malloc(sizeof(camera_ionbuf_t) * rawbuf->mNumBffers);
+ } else if(mRawData->vir_addr) {
+		LOG1("free the previously allocated raw buffer first");
+ destroyRawBuffer();
+ }
+ memset(mRawData,0,sizeof(camera_ionbuf_t)* rawbuf->mNumBffers);
+
+ ret = createIonBuffer(rawbuf);
+ if (ret == 0) {
+ LOG1("Raw buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mRawBufferInfo->mPhyBaseAddr,mRawBufferInfo->mVirBaseAddr,mRawBufferInfo->mBufferSizes);
+ } else {
+ LOGE("Raw buffer alloc failed");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+
+}
+int IonDmaMemManager::destroyRawBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+ destroyIonBuffer(RAWBUFFER);
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+
+ int IonDmaMemManager::createJpegBuffer(struct bufferinfo_s* jpegbuf)
+ {
+ LOG_FUNCTION_NAME
+ int ret;
+ Mutex::Autolock lock(mLock);
+
+ if(jpegbuf->mBufType != JPEGBUFFER)
+ LOGE("the type is not JPEGBUFFER");
+
+ if(!mJpegData) {
+ mJpegData = (camera_ionbuf_t*)malloc(sizeof(camera_ionbuf_t) * jpegbuf->mNumBffers);
+ } else if(mJpegData->vir_addr) {
+		LOG1("free the previously allocated jpeg buffer first");
+ destroyJpegBuffer();
+ }
+ memset(mJpegData,0,sizeof(camera_ionbuf_t)* jpegbuf->mNumBffers);
+
+ ret = createIonBuffer(jpegbuf);
+ if (ret == 0) {
+ LOG1("Jpeg buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mJpegBufferInfo->mPhyBaseAddr,mJpegBufferInfo->mVirBaseAddr,mJpegBufferInfo->mBufferSizes);
+ } else {
+ LOGE("Jpeg buffer alloc failed");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+
+ }
+int IonDmaMemManager::destroyJpegBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+ destroyIonBuffer(JPEGBUFFER);
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+
+}
+int IonDmaMemManager::flushCacheMem(buffer_type_enum buftype,unsigned int offset, unsigned int len)
+{
+ Mutex::Autolock lock(mLock);
+
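+ /* cache maintenance is currently a no-op for ION-DMA buffers; the
+ * arguments are kept only to satisfy the MemManagerBase interface */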
+ return 0;
+}
+
+
+#endif
+
+
+/******************ION BUFFER END*******************/
+
+/******************GRALLOC DRM BUFFER START*******************/
+
+#if (CONFIG_CAMERA_MEM == CAMERA_MEM_GRALLOC_DRM)
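+/* GrallocDrmMemManager allocates camera buffers through the gralloc/DRM
+ * backend exposed by camera_mem: get_cam_ops(CAM_MEM_TYPE_GRALLOC) returns
+ * the allocator ops, init() opens an allocator handle (with iommu and
+ * sw/hw read-write flags), and every buffer is described by a
+ * cam_mem_info_t carrying its physical address, virtual address and share fd. */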
+GrallocDrmMemManager::GrallocDrmMemManager(bool iommuEnabled)
+ :MemManagerBase(),
+ mPreviewData(NULL),
+ mRawData(NULL),
+ mJpegData(NULL),
+ mVideoEncData(NULL),
+ mHandle(NULL),
+ mOps(NULL)
+{
+ mOps = get_cam_ops(CAM_MEM_TYPE_GRALLOC);
+
+ if (mOps)
+ mHandle = mOps->init(iommuEnabled ? 1:0,
+ CAM_MEM_FLAG_HW_WRITE | CAM_MEM_FLAG_HW_READ | CAM_MEM_FLAG_SW_WRITE | CAM_MEM_FLAG_SW_READ,
+ 0);
+}
+
+GrallocDrmMemManager::~GrallocDrmMemManager()
+{
+ if (mPreviewData) {
+ destroyPreviewBuffer();
+ free(mPreviewData);
+ mPreviewData = NULL;
+ }
+ if(mRawData) {
+ destroyRawBuffer();
+ free(mRawData);
+ mRawData = NULL;
+ }
+ if(mJpegData) {
+ destroyJpegBuffer();
+ free(mJpegData);
+ mJpegData = NULL;
+ }
+ if(mVideoEncData) {
+ destroyVideoEncBuffer();
+ free(mVideoEncData);
+ mVideoEncData = NULL;
+ }
+ if(mHandle)
+ mOps->deInit(mHandle);
+
+}
+
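+/* Common allocation path for all buffer types: pick the data/bufferinfo
+ * arrays that match grallocbuf->mBufType, then allocate each buffer with
+ * mOps->alloc() and record its physical/virtual address and share fd in the
+ * corresponding bufferinfo_s entry. Note that only the last buffer's
+ * addresses remain in *grallocbuf itself. */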
+int GrallocDrmMemManager::createGrallocDrmBuffer(struct bufferinfo_s* grallocbuf)
+{
+ int ret =0,i = 0;
+ int numBufs;
+ int frame_size;
+ cam_mem_info_t** tmpalloc = NULL;
+ struct bufferinfo_s* tmp_buf = NULL;
+
+ if (!grallocbuf) {
+ LOGE("gralloc_alloc malloc buffer failed");
+ return -1;
+ }
+
+ numBufs = grallocbuf->mNumBffers;
+ frame_size = grallocbuf->mPerBuffersize;
+ grallocbuf->mBufferSizes = numBufs*PAGE_ALIGN(frame_size);
+ switch(grallocbuf->mBufType)
+ {
+ case PREVIEWBUFFER:
+ tmpalloc = mPreviewData ;
+ if((tmp_buf = (struct bufferinfo_s*)malloc(numBufs*sizeof(struct bufferinfo_s))) != NULL){
+ mPreviewBufferInfo = tmp_buf;
+ }else{
+ LOGE("gralloc_alloc malloc buffer failed");
+ return -1;
+ }
+ break;
+ case RAWBUFFER:
+ tmpalloc = mRawData;
+ if((tmp_buf = (struct bufferinfo_s*)malloc(numBufs*sizeof(struct bufferinfo_s))) != NULL){
+ mRawBufferInfo = tmp_buf;
+ }else{
+ LOGE("gralloc_alloc malloc buffer failed");
+ return -1;
+ }
+ break;
+ case JPEGBUFFER:
+ tmpalloc = mJpegData;
+ if((tmp_buf = (struct bufferinfo_s*)malloc(numBufs*sizeof(struct bufferinfo_s))) != NULL ){
+ mJpegBufferInfo = tmp_buf;
+ }else{
+ LOGE("gralloc_alloc malloc buffer failed");
+ return -1;
+ }
+ break;
+ case VIDEOENCBUFFER:
+ tmpalloc = mVideoEncData ;
+
+ if((tmp_buf = (struct bufferinfo_s*)malloc(numBufs*sizeof(struct bufferinfo_s))) != NULL){
+ mVideoEncBufferInfo = tmp_buf;
+ }else{
+ LOGE("gralloc_alloc malloc buffer failed");
+ return -1;
+ }
+ break;
+ default:
+ return -1;
+ }
+
+ for(i = 0;i < numBufs;i++){
+ *tmpalloc = mOps->alloc(mHandle,grallocbuf->mPerBuffersize);
+ if (*tmpalloc) {
+ #if 0
+ //if iommu needed,should get camsys_fd first
+ //get phy and iommu address
+ if (mOps->iommu_map(mHandle,*tmpalloc)) {
+ mOps->free(mHandle,*tmpalloc);
+ LOGE("gralloc mOps->iommu_map failed");
+ ret = -1;
+ break;
+ }
+ #endif
+
+ } else {
+ LOGE("gralloc mOps->alloc failed");
+ ret = -1;
+ break;
+ }
+ grallocbuf->mPhyBaseAddr = (unsigned long)((*tmpalloc)->phy_addr);
+ grallocbuf->mVirBaseAddr = (unsigned long)((*tmpalloc)->vir_addr);
+ grallocbuf->mPerBuffersize = PAGE_ALIGN(frame_size);
+ grallocbuf->mShareFd = (unsigned int)((*tmpalloc)->fd);
+ *tmp_buf = *grallocbuf;
+ tmp_buf++;
+ tmpalloc++;
+
+ }
+ if(ret < 0){
+ LOGE("createGrallocDrmBuffer failed, releasing the buffers already allocated");
+ while(--i >= 0){
+ --tmpalloc;
+ --tmp_buf;
+#if 0
+ //if iommu needed,should get camsys_fd first
+ mOps->iommu_map(mHandle,*tmpalloc);
+#endif
+ mOps->free(mHandle,*tmpalloc);
+ *tmpalloc = NULL;
+ }
+ /* the data/bufferinfo arrays stay owned by the corresponding members
+ (mPreviewData/mPreviewBufferInfo etc.) and are released later in
+ destroyGrallocDrmBuffer(), so they must not be freed here */
+ }
+ return ret;
+}
+
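+/* Releases every cam_mem_info_t of the given type via mOps->free() and then
+ * frees the bookkeeping arrays themselves. */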
+void GrallocDrmMemManager::destroyGrallocDrmBuffer(buffer_type_enum buftype)
+{
+ cam_mem_info_t** tmpalloc = NULL;
+ int err = 0;
+ struct bufferinfo_s* tmp_buf = NULL;
+ long temp_handle = 0;
+
+ switch(buftype)
+ {
+ case PREVIEWBUFFER:
+ tmpalloc = mPreviewData;
+ tmp_buf = mPreviewBufferInfo;
+ break;
+ case RAWBUFFER:
+ tmpalloc = mRawData;
+ tmp_buf = mRawBufferInfo;
+ break;
+ case JPEGBUFFER:
+ tmpalloc = mJpegData;
+ tmp_buf = mJpegBufferInfo;
+ break;
+ case VIDEOENCBUFFER:
+ tmpalloc = mVideoEncData;
+ tmp_buf = mVideoEncBufferInfo;
+ break;
+
+ default:
+ LOGE("unknown buffer type!");
+ return;
+ }
+
+
+ for(unsigned int i = 0;(tmp_buf && (i < tmp_buf->mNumBffers));i++){
+ if(*tmpalloc && (*tmpalloc)->vir_addr) {
+#if 0
+ //if iommu needed,should get camsys_fd first
+ mOps->iommu_map(mHandle,*tmpalloc);
+#endif
+ mOps->free(mHandle,*tmpalloc);
+ }
+ tmpalloc++;
+ }
+
+ switch(buftype)
+ {
+ case PREVIEWBUFFER:
+ free(mPreviewData);
+ mPreviewData = NULL;
+ free(mPreviewBufferInfo);
+ mPreviewBufferInfo = NULL;
+ break;
+ case RAWBUFFER:
+ free(mRawData);
+ mRawData = NULL;
+ free(mRawBufferInfo);
+ mRawBufferInfo = NULL;
+ break;
+ case JPEGBUFFER:
+ free(mJpegData);
+ mJpegData = NULL;
+ free(mJpegBufferInfo);
+ mJpegBufferInfo = NULL;
+ break;
+ case VIDEOENCBUFFER:
+ free(mVideoEncData);
+ mVideoEncData = NULL;
+ free(mVideoEncBufferInfo);
+ mVideoEncBufferInfo = NULL;
+ break;
+
+ default:
+ LOGE("buffer type is wrong !");
+ break;
+ }
+
+
+}
+
+
+int GrallocDrmMemManager::createVideoEncBuffer(struct bufferinfo_s* videoencbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret;
+ Mutex::Autolock lock(mLock);
+
+ if(videoencbuf->mBufType != VIDEOENCBUFFER)
+ LOGE("the type is not VIDEOENCBUFFER");
+
+ if(!mVideoEncData) {
+ mVideoEncData = (cam_mem_info_t**)malloc(sizeof(cam_mem_info_t*) * videoencbuf->mNumBffers);
+ if(!mVideoEncData) {
+ LOGE("malloc mVideoEncData failed!");
+ return -1;
+ }
+ } else if((*mVideoEncData)->vir_addr) {
+ LOG1("Free the previously allocated video buffers first");
+ destroyVideoEncBuffer();
+ }
+
+ memset(mVideoEncData,0,sizeof(cam_mem_info_t*)* videoencbuf->mNumBffers);
+
+ ret = createGrallocDrmBuffer(videoencbuf);
+ if (ret == 0) {
+ LOG1("Video buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mVideoEncBufferInfo->mPhyBaseAddr,mVideoEncBufferInfo->mVirBaseAddr,mVideoEncBufferInfo->mBufferSizes);
+ } else {
+ LOGE("Video buffer alloc failed");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+}
+int GrallocDrmMemManager::destroyVideoEncBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+
+ destroyGrallocDrmBuffer(VIDEOENCBUFFER);
+
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+
+}
+
+int GrallocDrmMemManager::createPreviewBuffer(struct bufferinfo_s* previewbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret;
+ Mutex::Autolock lock(mLock);
+
+ if(previewbuf->mBufType != PREVIEWBUFFER)
+ LOGE("the type is not PREVIEWBUFFER");
+
+ if(!mPreviewData) {
+ mPreviewData = (cam_mem_info_t**)malloc(sizeof(cam_mem_info_t*) * previewbuf->mNumBffers);
+ if(!mPreviewData){
+ LOGE("malloc mPreviewData failed!");
+ ret = -1;
+ return ret;
+ }
+ } else if((*mPreviewData)->vir_addr) {
+ LOG1("FREE the preview buffer alloced before firstly");
+ destroyPreviewBuffer();
+ }
+
+ memset(mPreviewData,0,sizeof(cam_mem_info_t*)* previewbuf->mNumBffers);
+
+ ret = createGrallocDrmBuffer(previewbuf);
+ if (ret == 0) {
+ LOG1("Preview buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mPreviewBufferInfo->mPhyBaseAddr,mPreviewBufferInfo->mVirBaseAddr,mPreviewBufferInfo->mBufferSizes);
+ } else {
+ LOGE("Preview buffer alloc failed");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+}
+int GrallocDrmMemManager::destroyPreviewBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+
+ destroyGrallocDrmBuffer(PREVIEWBUFFER);
+
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+
+}
+
+int GrallocDrmMemManager::createRawBuffer(struct bufferinfo_s* rawbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret;
+ Mutex::Autolock lock(mLock);
+
+ if (rawbuf->mBufType != RAWBUFFER)
+ LOGE("the type is not RAWBUFFER");
+
+ if (!mRawData) {
+ mRawData = (cam_mem_info_t**)malloc(sizeof(cam_mem_info_t*) * rawbuf->mNumBffers);
+ if (!mRawData) {
+ LOGE("malloc mRawData failed!");
+ return -1;
+ }
+ } else if((*mRawData)->vir_addr) {
+ LOG1("Free the previously allocated raw buffers first");
+ destroyRawBuffer();
+ }
+ memset(mRawData,0,sizeof(cam_mem_info_t*)* rawbuf->mNumBffers);
+
+ ret = createGrallocDrmBuffer(rawbuf);
+ if (ret == 0) {
+ LOG1("Raw buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mRawBufferInfo->mPhyBaseAddr,mRawBufferInfo->mVirBaseAddr,mRawBufferInfo->mBufferSizes);
+ } else {
+ LOGE("Raw buffer alloc failed");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+
+}
+int GrallocDrmMemManager::destroyRawBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+ destroyGrallocDrmBuffer(RAWBUFFER);
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+
+int GrallocDrmMemManager::createJpegBuffer(struct bufferinfo_s* jpegbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret;
+ Mutex::Autolock lock(mLock);
+
+ if(jpegbuf->mBufType != JPEGBUFFER)
+ LOGE("the type is not JPEGBUFFER");
+
+ if(!mJpegData) {
+ mJpegData = (cam_mem_info_t**)malloc(sizeof(cam_mem_info_t*) * jpegbuf->mNumBffers);
+ if(!mJpegData) {
+ LOGE("malloc mJpegData failed!");
+ return -1;
+ }
+ } else if((*mJpegData)->vir_addr) {
+ LOG1("Free the previously allocated jpeg buffers first");
+ destroyJpegBuffer();
+ }
+ memset(mJpegData,0,sizeof(cam_mem_info_t*)* jpegbuf->mNumBffers);
+
+ ret = createGrallocDrmBuffer(jpegbuf);
+ if (ret == 0) {
+ LOG1("Jpeg buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mJpegBufferInfo->mPhyBaseAddr,mJpegBufferInfo->mVirBaseAddr,mJpegBufferInfo->mBufferSizes);
+ } else {
+ LOGE("Jpeg buffer alloc failed");
+ }
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+}
+int GrallocDrmMemManager::destroyJpegBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+ destroyGrallocDrmBuffer(JPEGBUFFER);
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+
+}
+int GrallocDrmMemManager::flushCacheMem(buffer_type_enum buftype,unsigned int offset, unsigned int len)
+{
+ Mutex::Autolock lock(mLock);
+
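+ /* cache maintenance is currently a no-op for gralloc/DRM buffers; the
+ * arguments are kept only to satisfy the MemManagerBase interface */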
+ return 0;
+}
+#endif
+/******************GRALLOC DRM BUFFER END*******************/
+
+/*****************pmem buffer start*******************/
+#if (CONFIG_CAMERA_MEM == CAMERA_MEM_PMEM)
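+/* PmemManager carves all camera buffers out of a single PMEM heap:
+ * preview buffers are mapped from the start of the heap, raw buffers
+ * directly behind them, and jpeg buffers from the end of the heap
+ * (mPmemSize - jpeg size). The physical base of the heap is queried with
+ * PMEM_GET_PHYS so per-buffer physical addresses can be derived from the
+ * IMemory offsets. */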
+PmemManager::PmemManager(char* devpath)
+ :MemManagerBase(),
+ mPmemFd(-1),
+ mPmemSize(0),
+ mPmemHeapPhyBase(0),
+ mMemHeap(NULL),
+ mMemHeapPmem(NULL),
+ mJpegBuffer(NULL),
+ mRawBuffer(NULL),
+ mPreviewBuffer(NULL)
+{
+ initPmem(devpath);
+}
+PmemManager::~PmemManager()
+{
+ deinitPmem();
+}
+int PmemManager::initPmem(char* devpath)
+{
+ int pmem_fd;
+ struct pmem_region sub;
+ int err = 0;
+ Mutex::Autolock lock(mLock);
+
+ if(mPmemFd > 0) {
+ LOGE(" PMEM has been initialized");
+ return err;
+ }
+
+ pmem_fd = open(devpath, O_RDWR);
+ if (pmem_fd < 0) {
+ LOGE("Open the PMEM device(%s): %s", devpath, strerror(errno));
+ err = -1;
+ goto exit;
+ }
+
+ ioctl(pmem_fd, PMEM_GET_TOTAL_SIZE, &sub);
+ mPmemSize = sub.len;
+
+ if (pmem_fd > 0) {
+ close(pmem_fd);
+ pmem_fd = -1;
+ }
+
+ mMemHeap = new MemoryHeapBase(devpath,mPmemSize,0);
+ mPmemFd = mMemHeap->getHeapID();
+ if (mPmemFd < 0) {
+ LOGE("allocate mMemHeap from %s failed",devpath);
+ err = -1;
+ goto exit;
+ }
+
+ if (ioctl(mPmemFd,PMEM_GET_PHYS, &sub)) {
+ LOGE("Obtain %s physical address failed",devpath);
+ err = -1;
+ goto exit;
+ } else {
+ mPmemHeapPhyBase = sub.offset;
+ }
+
+ mMemHeapPmem = new MemoryHeapPmem(mMemHeap,0);
+
+exit:
+ if (err < 0) {
+ if (mMemHeapPmem != NULL) {
+ mMemHeapPmem.clear();
+ mMemHeapPmem = NULL;
+ }
+
+ if (mMemHeap != NULL) {
+ mMemHeap.clear();
+ mMemHeap = NULL;
+ }
+ }
+ if (pmem_fd > 0) {
+ close(pmem_fd);
+ pmem_fd = -1;
+ }
+ return err;
+}
+
+int PmemManager::deinitPmem()
+{
+ Mutex::Autolock lock(mLock);
+
+ if (mMemHeapPmem != NULL) {
+ mMemHeapPmem.clear();
+ mMemHeapPmem = NULL;
+ }
+ if (mMemHeap != NULL) {
+ mMemHeap.clear();
+ mMemHeap = NULL;
+ }
+ mPmemHeapPhyBase = 0;
+
+ if( mPmemFd > 0 ) {
+ close(mPmemFd);
+ mPmemFd = -1;
+ }
+ return 0;
+}
+
+int PmemManager::createPreviewBuffer(struct bufferinfo_s* previewbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret =0,i ;
+ struct bufferinfo_s* tmp_buf = NULL;
+ void * viraddress = NULL;
+ int numBufs;
+ int frame_size;
+ Mutex::Autolock lock(mLock);
+
+ if(!previewbuf || mMemHeapPmem == NULL){
+ LOGE("Pmem malloc preview buffer failed");
+ ret = -1;
+ goto null_fail;
+ }
+
+ numBufs = previewbuf->mNumBffers;
+ frame_size = previewbuf->mPerBuffersize;
+ previewbuf->mBufferSizes = numBufs*PAGE_ALIGN(frame_size);
+
+ if( mPmemSize < previewbuf->mBufferSizes+mJpegBufferInfo.mBufferSizes+mRawBufferInfo.mBufferSizes){
+ LOGE("Pmem is not enough for 0x%x bytes preview buffer!(Pmem:0x%x, Raw:0x%x, Jpeg:0x%x)",
+ previewbuf->mBufferSizes,mPmemSize,mRawBufferInfo.mBufferSizes,mJpegBufferInfo.mBufferSizes);
+ ret = -1;
+ goto null_fail;
+ }
+
+ mPreviewBuffer = (sp<IMemory>**)malloc(sizeof(sp<IMemory>*)*numBufs);
+ if (mPreviewBuffer == NULL) {
+ LOGE("mPreviewBuffer malloc failed");
+ ret = -1;
+ goto null_fail;
+ }
+
+ for(i = 0; i < numBufs; i++){
+ mPreviewBuffer[i] = (sp<IMemory>*)new (sp<IMemory>);
+ *mPreviewBuffer[i] = (static_cast<MemoryHeapPmem*>(mMemHeapPmem.get()))->mapMemory(PAGE_ALIGN(frame_size)*i,PAGE_ALIGN(frame_size));
+ }
+
+ previewbuf->mPhyBaseAddr = mPmemHeapPhyBase + (*mPreviewBuffer[0])->offset();
+ previewbuf->mVirBaseAddr= (unsigned long)(*mPreviewBuffer[0])->pointer();
+ previewbuf->mPerBuffersize = PAGE_ALIGN(frame_size);
+ mPreviewBufferInfo = *previewbuf;
+ LOG1("Preview buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mPreviewBufferInfo.mPhyBaseAddr,mPreviewBufferInfo.mVirBaseAddr,mPreviewBufferInfo.mBufferSizes);
+
+ LOG_FUNCTION_NAME_EXIT
+null_fail:
+ return ret;
+}
+int PmemManager::destroyPreviewBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+ unsigned int i;
+
+ for(i = 0; i < mPreviewBufferInfo.mNumBffers; i++) {
+ if (mPreviewBuffer[i] != NULL) {
+ (*mPreviewBuffer[i]).clear();
+ delete mPreviewBuffer[i];
+ mPreviewBuffer[i] = NULL;
+ }
+ }
+ free((char*)mPreviewBuffer);
+ mPreviewBuffer = NULL;
+ memset(&mPreviewBufferInfo,0,sizeof(mPreviewBufferInfo));
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+int PmemManager::createRawBuffer(struct bufferinfo_s* rawbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret =0;
+ int numBufs;
+ int frame_size;
+ int map_start;
+ Mutex::Autolock lock(mLock);
+
+ if(!rawbuf || mMemHeapPmem == NULL ){
+ LOGE("Pmem malloc raw buffer failed");
+ ret = -1;
+ goto null_fail;
+ }
+
+ numBufs = rawbuf->mNumBffers;
+ frame_size = rawbuf->mPerBuffersize;
+ rawbuf->mBufferSizes = numBufs*PAGE_ALIGN(frame_size);
+ //compute the start address of map
+ if( mPmemSize < mPreviewBufferInfo.mBufferSizes + mJpegBufferInfo.mBufferSizes +rawbuf->mBufferSizes){
+ LOGE("Pmem is not enough for 0x%x bytes raw buffer!(Pmem:0x%x, Preview:0x%x, Jpeg:0x%x)",
+ rawbuf->mBufferSizes,mPmemSize,mPreviewBufferInfo.mBufferSizes,mJpegBufferInfo.mBufferSizes);
+ ret = -1;
+ goto null_fail;
+ } else {
+ map_start = mPreviewBufferInfo.mBufferSizes;
+ }
+
+ mRawBuffer = (static_cast<MemoryHeapPmem*>(mMemHeapPmem.get()))->mapMemory(map_start,PAGE_ALIGN(frame_size)*numBufs);
+ rawbuf->mPhyBaseAddr = mPmemHeapPhyBase + mRawBuffer->offset();
+ rawbuf->mVirBaseAddr= (unsigned long)mRawBuffer->pointer();
+ rawbuf->mPerBuffersize = PAGE_ALIGN(frame_size);
+ mRawBufferInfo= *rawbuf;
+
+ LOG1("Raw buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mRawBufferInfo.mPhyBaseAddr,mRawBufferInfo.mVirBaseAddr,mRawBufferInfo.mBufferSizes);
+
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+null_fail:
+ memset(&mRawBufferInfo,0,sizeof(mRawBufferInfo));
+ return ret;
+}
+int PmemManager::destroyRawBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+
+ if (mRawBuffer!= NULL) {
+ mRawBuffer.clear();
+ mRawBuffer = NULL;
+ }
+ memset(&mRawBufferInfo,0,sizeof(mRawBufferInfo));
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+int PmemManager::createJpegBuffer(struct bufferinfo_s* jpegbuf)
+{
+ LOG_FUNCTION_NAME
+ int ret =0;
+ int numBufs;
+ int frame_size;
+ int map_start;
+
+ Mutex::Autolock lock(mLock);
+ if(!jpegbuf || mMemHeapPmem == NULL ){
+ LOGE("Pmem malloc jpeg buffer failed");
+ ret = -1;
+ goto null_fail;
+ }
+
+ numBufs = jpegbuf->mNumBffers;
+ frame_size = jpegbuf->mPerBuffersize;
+ jpegbuf->mBufferSizes = numBufs*PAGE_ALIGN(frame_size);
+
+ //compute the start address of map
+ if ( mPmemSize < mPreviewBufferInfo.mBufferSizes + mRawBufferInfo.mBufferSizes +jpegbuf->mBufferSizes) {
+ LOGE("Pmem is not enough for 0x%x bytes jpeg buffer!(Pmem:0x%x, Preview:0x%x, Raw:0x%x)",
+ jpegbuf->mBufferSizes,mPmemSize,mPreviewBufferInfo.mBufferSizes,mRawBufferInfo.mBufferSizes);
+ ret = -1;
+ goto null_fail;
+ } else {
+ map_start = mPmemSize - jpegbuf->mBufferSizes;
+ }
+ mJpegBuffer = (static_cast<MemoryHeapPmem*>(mMemHeapPmem.get()))->mapMemory(map_start,PAGE_ALIGN(frame_size)*numBufs);
+ jpegbuf->mPhyBaseAddr = mPmemHeapPhyBase + mJpegBuffer->offset();
+ jpegbuf->mVirBaseAddr= (unsigned long)mJpegBuffer->pointer();
+ jpegbuf->mPerBuffersize = PAGE_ALIGN(frame_size);
+ mJpegBufferInfo= *jpegbuf;
+
+ LOG1("Jpeg buffer information(phy:0x%x vir:0x%x size:0x%x)",
+ mJpegBufferInfo.mPhyBaseAddr,mJpegBufferInfo.mVirBaseAddr,mJpegBufferInfo.mBufferSizes);
+ LOG_FUNCTION_NAME_EXIT
+ return ret;
+
+null_fail:
+ memset(&mJpegBufferInfo,0,sizeof(mJpegBufferInfo));
+ return ret;
+}
+int PmemManager::destroyJpegBuffer()
+{
+ LOG_FUNCTION_NAME
+ Mutex::Autolock lock(mLock);
+
+ if (mJpegBuffer!= NULL) {
+ mJpegBuffer.clear();
+ mJpegBuffer = NULL;
+ }
+ memset(&mJpegBufferInfo,0,sizeof(mJpegBufferInfo));
+ LOG_FUNCTION_NAME_EXIT
+ return 0;
+}
+int PmemManager::flushCacheMem(buffer_type_enum buftype,unsigned int offset, unsigned int len)
+{
+ struct bufferinfo_s* tmpbuf =NULL;
+ int ret = 0;
+ struct pmem_region region;
+ sp<IMemory> tmpmem;
+ unsigned int i;
+ Mutex::Autolock lock(mLock);
+
+ switch(buftype)
+ {
+ case PREVIEWBUFFER:
+ tmpmem = (*mPreviewBuffer[0]);
+ break;
+ case RAWBUFFER:
+ tmpmem =mRawBuffer;
+
+ break;
+ case JPEGBUFFER:
+ tmpmem =mJpegBuffer;
+ break;
+ default:
+ break;
+ }
+ if (tmpmem == NULL) {
+ LOGE("flushCacheMem: unknown or unsupported buffer type %d",buftype);
+ return -1;
+ }
+ region.offset = tmpmem->offset()+offset;
+ region.len = len;
+ ret = ioctl(mPmemFd,PMEM_CACHE_FLUSH, &region);
+
+ return ret;
+}
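+/* NOTE: MemManagerBase declares the video-encoder buffer hooks as pure
+ * virtual, but this file does not implement them for PMEM. Minimal stubs
+ * (assumed here; video-encoder buffers are simply not supported on the
+ * PMEM path) keep the class concrete and linkable. */
+int PmemManager::createVideoEncBuffer(struct bufferinfo_s* videoencbuf)
+{
+ LOGE("video encoder buffers are not supported by the PMEM backend");
+ return -1;
+}
+int PmemManager::destroyVideoEncBuffer()
+{
+ return 0;
+}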
+/******************* pmem buffer end*****************/
+#endif
+}
diff --git a/CameraHal/CameraHal_Mem.h b/CameraHal/CameraHal_Mem.h
new file mode 100644
index 0000000..bbf2167
--- /dev/null
+++ b/CameraHal/CameraHal_Mem.h
@@ -0,0 +1,238 @@
+/*
+ * Author: zyc@rock-chips.com
+ */
+#ifndef ANDROID_CAMERAHAL_MEM_H
+#define ANDROID_CAMERAHAL_MEM_H
+
+#define CAMERA_MEM_PMEM 0
+#define CAMERA_MEM_ION 1
+#define CAMERA_MEM_IONDMA 2
+#define CAMERA_MEM_GRALLOC_DRM 3
+/*
+*NOTE:
+* configuration macro
+*
+*/
+#if defined(RK_DRM_GRALLOC)
+#define CONFIG_CAMERA_MEM CAMERA_MEM_GRALLOC_DRM
+#elif (defined(TARGET_RK32) || defined(TARGET_RK312x) || defined(TARGET_RK3188))
+#define CONFIG_CAMERA_MEM CAMERA_MEM_IONDMA
+#elif defined(TARGET_RK30)
+#define CONFIG_CAMERA_MEM CAMERA_MEM_ION
+#else
+#define CONFIG_CAMERA_MEM CAMERA_MEM_PMEM
+#endif
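+/* The memory backend is selected at compile time: gralloc/DRM on
+ * RK_DRM_GRALLOC builds, ION-DMA on RK32/RK312x/RK3188, the legacy ION
+ * allocator on RK30, and PMEM otherwise. */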
+
+#if (CONFIG_CAMERA_MEM == CAMERA_MEM_ION)
+#include <ion/IonAlloc.h>
+#elif (CONFIG_CAMERA_MEM == CAMERA_MEM_PMEM)
+#ifdef HAVE_ANDROID_OS
+#include <linux/android_pmem.h>
+#include <binder/MemoryHeapPmem.h>
+#endif
+
+#elif (CONFIG_CAMERA_MEM == CAMERA_MEM_IONDMA)
+#include <rockchip_ion.h>
+#include <ion.h>
+
+#elif (CONFIG_CAMERA_MEM == CAMERA_MEM_GRALLOC_DRM)
+#include "camera_mem.h"
+#endif
+#include <binder/IMemory.h>
+namespace android {
+
+/*****************for unified memory manager start*******************/
+enum buffer_type_enum{
+ PREVIEWBUFFER,
+ RAWBUFFER,
+ JPEGBUFFER,
+ VIDEOENCBUFFER,
+
+};
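+/* Per-type allocation bookkeeping shared by all backends: buffer count,
+ * per-buffer and total size, physical/virtual base address and the share fd
+ * used to hand the buffer to other components. */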
+struct bufferinfo_s{
+ unsigned int mNumBffers; //invalid if this value is 0
+ size_t mPerBuffersize;
+ size_t mBufferSizes;
+ unsigned long mPhyBaseAddr;
+ unsigned long mVirBaseAddr;
+ unsigned long mShareFd;
+ buffer_type_enum mBufType;
+ bool mIsForceIommuBuf;
+};
+
+typedef enum buffer_addr_e {
+ buffer_addr_phy,
+ buffer_addr_vir,
+ buffer_sharre_fd
+}buffer_addr_t;
+
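+/* Abstract interface implemented by every memory backend: create/destroy a
+ * buffer set per buffer type, optional cache maintenance, and address lookup
+ * through getBufferAddr(). */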
+class MemManagerBase{
+public :
+ MemManagerBase();
+ virtual ~MemManagerBase();
+ virtual int createPreviewBuffer(struct bufferinfo_s* previewbuf) = 0;
+ virtual int createRawBuffer(struct bufferinfo_s* rawbuf) = 0;
+ virtual int createJpegBuffer(struct bufferinfo_s* jpegbuf) = 0;
+ virtual int createVideoEncBuffer(struct bufferinfo_s* videoencbuf) = 0;
+
+ virtual int destroyPreviewBuffer() = 0;
+ virtual int destroyRawBuffer() = 0;
+ virtual int destroyJpegBuffer() = 0;
+ virtual int destroyVideoEncBuffer() = 0;
+ virtual int flushCacheMem(buffer_type_enum buftype,unsigned int offset, unsigned int len) = 0;
+ #if 0
+ struct bufferinfo_s& getPreviewBufInfo(){
+ return mPreviewBufferInfo;}
+ struct bufferinfo_s& getRawBufInfo(){
+ return mRawBufferInfo;}
+ struct bufferinfo_s& getJpegBufInfo(){
+ return mJpegBufferInfo;}
+ struct bufferinfo_s& getVideoEncBufInfo(){
+ return mVideoEncBufferInfo;}
+ #endif
+ unsigned int getBufferAddr(enum buffer_type_enum buf_type, unsigned int buf_idx, buffer_addr_t addr_type);
+ int dump();
+protected:
+ struct bufferinfo_s* mPreviewBufferInfo;
+ struct bufferinfo_s* mRawBufferInfo;
+ struct bufferinfo_s* mJpegBufferInfo;
+ struct bufferinfo_s* mVideoEncBufferInfo;
+ mutable Mutex mLock;
+};
+#if (CONFIG_CAMERA_MEM == CAMERA_MEM_PMEM)
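+/* PMEM-backed implementation: a single MemoryHeapPmem is mapped and split
+ * into preview, raw and jpeg regions. */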
+class PmemManager:public MemManagerBase{
+ public :
+ PmemManager(char* devpath);
+ ~PmemManager();
+ virtual int createPreviewBuffer(struct bufferinfo_s* previewbuf);
+ virtual int createRawBuffer(struct bufferinfo_s* rawbuf);
+ virtual int createJpegBuffer(struct bufferinfo_s* jpegbuf);
+ virtual int createVideoEncBuffer(struct bufferinfo_s* videoencbuf) ;
+
+ virtual int destroyPreviewBuffer();
+ virtual int destroyRawBuffer();
+ virtual int destroyJpegBuffer();
+ virtual int destroyVideoEncBuffer();
+ virtual int flushCacheMem(buffer_type_enum buftype,unsigned int offset, unsigned int len) ;
+ int initPmem(char* devpath);
+ int deinitPmem();
+ private:
+ int mPmemFd;
+ unsigned int mPmemSize;
+ unsigned int mPmemHeapPhyBase;
+ sp<MemoryHeapBase> mMemHeap;
+ sp<MemoryHeapBase> mMemHeapPmem;
+ sp<IMemory> mJpegBuffer;
+ sp<IMemory> mRawBuffer;
+ sp<IMemory> **mPreviewBuffer;
+};
+#endif
+#if (CONFIG_CAMERA_MEM == CAMERA_MEM_ION)
+class IonMemManager:public MemManagerBase{
+ public :
+ IonMemManager();
+ ~IonMemManager();
+
+ virtual int createPreviewBuffer(struct bufferinfo_s* previewbuf);
+ virtual int createRawBuffer(struct bufferinfo_s* rawbuf);
+ virtual int createJpegBuffer(struct bufferinfo_s* jpegbuf);
+ virtual int createVideoEncBuffer(struct bufferinfo_s* videoencbuf) ;
+
+ virtual int destroyPreviewBuffer();
+ virtual int destroyRawBuffer();
+ virtual int destroyJpegBuffer();
+ virtual int destroyVideoEncBuffer();
+ virtual int flushCacheMem(buffer_type_enum buftype,unsigned int offset, unsigned int len);
+ private:
+ int createIonBuffer(struct bufferinfo_s* ionbuf);
+ void destroyIonBuffer(buffer_type_enum buftype);
+ ion_buffer_t *mPreviewData;
+ ion_buffer_t *mRawData;
+ ion_buffer_t *mJpegData;
+ ion_buffer_t *mVideoEncData;
+ IonAlloc *mIonMemMgr;
+};
+#endif
+
+#if (CONFIG_CAMERA_MEM == CAMERA_MEM_IONDMA)
+
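+/* Describes one ION allocation: the ion handle, its map fd, the mapped
+ * virtual address, the physical/DMA address, the size and the share id. */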
+typedef struct camera_ionbuf_s
+{
+ void* ion_hdl;
+ int map_fd;
+ unsigned long vir_addr;
+ unsigned long phy_addr;
+ size_t size;
+ int share_id;
+
+}camera_ionbuf_t;
+
+
+class IonDmaMemManager:public MemManagerBase{
+ public :
+ IonDmaMemManager(bool iommuEnabled);
+ ~IonDmaMemManager();
+
+ virtual int createPreviewBuffer(struct bufferinfo_s* previewbuf);
+ virtual int createRawBuffer(struct bufferinfo_s* rawbuf);
+ virtual int createJpegBuffer(struct bufferinfo_s* jpegbuf);
+ virtual int createVideoEncBuffer(struct bufferinfo_s* videoencbuf) ;
+
+ virtual int destroyPreviewBuffer();
+ virtual int destroyRawBuffer();
+ virtual int destroyJpegBuffer();
+ virtual int destroyVideoEncBuffer();
+ virtual int flushCacheMem(buffer_type_enum buftype,unsigned int offset, unsigned int len);
+
+ //map
+
+ //unmap
+
+ //share
+ private:
+ int createIonBuffer(struct bufferinfo_s* ionbuf);
+ void destroyIonBuffer(buffer_type_enum buftype);
+ camera_ionbuf_t* mPreviewData;
+ camera_ionbuf_t* mRawData;
+ camera_ionbuf_t* mJpegData;
+ camera_ionbuf_t* mVideoEncData;
+ int client_fd;
+ bool mIommuEnabled;
+
+};
+#endif
+
+#if (CONFIG_CAMERA_MEM == CAMERA_MEM_GRALLOC_DRM)
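+/* gralloc/DRM-backed implementation: buffers are allocated through the
+ * cam_mem_ops_t returned by get_cam_ops(CAM_MEM_TYPE_GRALLOC) and tracked as
+ * arrays of cam_mem_info_t pointers. */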
+class GrallocDrmMemManager:public MemManagerBase{
+ public :
+ GrallocDrmMemManager(bool iommuEnabled);
+ ~GrallocDrmMemManager();
+
+ virtual int createPreviewBuffer(struct bufferinfo_s* previewbuf);
+ virtual int createRawBuffer(struct bufferinfo_s* rawbuf);
+ virtual int createJpegBuffer(struct bufferinfo_s* jpegbuf);
+ virtual int createVideoEncBuffer(struct bufferinfo_s* videoencbuf) ;
+
+ virtual int destroyPreviewBuffer();
+ virtual int destroyRawBuffer();
+ virtual int destroyJpegBuffer();
+ virtual int destroyVideoEncBuffer();
+ virtual int flushCacheMem(buffer_type_enum buftype,unsigned int offset, unsigned int len);
+
+ //map
+
+ //unmap
+
+ //share
+ private:
+ int createGrallocDrmBuffer(struct bufferinfo_s* grallocbuf);
+ void destroyGrallocDrmBuffer(buffer_type_enum buftype);
+ cam_mem_info_t** mPreviewData;
+ cam_mem_info_t** mRawData;
+ cam_mem_info_t** mJpegData;
+ cam_mem_info_t** mVideoEncData;
+ cam_mem_handle_t* mHandle;
+ cam_mem_ops_t* mOps;
+};
+#endif
+
+
+/*****************for unified memory manager end*******************/
+}  /* namespace android */
+
+#endif  /* ANDROID_CAMERAHAL_MEM_H */
diff --git a/CameraHal/CameraHal_Module.cpp b/CameraHal/CameraHal_Module.cpp
new file mode 100755
index 0000000..4a21da2
--- /dev/null
+++ b/CameraHal/CameraHal_Module.cpp
@@ -0,0 +1,1532 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraHal.cpp
+*
+* This file maps the Camera Hardware Interface to V4L2.
+*
+*/
+
+
+#include <utils/threads.h>
+#include <binder/IPCThreadState.h>
+#include "CameraHal.h"
+#include "CameraHal_Module.h"
+#include "CameraHal_MediaProfile.cpp"
+#include <time.h>
+#include "vpu.h"
+#include "cam_api/cam_engine_interface.h"
+
+rk_cam_info_t gCamInfos[CAMERAS_SUPPORT_MAX];
+static android::CameraHal* gCameraHals[CAMERAS_SUPPORT_MAX];
+#if CONFIG_AUTO_DETECT_FRAMERATE
+static sp<CameraFpsDetectThread> gCameraFpsDetectThread;
+#endif
+static unsigned int gCamerasOpen = 0;
+static signed int gCamerasNumber = -1;
+#ifdef LAPTOP
+static signed int gCamerasUnavailabled = 0;
+static String8 gUsbCameraNames[CAMERAS_SUPPORT_MAX];
+#endif
+static android::Mutex gCameraHalDeviceLock;
+
+static int camera_device_open(const hw_module_t* module, const char* name,
+ hw_device_t** device);
+static int camera_device_close(hw_device_t* device);
+static int camera_get_number_of_cameras(void);
+static int camera_get_camera_info(int camera_id, struct camera_info *info);
+
+static struct hw_module_methods_t camera_module_methods = {
+ open: camera_device_open
+};
+
+camera_module_t HAL_MODULE_INFO_SYM = {
+ common: {
+ tag: HARDWARE_MODULE_TAG,
+ version_major: ((CONFIG_CAMERAHAL_VERSION&0xffff00)>>16),
+ version_minor: CONFIG_CAMERAHAL_VERSION&0xff,
+ id: CAMERA_HARDWARE_MODULE_ID,
+ name: CAMERA_MODULE_NAME,
+ author: "RockChip",
+ methods: &camera_module_methods,
+ dso: NULL, /* remove compilation warnings */
+ reserved: {0}, /* remove compilation warnings */
+ },
+ get_number_of_cameras: camera_get_number_of_cameras,
+ get_camera_info: camera_get_camera_info,
+ set_callbacks:NULL,
+ get_vendor_tag_ops:NULL,
+#if defined(ANDROID_5_X)
+ open_legacy:NULL,
+#endif
+#if defined(ANDROID_6_X)
+ set_torch_mode:NULL,
+ init:NULL,
+#endif
+ reserved: {0}
+};
+
+
+
+
+/*******************************************************************
+ * implementation of camera_device_ops functions
+ *******************************************************************/
+
+int camera_set_preview_window(struct camera_device * device,
+ struct preview_stream_ops *window)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ rv = gCameraHals[rk_dev->cameraid]->setPreviewWindow(window);
+
+ return rv;
+}
+
+void camera_set_callbacks(struct camera_device * device,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ gCameraHals[rk_dev->cameraid]->setCallbacks(notify_cb, data_cb, data_cb_timestamp, get_memory, user);
+}
+
+void camera_enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ gCameraHals[rk_dev->cameraid]->enableMsgType(msg_type);
+}
+
+void camera_disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ gCameraHals[rk_dev->cameraid]->disableMsgType(msg_type);
+}
+
+int camera_msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return 0;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ return gCameraHals[rk_dev->cameraid]->msgTypeEnabled(msg_type);
+}
+
+int camera_start_preview(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ rv = gCameraHals[rk_dev->cameraid]->startPreview();
+
+ return rv;
+}
+
+void camera_stop_preview(struct camera_device * device)
+{
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ gCameraHals[rk_dev->cameraid]->stopPreview();
+}
+
+int camera_preview_enabled(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ rv = gCameraHals[rk_dev->cameraid]->previewEnabled();
+ return rv;
+}
+
+int camera_store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ // TODO: metadata buffers are not currently supported
+ rv = gCameraHals[rk_dev->cameraid]->storeMetaDataInBuffers(enable);
+ return rv;
+ //return enable ? android::INVALID_OPERATION: android::OK;
+}
+
+int camera_start_recording(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ rv = gCameraHals[rk_dev->cameraid]->startRecording();
+ return rv;
+}
+
+void camera_stop_recording(struct camera_device * device)
+{
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ gCameraHals[rk_dev->cameraid]->stopRecording();
+}
+
+int camera_recording_enabled(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ rv = gCameraHals[rk_dev->cameraid]->recordingEnabled();
+ return rv;
+}
+
+void camera_release_recording_frame(struct camera_device * device,
+ const void *opaque)
+{
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ gCameraHals[rk_dev->cameraid]->releaseRecordingFrame(opaque);
+}
+
+int camera_auto_focus(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ rv = gCameraHals[rk_dev->cameraid]->autoFocus();
+ return rv;
+}
+
+int camera_cancel_auto_focus(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ rv = gCameraHals[rk_dev->cameraid]->cancelAutoFocus();
+ return rv;
+}
+
+int camera_take_picture(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ rv = gCameraHals[rk_dev->cameraid]->takePicture();
+ return rv;
+}
+
+int camera_cancel_picture(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ rv = gCameraHals[rk_dev->cameraid]->cancelPicture();
+ return rv;
+}
+
+int camera_set_parameters(struct camera_device * device, const char *params)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ rv = gCameraHals[rk_dev->cameraid]->setParameters(params);
+ return rv;
+}
+
+char* camera_get_parameters(struct camera_device * device)
+{
+ char* param = NULL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return NULL;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ param = gCameraHals[rk_dev->cameraid]->getParameters();
+
+ return param;
+}
+
+static void camera_put_parameters(struct camera_device *device, char *parms)
+{
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ gCameraHals[rk_dev->cameraid]->putParameters(parms);
+}
+
+int camera_send_command(struct camera_device * device,
+ int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ rv = gCameraHals[rk_dev->cameraid]->sendCommand(cmd, arg1, arg2);
+ return rv;
+}
+
+void camera_release(struct camera_device * device)
+{
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ gCameraHals[rk_dev->cameraid]->release();
+}
+
+int camera_dump(struct camera_device * device, int fd)
+{
+ int rv = -EINVAL;
+ rk_camera_device_t* rk_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ rv = gCameraHals[rk_dev->cameraid]->dump(fd);
+ return rv;
+}
+int camera_device_close(hw_device_t* device)
+{
+ int ret = 0;
+ rk_camera_device_t* rk_dev = NULL;
+
+ LOGD("%s", __FUNCTION__);
+
+ android::Mutex::Autolock lock(gCameraHalDeviceLock);
+
+ if (!device) {
+ ret = -EINVAL;
+ goto done;
+ }
+
+ rk_dev = (rk_camera_device_t*) device;
+
+ if (rk_dev) {
+ if (gCameraHals[rk_dev->cameraid]) {
+ delete gCameraHals[rk_dev->cameraid];
+ gCameraHals[rk_dev->cameraid] = NULL;
+ gCamerasOpen--;
+ }
+
+ if (rk_dev->base.ops) {
+ free(rk_dev->base.ops);
+ }
+ free(rk_dev);
+ }
+done:
+
+ return ret;
+}
+
+#ifdef LAPTOP
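+/* Re-validates the V4L2 node recorded for cameraid. If the node has gone
+ * away, lost its capture capability or changed its name, the camera list is
+ * re-enumerated; on HP laptops the id is also remapped so that id 0 keeps
+ * pointing at the "HP HD" camera and id 1 at the "HP IR" camera. */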
+int checkUsbCameraStatus(int cameraid)
+{
+ int fd = -1;
+ bool needRegetCameraNumbers = false;
+ fd = open(gCamInfos[cameraid].device_path, O_RDONLY);
+ if (fd < 0) {
+ LOGE("Open %s failed! strr: %s",gCamInfos[cameraid].device_path,strerror(errno));
+ needRegetCameraNumbers = true;
+ goto camera_get_number_of_cameras_end;
+ }
+ LOGD("checkUsbCameraStatus Open %s success!",gCamInfos[cameraid].device_path);
+
+ struct v4l2_capability capability;
+ memset(&capability, 0, sizeof(struct v4l2_capability));
+ if (ioctl(fd, VIDIOC_QUERYCAP, &capability) < 0) {
+ LOGE("Video device(%s): query capability not supported.\n",gCamInfos[cameraid].device_path);
+ close(fd);
+ needRegetCameraNumbers = true;
+ goto camera_get_number_of_cameras_end;
+ }
+ close(fd);
+ if ((capability.capabilities & (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING)) != (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING)) {
+ LOGD("Video device(%s): video capture not supported.\n",gCamInfos[cameraid].device_path);
+ needRegetCameraNumbers = true;
+ } else {
+ if (strcmp(gUsbCameraNames[cameraid].string(), (char*)&capability.card[0])) {
+ LOGE("Current camera %d name(%s) is different from last(%s)", cameraid,
+ (char*)&capability.card[0], gUsbCameraNames[cameraid].string());
+ needRegetCameraNumbers = true;
+ } else if (cameraid == 0 && strstr(gUsbCameraNames[cameraid].string(), "HP IR")) {
+ LOGE("Current camera %d name(%s) is different from HP HD", cameraid,
+ gUsbCameraNames[cameraid].string());
+ cameraid = 1;
+ } else if (cameraid == 1 && strstr(gUsbCameraNames[cameraid].string(), "HP HD")) {
+ LOGE("Current camera %d name(%s) is different from HP IR", cameraid,
+ gUsbCameraNames[cameraid].string());
+ cameraid = 0;
+ }
+ }
+
+camera_get_number_of_cameras_end:
+ if (needRegetCameraNumbers) {
+ LOGD("%s(%d): Current usb camera has changed and get_number_of_cameras again.",__FUNCTION__, __LINE__);
+ gCamerasNumber = -1;
+ camera_get_number_of_cameras();
+ if (cameraid == 0) {
+ if (!strstr(gUsbCameraNames[cameraid].string(), "HP HD")){
+ if (strstr(gUsbCameraNames[1].string(), "HP HD"))
+ cameraid = 1;
+ }
+ } else {
+ if (!strstr(gUsbCameraNames[cameraid].string(), "HP IR")){
+ if (strstr(gUsbCameraNames[0].string(), "HP IR"))
+ cameraid = 0;
+ }
+ }
+ LOGD("try to open camera %d after reget number_of_cameras", cameraid);
+ }
+ return cameraid;
+}
+#endif
+
+/*******************************************************************
+ * implementation of camera_module functions
+ *******************************************************************/
+
+/* open device handle to one of the cameras
+ *
+ * assume camera service will keep singleton of each camera
+ * so this function will always only be called once per camera instance
+ */
+
+int camera_device_open(const hw_module_t* module, const char* name,
+ hw_device_t** device)
+{
+ int rv = 0;
+ int cameraid;
+ rk_camera_device_t* camera_device = NULL;
+ camera_device_ops_t* camera_ops = NULL;
+ android::CameraHal* camera = NULL;
+
+ android::Mutex::Autolock lock(gCameraHalDeviceLock);
+
+ LOGI("camera_device open");
+
+ if (name != NULL) {
+ cameraid = atoi(name);
+
+#ifdef LAPTOP
+ char sys_device_lock[PROPERTY_VALUE_MAX];
+ property_get("sys.device_locked.status",sys_device_lock,NULL);
+ const char* cameraCallProcess = getCallingProcess();
+ if (!strcmp(sys_device_lock, "1")
+ && strstr("com.android.facelock",cameraCallProcess)) {
+ if (strstr(gUsbCameraNames[cameraid].string(), "HD HD")) {
+ LOGD("facelock switch camera to open HP IR camera!!!");
+ cameraid = cameraid == 0 ? 1 : cameraid;
+ }
+ }
+ cameraid = checkUsbCameraStatus(cameraid);
+#endif
+
+ if(cameraid >= gCamerasNumber) {
+ LOGE("camera service provided cameraid out of bounds, "
+ "cameraid = %d, num supported = %d",
+ cameraid, gCamerasNumber);
+ rv = -EINVAL;
+ goto fail;
+ }
+
+ if(gCamerasOpen >= CAMERAS_SUPPORTED_SIMUL_MAX) {
+ LOGE("maximum number(%d) of cameras already open",gCamerasOpen);
+ rv = -EUSERS;
+ goto fail;
+ }
+
+ camera_device = (rk_camera_device_t*)malloc(sizeof(*camera_device));
+ if(!camera_device) {
+ LOGE("camera_device allocation fail");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ camera_ops = (camera_device_ops_t*)malloc(sizeof(*camera_ops));
+ if(!camera_ops) {
+ LOGE("camera_ops allocation fail");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ memset(camera_device, 0, sizeof(*camera_device));
+ memset(camera_ops, 0, sizeof(*camera_ops));
+
+ camera_device->base.common.tag = HARDWARE_DEVICE_TAG;
+ camera_device->base.common.version = 0;
+ camera_device->base.common.module = (hw_module_t *)(module);
+ camera_device->base.common.close = camera_device_close;
+ camera_device->base.ops = camera_ops;
+
+ camera_ops->set_preview_window = camera_set_preview_window;
+ camera_ops->set_callbacks = camera_set_callbacks;
+ camera_ops->enable_msg_type = camera_enable_msg_type;
+ camera_ops->disable_msg_type = camera_disable_msg_type;
+ camera_ops->msg_type_enabled = camera_msg_type_enabled;
+ camera_ops->start_preview = camera_start_preview;
+ camera_ops->stop_preview = camera_stop_preview;
+ camera_ops->preview_enabled = camera_preview_enabled;
+ camera_ops->store_meta_data_in_buffers = camera_store_meta_data_in_buffers;
+ camera_ops->start_recording = camera_start_recording;
+ camera_ops->stop_recording = camera_stop_recording;
+ camera_ops->recording_enabled = camera_recording_enabled;
+ camera_ops->release_recording_frame = camera_release_recording_frame;
+ camera_ops->auto_focus = camera_auto_focus;
+ camera_ops->cancel_auto_focus = camera_cancel_auto_focus;
+ camera_ops->take_picture = camera_take_picture;
+ camera_ops->cancel_picture = camera_cancel_picture;
+ camera_ops->set_parameters = camera_set_parameters;
+ camera_ops->get_parameters = camera_get_parameters;
+ camera_ops->put_parameters = camera_put_parameters;
+ camera_ops->send_command = camera_send_command;
+ camera_ops->release = camera_release;
+ camera_ops->dump = camera_dump;
+
+ *device = &camera_device->base.common;
+
+ // -------- RockChip specific stuff --------
+
+ camera_device->cameraid = cameraid;
+
+ camera = new android::CameraHal(cameraid);
+
+ if(!camera || !camera->mInitState) {
+ LOGE("Couldn't create instance of CameraHal class");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ gCameraHals[cameraid] = camera;
+ gCamerasOpen++;
+ }
+
+ return rv;
+
+fail:
+ if(camera_device) {
+ free(camera_device);
+ camera_device = NULL;
+ }
+ if(camera_ops) {
+ free(camera_ops);
+ camera_ops = NULL;
+ }
+ if(camera) {
+ delete camera;
+ camera = NULL;
+ }
+ *device = NULL;
+ return rv;
+}
+
+static uint MediaProfile_Resolution[][2] = {{176,144},{240,160},{320,240},{352,288},
+ {640,480},{720,480},{800,600},{1280,720},{1920,1080},
+ {0,0}};
+
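+/* Returns the index of (w,h) in MediaProfile_Resolution, or -1 if the
+ * resolution is not in the table. */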
+int find_DV_resolution_index(int w, int h)
+{
+ int list_w, list_h;
+ int i=0;
+ int index=-1;
+
+ do{
+ list_w = MediaProfile_Resolution[i][0];
+ list_h = MediaProfile_Resolution[i][1];
+
+ if(list_w==w && list_h==h){
+ index = i;
+ break;
+ }
+ i++;
+ }while(list_w!=0 && list_h!=0);
+
+ return index;
+}
+
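+/* Enumerates the available cameras: sensors described by the board profile
+ * (ISP/SOC cameras) are probed first, then the V4L2 device nodes are scanned
+ * for UVC cameras, whose supported preview sizes and frame rates are folded
+ * into the media-profile resolution table. Version information is exported
+ * through system properties along the way. */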
+int camera_get_number_of_cameras(void)
+{
+ char cam_path[20];
+ char cam_num[3],i;
+ int cam_cnt=0,fd=-1,rk29_cam[CAMERAS_SUPPORT_MAX];
+ struct v4l2_capability capability;
+ rk_cam_info_t camInfoTmp[CAMERAS_SUPPORT_MAX];
+ char *ptr,**ptrr;
+ char version[PROPERTY_VALUE_MAX];
+ char property[PROPERTY_VALUE_MAX];
+ int hwrotation = 0;
+ camera_board_profiles * profiles = NULL;
+ size_t nCamDev = 0;
+ char trace_level[PROPERTY_VALUE_MAX];
+ struct timeval t0, t1;
+ ::gettimeofday(&t0, NULL);
+
+ if (gCamerasNumber > 0)
+ goto camera_get_number_of_cameras_end;
+
+ {
+ property_set(CAMERAHAL_CAM_OTP_PROPERTY_KEY, "true");
+ memset(version,0x00,sizeof(version));
+ sprintf(version,"%d.%d.%d",((CONFIG_CAMERAHAL_VERSION&0xff0000)>>16),
+ ((CONFIG_CAMERAHAL_VERSION&0xff00)>>8),CONFIG_CAMERAHAL_VERSION&0xff);
+ property_set(CAMERAHAL_VERSION_PROPERTY_KEY,version);
+
+ property_get(CAMERAHAL_TRACE_LEVEL_PROPERTY_KEY, trace_level, "-1");
+ if (strcmp(trace_level,"-1")==0) {
+ property_set(CAMERAHAL_TRACE_LEVEL_PROPERTY_KEY, "0");
+ }
+
+ memset(version,0x00,sizeof(version));
+ sprintf(version,"%d.%d.%d",((ConfigBoardXmlVersion&0xff0000)>>16),
+ ((ConfigBoardXmlVersion&0xff00)>>8),ConfigBoardXmlVersion&0xff);
+ property_set(CAMERAHAL_CAMBOARDXML_PARSER_PROPERTY_KEY,version);
+
+
+ CamEngineVersionItf *camEngVerItf;
+ CamEngVer_t camEngVer;
+
+ camEngVerItf = new CamEngineVersionItf();
+
+ camEngVerItf->getVersion(&camEngVer);
+
+ memset(version,0x00,sizeof(version));
+ sprintf(version,"%d.%d.%d",((camEngVer.libisp_ver&0xff0000)>>16),
+ ((camEngVer.libisp_ver&0xff00)>>8),camEngVer.libisp_ver&0xff);
+ property_set(CAMERAHAL_LIBISP_PROPERTY_KEY,version);
+
+ memset(version,0x00,sizeof(version));
+ sprintf(version,"%d.%d.%d",((camEngVer.isi_ver&0xff0000)>>16),
+ ((camEngVer.isi_ver&0xff00)>>8),camEngVer.isi_ver&0xff);
+ property_set(CAMERAHAL_ISI_PROPERTY_KEY,version);
+
+ delete camEngVerItf;
+
+ }
+
+ memset(&camInfoTmp[0],0x00,sizeof(rk_cam_info_t));
+ memset(&camInfoTmp[1],0x00,sizeof(rk_cam_info_t));
+
+ profiles = camera_board_profiles::getInstance();
+ nCamDev = profiles->mDevieVector.size();
+ LOGE("board profiles cam num %d\n", nCamDev);
+ if (nCamDev>0) {
+ camera_board_profiles::LoadSensor(profiles);
+ char sensor_ver[32];
+
+ for (i=0; (i<nCamDev); i++)
+ {
+ LOGE("load sensor name(%s) connect %d\n", profiles->mDevieVector[i]->mHardInfo.mSensorInfo.mSensorName, profiles->mDevieVector[i]->mIsConnect);
+ if(profiles->mDevieVector[i]->mIsConnect==1){
+ rk_sensor_info *pSensorInfo = &(profiles->mDevieVector[i]->mHardInfo.mSensorInfo);
+
+ camInfoTmp[cam_cnt&0x01].pcam_total_info = profiles->mDevieVector[i];
+ strncpy(camInfoTmp[cam_cnt&0x01].device_path, pSensorInfo->mCamsysDevPath, sizeof(camInfoTmp[cam_cnt&0x01].device_path));
+ strncpy(camInfoTmp[cam_cnt&0x01].driver, pSensorInfo->mSensorDriver, sizeof(camInfoTmp[cam_cnt&0x01].driver));
+ unsigned int SensorDrvVersion = profiles->mDevieVector[i]->mLoadSensorInfo.mpI2cInfo->sensor_drv_version;
+ memset(version,0x00,sizeof(version));
+ sprintf(version,"%d.%d.%d",((SensorDrvVersion&0xff0000)>>16),
+ ((SensorDrvVersion&0xff00)>>8),SensorDrvVersion&0xff);
+
+ if(pSensorInfo->mFacing == RK_CAM_FACING_FRONT){
+ camInfoTmp[cam_cnt&0x01].facing_info.facing = CAMERA_FACING_FRONT;
+ } else {
+ camInfoTmp[cam_cnt&0x01].facing_info.facing = CAMERA_FACING_BACK;
+ }
+
+ memset(sensor_ver,0x00,sizeof(sensor_ver));
+ if (strlen(pSensorInfo->mSensorName) < (sizeof(sensor_ver)-16))
+ sprintf(sensor_ver,"%s%s%s","sys_graphic.",pSensorInfo->mSensorName,".ver");
+ else
+ sprintf(sensor_ver,"%s",pSensorInfo->mSensorName);
+ property_set(sensor_ver, version);
+
+ camInfoTmp[cam_cnt&0x01].facing_info.orientation = pSensorInfo->mOrientation;
+ cam_cnt++;
+
+ unsigned int CamsysDrvVersion = profiles->mDevieVector[i]->mCamsysVersion.drv_ver;
+ memset(version,0x00,sizeof(version));
+ sprintf(version,"%d.%d.%d",((CamsysDrvVersion&0xff0000)>>16),
+ ((CamsysDrvVersion&0xff00)>>8),CamsysDrvVersion&0xff);
+ property_set(CAMERAHAL_CAMSYS_VERSION_PROPERTY_KEY,version);
+ }
+ }
+ }
+
+#ifdef LAPTOP
+ for (int i = 0; i < CAMERAS_SUPPORT_MAX; i++)
+ gUsbCameraNames[i].clear();
+#endif
+
+ if(cam_cnt<CAMERAS_SUPPORT_MAX){
+ int i=0;
+ int element_count=0;
+ while(MediaProfile_Resolution[i][0]>0 && MediaProfile_Resolution[i][1]>0){
+ element_count++;
+ i++;
+ }
+
+ for (i=0; i<10; i++) {
+ cam_path[0] = 0x00;
+ unsigned int pix_format_tmp = V4L2_PIX_FMT_NV12;
+ strcat(cam_path, CAMERA_DEVICE_NAME);
+ sprintf(cam_num, "%d", i);
+ strcat(cam_path,cam_num);
+ fd = open(cam_path, O_RDONLY);
+ if (fd < 0) {
+ LOGE("Open %s failed! strr: %s",cam_path,strerror(errno));
+ break;
+ }
+ LOGD("Open %s success!",cam_path);
+
+ memset(&capability, 0, sizeof(struct v4l2_capability));
+ if (ioctl(fd, VIDIOC_QUERYCAP, &capability) < 0) {
+ LOGE("Video device(%s): query capability not supported.\n",cam_path);
+ goto loop_continue;
+ }
+
+ if ((capability.capabilities & (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING)) != (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING)) {
+ LOGD("Video device(%s): video capture not supported.\n",cam_path);
+ } else {
+ rk_cam_total_info* pNewCamInfo = new rk_cam_total_info();
+ memset(camInfoTmp[cam_cnt&0x01].device_path,0x00, sizeof(camInfoTmp[cam_cnt&0x01].device_path));
+ strcat(camInfoTmp[cam_cnt&0x01].device_path,cam_path);
+ memset(camInfoTmp[cam_cnt&0x01].fival_list,0x00, sizeof(camInfoTmp[cam_cnt&0x01].fival_list));
+ memcpy(camInfoTmp[cam_cnt&0x01].driver,capability.driver, sizeof(camInfoTmp[cam_cnt&0x01].driver));
+ camInfoTmp[cam_cnt&0x01].version = capability.version;
+ if (strstr((char*)&capability.card[0], "front") != NULL) {
+ camInfoTmp[cam_cnt&0x01].facing_info.facing = CAMERA_FACING_FRONT;
+#ifdef LAPTOP
+ } else if (strstr((char*)&capability.card[0], "HP HD") != NULL
+ || strstr((char*)&capability.card[0], "HP IR")) {
+ camInfoTmp[cam_cnt&0x01].facing_info.facing = CAMERA_FACING_FRONT;
+ if (strstr((char*)&capability.card[0], "HP IR"))
+ gCamerasUnavailabled++;
+ gUsbCameraNames[cam_cnt&0x01] = String8((char*)&capability.card[0]);
+ LOGD("Camera %d name: %s", (cam_cnt&0x01), gUsbCameraNames[cam_cnt&0x01].string());
+#endif
+ } else {
+ camInfoTmp[cam_cnt&0x01].facing_info.facing = CAMERA_FACING_BACK;
+ }
+ ptr = strstr((char*)&capability.card[0],"-");
+ if (ptr != NULL) {
+ ptr++;
+ camInfoTmp[cam_cnt&0x01].facing_info.orientation = atoi(ptr);
+ } else {
+ camInfoTmp[cam_cnt&0x01].facing_info.orientation = 0;
+ }
+
+ memset(version,0x00,sizeof(version));
+ sprintf(version,"%d.%d.%d",((capability.version&0xff0000)>>16),
+ ((capability.version&0xff00)>>8),capability.version&0xff);
+ property_set(CAMERAHAL_V4L2_VERSION_PROPERTY_KEY,version);
+
+ if(strcmp((char*)&capability.driver[0],"uvcvideo") == 0)//uvc
+ {
+ int ret,i=0,j=0;
+ struct v4l2_frmivalenum fival;
+ struct v4l2_frmsizeenum fsize;
+ struct v4l2_fmtdesc fmtdesc;
+ unsigned int width, height;
+ unsigned int sensor_resolution_w=0,sensor_resolution_h=0;
+ unsigned int CameraHal_SupportFmt[6];
+ unsigned int mCamDriverSupportFmt[CAMERA_DRIVER_SUPPORT_FORMAT_MAX];
+ unsigned int mCamDriverPreviewFmt=0;
+ unsigned int maxfps;
+
+ //add usb camera to board_profiles
+ rk_DV_info *pDVResolution = new rk_DV_info();
+ memset(pNewCamInfo->mHardInfo.mSensorInfo.mSensorName, 0x00, sizeof(pNewCamInfo->mHardInfo.mSensorInfo.mSensorName));
+ strcpy(pNewCamInfo->mHardInfo.mSensorInfo.mSensorName, UVC_CAM_NAME);
+ pNewCamInfo->mIsIommuEnabled = VPUClientGetIOMMUStatus();
+ #if 0
+ //DV
+ strcpy(pDVResolution->mName, "480p");
+ pDVResolution->mWidth = 640;
+ pDVResolution->mHeight = 480;
+ pDVResolution->mFps = 10;
+ pDVResolution->mIsSupport = 1;
+ pDVResolution->mResolution = ISI_RES_VGAP15;
+ pNewCamInfo->mSoftInfo.mDV_vector.add(pDVResolution);
+ #endif
+ //paremeters
+ pNewCamInfo->mSoftInfo.mAntiBandingConfig.mAntiBandingSupport = 0;
+ pNewCamInfo->mSoftInfo.mAntiBandingConfig.mDefault = 0;
+ pNewCamInfo->mSoftInfo.mAwbConfig.mAwbSupport = 0;
+ pNewCamInfo->mSoftInfo.mAwbConfig.mDefault = 0;
+ pNewCamInfo->mSoftInfo.mContinue_snapshot_config= 0;
+ pNewCamInfo->mSoftInfo.mEffectConfig.mEffectSupport = 0;
+ pNewCamInfo->mSoftInfo.mEffectConfig.mDefault = 0;
+ pNewCamInfo->mSoftInfo.mFlashConfig.mFlashSupport= 0;
+ pNewCamInfo->mSoftInfo.mFlashConfig.mDefault = 0;
+ pNewCamInfo->mSoftInfo.mFocusConfig.mFocusSupport= 0;
+ pNewCamInfo->mSoftInfo.mFocusConfig.mDefault = 0;
+ pNewCamInfo->mSoftInfo.mHDRConfig= 0;
+ pNewCamInfo->mSoftInfo.mSenceConfig.mSenceSupport= 0;
+ pNewCamInfo->mSoftInfo.mSenceConfig.mDefault = 0;
+ pNewCamInfo->mSoftInfo.mZSLConfig = 0;
+ pNewCamInfo->mSoftInfo.mInterpolationRes = 0;
+ //profiles->AddConnectUVCSensorToVector(pNewCamInfo, profiles);
+
+ CameraHal_SupportFmt[0] = V4L2_PIX_FMT_NV12;
+ CameraHal_SupportFmt[1] = V4L2_PIX_FMT_NV16;
+ #if CONFIG_CAMERA_UVC_MJPEG_SUPPORT
+ CameraHal_SupportFmt[2] = V4L2_PIX_FMT_MJPEG;
+ CameraHal_SupportFmt[3] = V4L2_PIX_FMT_YUYV;
+ CameraHal_SupportFmt[4] = V4L2_PIX_FMT_RGB565;
+ #else
+ CameraHal_SupportFmt[2] = V4L2_PIX_FMT_YUYV;
+ CameraHal_SupportFmt[3] = V4L2_PIX_FMT_RGB565;
+ CameraHal_SupportFmt[4] = 0x00;
+ #endif
+ CameraHal_SupportFmt[5] = 0x00;
+
+ memset(&fmtdesc, 0, sizeof(fmtdesc));
+ fmtdesc.index = 0;
+ fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ while (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
+ mCamDriverSupportFmt[fmtdesc.index] = fmtdesc.pixelformat;
+ LOG1("mCamDriverSupportFmt:(%c%c%c%c),index = %d",fmtdesc.pixelformat&0xFF,(fmtdesc.pixelformat>>8)&0xFF,
+ (fmtdesc.pixelformat>>16)&0xFF,(fmtdesc.pixelformat>>24)&0xFF,fmtdesc.index);
+ fmtdesc.index++;
+ }
+
+ i = 0;
+ while (CameraHal_SupportFmt[i]) {
+ LOG1("CameraHal_SupportFmt:fmt = %d,index = %d",CameraHal_SupportFmt[i],i);
+ j = 0;
+ while (mCamDriverSupportFmt[j]) {
+ if (mCamDriverSupportFmt[j] == CameraHal_SupportFmt[i]) {
+ break;
+ }
+ j++;
+ }
+ if (mCamDriverSupportFmt[j] == CameraHal_SupportFmt[i]) {
+ break;
+ }
+ i++;
+ }
+
+ if (CameraHal_SupportFmt[i] == 0x00) {
+ mCamDriverPreviewFmt = 0;
+ } else {
+ mCamDriverPreviewFmt = CameraHal_SupportFmt[i];
+ }
+
+ fsize.index = 0;
+ fsize.pixel_format = mCamDriverPreviewFmt;
+ String8 supportPreviewSizes;
+ char mediaProfile_Resolution[32],str_element[32];
+
+ while ((ret = ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &fsize)) == 0) {
+ if (fsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+ //if(fsize.discrete.width%16 || fsize.discrete.height%16)
+ //{
+ // fsize.index++;
+ // continue;
+ //}
+
+ if (fsize.discrete.width > sensor_resolution_w) {
+ sensor_resolution_w = fsize.discrete.width;
+ sensor_resolution_h = fsize.discrete.height;
+ }
+ memset(str_element,0x00,sizeof(str_element));
+ if (supportPreviewSizes.size() != 0)
+ str_element[0]=',';
+ sprintf((char*)(&str_element[strlen(str_element)]),"%d",fsize.discrete.width);
+ strcat(str_element, "x");
+ sprintf((char*)(&str_element[strlen(str_element)]),"%d",fsize.discrete.height);
+ supportPreviewSizes.append((const char*)str_element);
+ }
+ fsize.index++;
+ }
+ if(sensor_resolution_w == 0){
+ sensor_resolution_w = 640;
+ sensor_resolution_h = 480;
+ pDVResolution->mResolution = ISI_RES_VGAP15;
+ }
+ for(i=0; i<element_count; i++){
+ width = MediaProfile_Resolution[i][0];
+ height = MediaProfile_Resolution[i][1];
+ memset(&fival, 0, sizeof(fival));
+ fival.index = 0;
+ fival.pixel_format = fsize.pixel_format;
+ fival.width = width;
+ fival.height = height;
+ fival.reserved[1] = 0x00;
+ maxfps = 0;
+
+ rk_DV_info *pDVResolution = new rk_DV_info();
+ pDVResolution->mWidth = width;
+ pDVResolution->mHeight = height;
+ pDVResolution->mFps = 0;
+ memset(mediaProfile_Resolution,0x00,sizeof(mediaProfile_Resolution));
+ sprintf(mediaProfile_Resolution,"%dx%d",width,height);
+ if (strstr(supportPreviewSizes.string(), mediaProfile_Resolution) != NULL)
+ pDVResolution->mIsSupport = 1;
+ else
+ pDVResolution->mIsSupport = 0;
+
+ if ((width>sensor_resolution_w) || (height>sensor_resolution_h)) {
+ pNewCamInfo->mSoftInfo.mDV_vector.add(pDVResolution);
+ continue;
+ }
+ //LOGE("index = %d, pixel_format = %d, width = %d, height = %d",
+ // fival.index, fival.pixel_format, fival.width, fival.height);
+ while ((ret = ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
+ if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
+ pDVResolution->mFps = fival.discrete.denominator/fival.discrete.numerator;
+ //pDVResolution->mIsSupport = 1;
+ if (pDVResolution->mFps > maxfps)
+ maxfps = pDVResolution->mFps;
+ LOG1("%dx%d : %d %d/%d",fival.width,fival.height, pDVResolution->mFps,fival.discrete.denominator,fival.discrete.numerator);
+ }else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
+ break;
+ } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
+ break;
+ }
+ fival.index++;