diff --git a/system/camerad/cameras/camera_common.h b/system/camerad/cameras/camera_common.h
index 555362ab8bc7ab..f97940b669fb63 100644
--- a/system/camerad/cameras/camera_common.h
+++ b/system/camerad/cameras/camera_common.h
@@ -18,9 +18,6 @@ enum CameraType {
 };
 
 // for debugging
-const bool env_disable_road = getenv("DISABLE_ROAD") != NULL;
-const bool env_disable_wide_road = getenv("DISABLE_WIDE_ROAD") != NULL;
-const bool env_disable_driver = getenv("DISABLE_DRIVER") != NULL;
 const bool env_debug_frames = getenv("DEBUG_FRAMES") != NULL;
 const bool env_log_raw_frames = getenv("LOG_RAW_FRAMES") != NULL;
 const bool env_ctrl_exp_from_params = getenv("CTRL_EXP_FROM_PARAMS") != NULL;
diff --git a/system/camerad/cameras/camera_qcom2.cc b/system/camerad/cameras/camera_qcom2.cc
index 47ea5ded4d8e0d..2eba1e36155542 100644
--- a/system/camerad/cameras/camera_qcom2.cc
+++ b/system/camerad/cameras/camera_qcom2.cc
@@ -26,6 +26,14 @@ const int MIPI_SETTLE_CNT = 33; // Calculated by camera_freqs.py
 
 extern ExitHandler do_exit;
 
+CameraState::CameraState(MultiCameraState *multi_camera_state, const CameraConfig &config)
+  : multi_cam_state(multi_camera_state),
+    camera_num(config.camera_num),
+    stream_type(config.stream_type),
+    focal_len(config.focal_len),
+    enabled(config.enabled) {
+}
+
 int CameraState::clear_req_queue() {
   struct cam_req_mgr_flush_info req_mgr_flush_request = {0};
   req_mgr_flush_request.session_hdl = session_handle;
@@ -434,39 +442,36 @@ void CameraState::sensor_set_parameters() {
   cur_ev[0] = cur_ev[1] = cur_ev[2] = (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight) * ci->sensor_analog_gains[gain_idx] * exposure_time;
 }
 
-void CameraState::camera_map_bufs(MultiCameraState *s) {
+void CameraState::camera_map_bufs() {
   for (int i = 0; i < FRAME_BUF_COUNT; i++) {
     // configure ISP to put the image in place
     struct cam_mem_mgr_map_cmd mem_mgr_map_cmd = {0};
-    mem_mgr_map_cmd.mmu_hdls[0] = s->device_iommu;
+    mem_mgr_map_cmd.mmu_hdls[0] = multi_cam_state->device_iommu;
     mem_mgr_map_cmd.num_hdl = 1;
     mem_mgr_map_cmd.flags = CAM_MEM_FLAG_HW_READ_WRITE;
     mem_mgr_map_cmd.fd = buf.camera_bufs[i].fd;
-    int ret = do_cam_control(s->video0_fd, CAM_REQ_MGR_MAP_BUF, &mem_mgr_map_cmd, sizeof(mem_mgr_map_cmd));
+    int ret = do_cam_control(multi_cam_state->video0_fd, CAM_REQ_MGR_MAP_BUF, &mem_mgr_map_cmd, sizeof(mem_mgr_map_cmd));
     LOGD("map buf req: (fd: %d) 0x%x %d", buf.camera_bufs[i].fd, mem_mgr_map_cmd.out.buf_handle, ret);
     buf_handle[i] = mem_mgr_map_cmd.out.buf_handle;
   }
   enqueue_req_multi(1, FRAME_BUF_COUNT, 0);
 }
 
-void CameraState::camera_init(MultiCameraState *s, VisionIpcServer * v, cl_device_id device_id, cl_context ctx, VisionStreamType yuv_type, float focal_len) {
+void CameraState::camera_init(VisionIpcServer * v, cl_device_id device_id, cl_context ctx) {
   if (!enabled) return;
 
   LOGD("camera init %d", camera_num);
   request_id_last = 0;
   skipped = true;
 
-  buf.init(device_id, ctx, this, v, FRAME_BUF_COUNT, yuv_type);
-  camera_map_bufs(s);
+  buf.init(device_id, ctx, this, v, FRAME_BUF_COUNT, stream_type);
+  camera_map_bufs();
   fl_pix = focal_len / ci->pixel_size_mm;
 
   set_exposure_rect();
 }
 
-void CameraState::camera_open(MultiCameraState *multi_cam_state_, int camera_num_, bool enabled_) {
-  multi_cam_state = multi_cam_state_;
-  camera_num = camera_num_;
-  enabled = enabled_;
+void CameraState::camera_open() {
   if (!enabled) return;
 
   sensor_fd = open_v4l_by_name_and_index("cam-sensor-driver", camera_num);
@@ -644,9 +649,9 @@ void CameraState::camera_open(MultiCameraState *multi_cam_state_, int camera_num_, bool enabled_) {
 }
 
 void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_id, cl_context ctx) {
-  s->driver_cam.camera_init(s, v, device_id, ctx, VISION_STREAM_DRIVER, DRIVER_FL_MM);
-  s->road_cam.camera_init(s, v, device_id, ctx, VISION_STREAM_ROAD, ROAD_FL_MM);
-  s->wide_road_cam.camera_init(s, v, device_id, ctx, VISION_STREAM_WIDE_ROAD, WIDE_FL_MM);
+  s->driver_cam.camera_init(v, device_id, ctx);
+  s->road_cam.camera_init(v, device_id, ctx);
+  s->wide_road_cam.camera_init(v, device_id, ctx);
 
   s->pm = new PubMaster({"roadCameraState", "driverCameraState", "wideRoadCameraState", "thumbnail"});
 }
@@ -690,11 +695,11 @@ void cameras_open(MultiCameraState *s) {
   ret = HANDLE_EINTR(ioctl(s->video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub));
   LOGD("req mgr subscribe: %d", ret);
 
-  s->driver_cam.camera_open(s, 2, !env_disable_driver);
+  s->driver_cam.camera_open();
   LOGD("driver camera opened");
-  s->road_cam.camera_open(s, 1, !env_disable_road);
+  s->road_cam.camera_open();
   LOGD("road camera opened");
-  s->wide_road_cam.camera_open(s, 0, !env_disable_wide_road);
+  s->wide_road_cam.camera_open();
   LOGD("wide road camera opened");
 }
 
@@ -956,6 +961,12 @@ void process_road_camera(MultiCameraState *s, CameraState *c, int cnt) {
   c->set_camera_exposure(set_exposure_target(b, c->ae_xywh, skip, skip));
 }
 
+MultiCameraState::MultiCameraState()
+  : driver_cam(this, DRIVER_CAMERA_CONFIG),
+    road_cam(this, ROAD_CAMERA_CONFIG),
+    wide_road_cam(this, WIDE_ROAD_CAMERA_CONFIG) {
+}
+
 void cameras_run(MultiCameraState *s) {
   LOG("-- Starting threads");
   std::vector<std::thread> threads;
diff --git a/system/camerad/cameras/camera_qcom2.h b/system/camerad/cameras/camera_qcom2.h
index 0b15c9c3f04f06..bdebeef9f80225 100644
--- a/system/camerad/cameras/camera_qcom2.h
+++ b/system/camerad/cameras/camera_qcom2.h
@@ -11,15 +11,41 @@
 
 #define FRAME_BUF_COUNT 4
 
-#define ROAD_FL_MM 8.0f
-#define WIDE_FL_MM 1.71f
-#define DRIVER_FL_MM 1.71f
+struct CameraConfig {
+  int camera_num;
+  VisionStreamType stream_type;
+  float focal_len;  // millimeters
+  bool enabled;
+};
+
+const CameraConfig WIDE_ROAD_CAMERA_CONFIG = {
+  .camera_num = 0,
+  .stream_type = VISION_STREAM_WIDE_ROAD,
+  .focal_len = 1.71,
+  .enabled = !getenv("DISABLE_WIDE_ROAD"),
+};
+
+const CameraConfig ROAD_CAMERA_CONFIG = {
+  .camera_num = 1,
+  .stream_type = VISION_STREAM_ROAD,
+  .focal_len = 8.0,
+  .enabled = !getenv("DISABLE_ROAD"),
+};
+
+const CameraConfig DRIVER_CAMERA_CONFIG = {
+  .camera_num = 2,
+  .stream_type = VISION_STREAM_DRIVER,
+  .focal_len = 1.71,
+  .enabled = !getenv("DISABLE_DRIVER"),
+};
 
 class CameraState {
 public:
   MultiCameraState *multi_cam_state;
   std::unique_ptr<const SensorInfo> ci;
   bool enabled;
+  VisionStreamType stream_type;
+  float focal_len;
 
   std::mutex exp_lock;
 
@@ -44,17 +70,18 @@ class CameraState {
   int camera_num;
   float fl_pix;
 
+  CameraState(MultiCameraState *multi_camera_state, const CameraConfig &config);
   void handle_camera_event(void *evdat);
   void update_exposure_score(float desired_ev, int exp_t, int exp_g_idx, float exp_gain);
   void set_camera_exposure(float grey_frac);
 
   void sensors_start();
 
-  void camera_open(MultiCameraState *multi_cam_state, int camera_num, bool enabled);
+  void camera_open();
   void set_exposure_rect();
   void sensor_set_parameters();
-  void camera_map_bufs(MultiCameraState *s);
-  void camera_init(MultiCameraState *s, VisionIpcServer *v, cl_device_id device_id, cl_context ctx, VisionStreamType yuv_type, float focal_len);
+  void camera_map_bufs();
+  void camera_init(VisionIpcServer *v, cl_device_id device_id, cl_context ctx);
   void camera_close();
 
   int32_t session_handle;
@@ -90,7 +117,10 @@ class CameraState {
   Params params;
 };
 
-typedef struct MultiCameraState {
+class MultiCameraState {
+public:
+  MultiCameraState();
+
   unique_fd video0_fd;
   unique_fd cam_sync_fd;
   unique_fd isp_fd;
@@ -102,4 +132,4 @@ typedef struct MultiCameraState {
   CameraState driver_cam;
 
   PubMaster *pm;
-} MultiCameraState;
+};
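
For reference (not part of the patch), a minimal standalone sketch of the pattern this diff introduces: the per-camera constants (camera index, vision stream, focal length, and the DISABLE_* override) move into a CameraConfig aggregate that is evaluated once, and CameraState/MultiCameraState take their wiring through constructors instead of late camera_open()/camera_init() arguments. The types below are simplified stand-ins, not the real camerad/visionipc declarations.

// Standalone sketch only -- simplified stand-ins for the camerad types above.
#include <cstdio>
#include <cstdlib>

enum VisionStreamType { VISION_STREAM_ROAD, VISION_STREAM_DRIVER, VISION_STREAM_WIDE_ROAD };

struct CameraConfig {
  int camera_num;
  VisionStreamType stream_type;
  float focal_len;  // millimeters
  bool enabled;
};

// Evaluated once during static initialization, like the configs in camera_qcom2.h,
// so the DISABLE_* environment variables are read a single time at startup.
const CameraConfig ROAD_CAMERA_CONFIG = {
  .camera_num = 1,
  .stream_type = VISION_STREAM_ROAD,
  .focal_len = 8.0,
  .enabled = !getenv("DISABLE_ROAD"),
};

class MultiCameraState;  // forward declaration

class CameraState {
public:
  // Only the parent pointer is stored; nothing on the still-under-construction
  // MultiCameraState is dereferenced, so passing this from its initializer list is safe.
  CameraState(MultiCameraState *multi_camera_state, const CameraConfig &config)
    : multi_cam_state(multi_camera_state),
      camera_num(config.camera_num),
      stream_type(config.stream_type),
      focal_len(config.focal_len),
      enabled(config.enabled) {}

  MultiCameraState *multi_cam_state;
  int camera_num;
  VisionStreamType stream_type;
  float focal_len;
  bool enabled;
};

class MultiCameraState {
public:
  MultiCameraState() : road_cam(this, ROAD_CAMERA_CONFIG) {}
  CameraState road_cam;
};

int main() {
  MultiCameraState s;
  printf("road cam %d: focal_len=%.2f mm, enabled=%d\n",
         s.road_cam.camera_num, s.road_cam.focal_len, s.road_cam.enabled);
  return 0;
}

As with the env_disable_* globals this patch removes, the DISABLE_* checks still happen once during static initialization, so toggling those variables after camerad has started has no effect.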