improve tici camerad robustness (#2135)

* equalize performance on three threads

* need to request first

* clean up indexing

* obsolete

* handling skips and drops

* clean up repeated chunks

* use main frame id

* oneshot

* real skip check

* move to test

* should be cropped H here

Co-authored-by: Comma Device <device@comma.ai>
This commit is contained in:
ZwX1616 2020-09-14 16:05:57 -07:00 committed by GitHub
parent 3801374da9
commit 13588e4255
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 143 additions and 82 deletions

View file

@ -129,6 +129,16 @@ void release_fd(int video0_fd, uint32_t handle) {
release(video0_fd, handle);
}
// Flush every pending request on a camera request-manager link so the
// queue can be rebuilt from a known state (used to realign after skipped
// or dropped frames).
void clear_req_queue(int fd, int32_t session_hdl, int32_t link_hdl) {
  struct cam_req_mgr_flush_info req_mgr_flush_request = {0};
  req_mgr_flush_request.session_hdl = session_hdl;
  req_mgr_flush_request.link_hdl = link_hdl;
  req_mgr_flush_request.flush_type = CAM_REQ_MGR_FLUSH_TYPE_ALL;
  int ret = cam_control(fd, CAM_REQ_MGR_FLUSH_REQ, &req_mgr_flush_request, sizeof(req_mgr_flush_request));
  (void)ret;  // result only inspected via the debug log below; silence dead-store warning
  // LOGD("flushed all req: %d", ret);
}
// ************** high level camera helpers ****************
void sensors_poke(struct CameraState *s, int request_id) {
@ -479,15 +489,15 @@ void config_isp(struct CameraState *s, int io_mem_handle, int fence, int request
void enqueue_buffer(struct CameraState *s, int i) {
int ret;
int request_id = s->camera_bufs_metadata[i].frame_id;
int request_id = s->request_ids[i];
if (s->buf_handle[i]) {
release(s->video0_fd, s->buf_handle[i]);
// wait
// struct cam_sync_wait sync_wait = {0};
// sync_wait.sync_obj = s->sync_objs[i];
// sync_wait.timeout_ms = 100;
// ret = cam_control(s->video1_fd, CAM_SYNC_WAIT, &sync_wait, sizeof(sync_wait));
struct cam_sync_wait sync_wait = {0};
sync_wait.sync_obj = s->sync_objs[i];
sync_wait.timeout_ms = 50;
ret = cam_control(s->video1_fd, CAM_SYNC_WAIT, &sync_wait, sizeof(sync_wait));
// LOGD("fence wait: %d %d", ret, sync_wait.sync_obj);
// destroy old output fence
@ -531,6 +541,12 @@ void enqueue_buffer(struct CameraState *s, int i) {
config_isp(s, s->buf_handle[i], s->sync_objs[i], request_id, s->buf0_handle, 65632*(i+1));
}
// Issue n consecutive requests starting at id `start`: record each request
// id in its ring-buffer slot, then submit that slot to the ISP pipeline.
void enqueue_req_multi(struct CameraState *s, int start, int n) {
  for (int req_id = start; req_id < start + n; ++req_id) {
    int slot = (req_id - 1) % FRAME_BUF_COUNT;  // request ids are 1-based
    s->request_ids[slot] = req_id;
    enqueue_buffer(s, slot);
  }
}
// ******************* camera *******************
@ -563,9 +579,8 @@ static void camera_init(CameraState *s, int camera_id, int camera_num, unsigned
s->analog_gain_frac = 1.0;
s->analog_gain = 0x8;
s->exposure_time = 598;
s->frame_id = -1;
s->first = true;
//s->missed_frame = 0;
s->request_id_last = 0;
s->skipped = true;
}
static void camera_open(CameraState *s, VisionBuf* b) {
@ -781,6 +796,7 @@ static void camera_open(CameraState *s, VisionBuf* b) {
LOGD("start sensor: %d", ret);
ret = device_control(s->sensor_fd, CAM_START_DEV, s->session_handle, s->sensor_dev_handle);
enqueue_req_multi(s, 1, FRAME_BUF_COUNT);
}
void cameras_init(MultiCameraState *s) {
@ -791,7 +807,7 @@ void cameras_init(MultiCameraState *s) {
camera_init(&s->front, CAMERA_ID_AR0231, 2, 20);
printf("front initted \n");
#ifdef NOSCREEN
zsock_t *rgb_sock = zsock_new_push("tcp://192.168.200.51:7768");
zsock_t *rgb_sock = zsock_new_push("tcp://192.168.3.4:7768");
assert(rgb_sock);
s->rgb_sock = rgb_sock;
#endif
@ -904,26 +920,59 @@ static void cameras_close(MultiCameraState *s) {
#endif
}
// Local mirror of the payload delivered with the camera V4L2 event.
// NOTE(review): handle_camera_event() casts the event payload to
// struct cam_req_mgr_message instead of this struct — confirm whether
// this definition is still needed, or must be kept in sync with the
// kernel UAPI layout.
struct video_event_data {
  int32_t session_hdl;
  int32_t link_hdl;
  int32_t frame_id;   // hardware frame counter
  int32_t reserved;
  int64_t req_id;     // request id fulfilled by this frame
  uint64_t tv_sec;    // event timestamp, seconds part
  uint64_t tv_usec;   // event timestamp, microseconds part
};
// Per-camera V4L2 event handler: keeps the request queue full, detects
// skipped sensor frames and dropped requests, and dispatches completed
// buffers to the consumer thread.
void handle_camera_event(CameraState *s, void *evdat) {
  struct cam_req_mgr_message *event_data = (struct cam_req_mgr_message *)evdat;
  uint64_t timestamp = event_data->u.frame_msg.timestamp;
  int main_id = event_data->u.frame_msg.frame_id;    // free-running hardware frame counter
  int real_id = event_data->u.frame_msg.request_id;  // id of the request this frame fulfills; 0 if none

  if (real_id != 0) { // next ready
    // first fulfilled request: remember the offset between hardware frame ids
    // and our 1-based request ids so published frame ids start near 1
    if (real_id == 1) {s->idx_offset = main_id;}
    int buf_idx = (real_id - 1) % FRAME_BUF_COUNT;

    // check for skipped frames: a gap in the hardware counter means the
    // sensor produced frames we never got; flush and re-request once
    // (s->skipped guards against flushing repeatedly for the same gap)
    if (main_id > s->frame_id_last + 1 && !s->skipped) {
      // realign
      clear_req_queue(s->video0_fd, event_data->session_hdl, event_data->u.frame_msg.link_hdl);
      enqueue_req_multi(s, real_id + 1, FRAME_BUF_COUNT - 1);
      s->skipped = true;
    } else if (main_id == s->frame_id_last + 1) {
      s->skipped = false;
    }

    // check for dropped requests: a gap in request ids means some enqueued
    // requests never completed; re-enqueue them one ring-length ahead
    if (real_id > s->request_id_last + 1) {
      enqueue_req_multi(s, s->request_id_last + 1 + FRAME_BUF_COUNT, real_id - (s->request_id_last + 1));
    }

    // metas
    s->frame_id_last = main_id;
    s->request_id_last = real_id;
    s->camera_bufs_metadata[buf_idx].frame_id = main_id - s->idx_offset;  // published ids are offset-corrected
    s->camera_bufs_metadata[buf_idx].timestamp_eof = timestamp; // only has sof?
    s->request_ids[buf_idx] = real_id + FRAME_BUF_COUNT;  // next request this slot will carry

    // dispatch
    tbuffer_dispatch(&s->camera_tb, buf_idx);
  } else { // not ready
    // reset after half second of no response (10 hardware frames)
    if (main_id > s->frame_id_last + 10) {
      clear_req_queue(s->video0_fd, event_data->session_hdl, event_data->u.frame_msg.link_hdl);
      enqueue_req_multi(s, s->request_id_last + 1, FRAME_BUF_COUNT);
      s->frame_id_last = main_id;
      s->skipped = true;
    }
  }
}
void cameras_run(MultiCameraState *s) {
// start devices
LOG("-- Start devices");
sensors_i2c(&s->rear, start_reg_array, sizeof(start_reg_array)/sizeof(struct i2c_random_wr_payload),
CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
sensors_i2c(&s->wide, start_reg_array, sizeof(start_reg_array)/sizeof(struct i2c_random_wr_payload),
CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
sensors_i2c(&s->front, start_reg_array, sizeof(start_reg_array)/sizeof(struct i2c_random_wr_payload),
CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
int start_reg_len = sizeof(start_reg_array) / sizeof(struct i2c_random_wr_payload);
sensors_i2c(&s->rear, start_reg_array, start_reg_len, CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
sensors_i2c(&s->wide, start_reg_array, start_reg_len, CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
sensors_i2c(&s->front, start_reg_array, start_reg_len, CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
// poll events
LOG("-- Dequeueing Video events");
@ -946,42 +995,18 @@ void cameras_run(MultiCameraState *s) {
ret = ioctl(fds[0].fd, VIDIOC_DQEVENT, &ev);
if (ev.type == 0x8000000) {
struct cam_req_mgr_message *event_data = (struct cam_req_mgr_message *)ev.u.data;
uint64_t timestamp = event_data->u.frame_msg.timestamp;
// LOGD("v4l2 event: sess_hdl %d, link_hdl %d, frame_id %d, req_id %lld, timestamp 0x%llx, sof_status %d\n", event_data->session_hdl, event_data->u.frame_msg.link_hdl, event_data->u.frame_msg.frame_id, event_data->u.frame_msg.request_id, event_data->u.frame_msg.timestamp, event_data->u.frame_msg.sof_status);
// printf("sess_hdl %d, link_hdl %d, frame_id %lu, req_id %lu, timestamp 0x%lx, sof_status %d\n", event_data->session_hdl, event_data->u.frame_msg.link_hdl, event_data->u.frame_msg.frame_id, event_data->u.frame_msg.request_id, event_data->u.frame_msg.timestamp, event_data->u.frame_msg.sof_status);
if (event_data->u.frame_msg.request_id != 0 || (event_data->u.frame_msg.request_id == 0 &&
((s->rear.first && event_data->session_hdl == s->rear.req_mgr_session_info.session_hdl) ||
(s->wide.first && event_data->session_hdl == s->wide.req_mgr_session_info.session_hdl) ||
(s->front.first && event_data->session_hdl == s->front.req_mgr_session_info.session_hdl)))) {
if (event_data->session_hdl == s->rear.req_mgr_session_info.session_hdl) {
if (event_data->u.frame_msg.request_id > 0) {s->rear.first = false;}
s->rear.frame_id++;
//printf("rear %d\n", s->rear.frame_id);
int buf_idx = s->rear.frame_id % FRAME_BUF_COUNT;
s->rear.camera_bufs_metadata[buf_idx].frame_id = s->rear.frame_id;
s->rear.camera_bufs_metadata[buf_idx].timestamp_eof = timestamp; // only has sof?
tbuffer_dispatch(&s->rear.camera_tb, buf_idx);
} else if (event_data->session_hdl == s->wide.req_mgr_session_info.session_hdl) {
if (event_data->u.frame_msg.request_id > 0) {s->wide.first = false;}
s->wide.frame_id++;
//printf("wide %d\n", s->wide.frame_id);
int buf_idx = s->wide.frame_id % FRAME_BUF_COUNT;
s->wide.camera_bufs_metadata[buf_idx].frame_id = s->wide.frame_id;
s->wide.camera_bufs_metadata[buf_idx].timestamp_eof = timestamp;
tbuffer_dispatch(&s->wide.camera_tb, buf_idx);
} else if (event_data->session_hdl == s->front.req_mgr_session_info.session_hdl) {
if (event_data->u.frame_msg.request_id > 0) {s->front.first = false;}
s->front.frame_id++;
//printf("front %d\n", s->front.frame_id);
int buf_idx = s->front.frame_id % FRAME_BUF_COUNT;
s->front.camera_bufs_metadata[buf_idx].frame_id = s->front.frame_id;
s->front.camera_bufs_metadata[buf_idx].timestamp_eof = timestamp;
tbuffer_dispatch(&s->front.camera_tb, buf_idx);
} else {
printf("Unknown vidioc event source\n");
assert(false);
}
if (event_data->session_hdl == s->rear.req_mgr_session_info.session_hdl) {
handle_camera_event(&s->rear, event_data);
} else if (event_data->session_hdl == s->wide.req_mgr_session_info.session_hdl) {
handle_camera_event(&s->wide, event_data);
} else if (event_data->session_hdl == s->front.req_mgr_session_info.session_hdl) {
handle_camera_event(&s->front, event_data);
} else {
printf("Unknown vidioc event source\n");
assert(false);
}
}
}
@ -1034,16 +1059,18 @@ void camera_autoexposure(CameraState *s, float grey_frac) {
AG = AG * 4096 + AG * 256 + AG * 16 + AG;
// printf("cam %d gain_frac is %f, set AG to 0x%X, S to %d, dc %d \n", s->camera_num, s->analog_gain_frac, AG, s->exposure_time, s->dc_gain_enabled);
}
struct i2c_random_wr_payload exp_reg_array[] = {{0x3366, AG}, // analog gain
{0x3362, s->dc_gain_enabled?0x1:0x0}, // DC_GAIN
{0x305A, 0x00C4}, // red gain
{0x3058, 0x00B1}, // blue gain
{0x3056, 0x009A}, // g1 gain
{0x305C, 0x009A}, // g2 gain
{0x3012, s->exposure_time}, // integ time
{0x301A, 0x091C}}; // reset
{0x3012, s->exposure_time}}; // integ time
//{0x301A, 0x091C}}; // reset
sensors_i2c(s, exp_reg_array, sizeof(exp_reg_array)/sizeof(struct i2c_random_wr_payload),
CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
}
#ifdef NOSCREEN

View file

@ -15,7 +15,6 @@
#include "media/cam_req_mgr.h"
#define FRAME_BUF_COUNT 4
#define METADATA_BUF_COUNT 4
#ifdef __cplusplus
extern "C" {
@ -63,8 +62,11 @@ typedef struct CameraState {
int buf0_handle;
int buf_handle[FRAME_BUF_COUNT];
int sync_objs[FRAME_BUF_COUNT];
int frame_id;
bool first;
int request_ids[FRAME_BUF_COUNT];
int request_id_last;
int frame_id_last;
int idx_offset;
bool skipped;
struct cam_req_mgr_session_info req_mgr_session_info;

View file

@ -186,6 +186,7 @@ void* frontview_thread(void *arg) {
s->rhd_front = read_db_bool("IsRHD");
set_thread_name("frontview");
err = set_realtime_priority(51);
// we subscribe to this for placement of the AE metering box
// TODO: the loop is bad, ideally models shouldn't affect sensors
SubMaster sm({"driverState"});
@ -235,6 +236,7 @@ void* frontview_thread(void *arg) {
}
clWaitForEvents(1, &debayer_event);
clReleaseEvent(debayer_event);
tbuffer_release(&s->cameras.front.camera_tb, buf_idx);
visionbuf_sync(&s->rgb_front_bufs[ui_idx], VISIONBUF_SYNC_FROM_DEVICE);
@ -279,6 +281,7 @@ void* frontview_thread(void *arg) {
int x_end;
int y_start;
int y_end;
int skip = 1;
if (s->front_meteringbox_xmax > 0)
{
@ -297,11 +300,12 @@ void* frontview_thread(void *arg) {
#ifdef QCOM2
x_start = 0.15*s->rgb_front_width;
x_end = 0.85*s->rgb_front_width;
y_start = 0.15*s->rgb_front_height;
y_end = 0.85*s->rgb_front_height;
y_start = 0.5*s->rgb_front_height;
y_end = 0.75*s->rgb_front_height;
skip = 2;
#endif
uint32_t lum_binning[256] = {0,};
for (int y = y_start; y < y_end; ++y) {
for (int y = y_start; y < y_end; y += skip) {
for (int x = x_start; x < x_end; x += 2) { // every 2nd col
const uint8_t *pix = &bgr_front_ptr[y * s->rgb_front_stride + x * 3];
unsigned int lum = (unsigned int)pix[0] + pix[1] + pix[2];
@ -315,7 +319,7 @@ void* frontview_thread(void *arg) {
lum_binning[std::min(lum / 3, 255u)]++;
}
}
const unsigned int lum_total = (y_end - y_start) * (x_end - x_start)/2;
const unsigned int lum_total = (y_end - y_start) * (x_end - x_start) / 2 / skip;
unsigned int lum_cur = 0;
int lum_med = 0;
for (lum_med=0; lum_med<256; lum_med++) {
@ -385,7 +389,7 @@ void* wideview_thread(void *arg) {
set_thread_name("wideview");
err = set_realtime_priority(1);
err = set_realtime_priority(51);
LOG("setpriority returns %d", err);
// init cl stuff
@ -493,20 +497,20 @@ void* wideview_thread(void *arg) {
// auto exposure over big box
// TODO: fix this? should not use med imo
const int exposure_x = 240;
const int exposure_x = 384;
const int exposure_y = 300;
const int exposure_height = 600;
const int exposure_width = 1440;
const int exposure_height = 400;
const int exposure_width = 1152;
if (cnt % 3 == 0) {
// find median box luminance for AE
uint32_t lum_binning[256] = {0,};
for (int y=0; y<exposure_height; y++) {
for (int x=0; x<exposure_width; x++) {
for (int y=0; y<exposure_height; y+=2) {
for (int x=0; x<exposure_width; x+=2) {
uint8_t lum = yuv_ptr_y[((exposure_y+y)*s->yuv_wide_width) + exposure_x + x];
lum_binning[lum]++;
}
}
const unsigned int lum_total = exposure_height * exposure_width;
const unsigned int lum_total = exposure_height * exposure_width / 4;
unsigned int lum_cur = 0;
int lum_med = 0;
for (lum_med=0; lum_med<256; lum_med++) {
@ -837,26 +841,28 @@ void* processing_thread(void *arg) {
// auto exposure over big box
#ifdef QCOM2
const int exposure_x = 240;
const int exposure_x = 384;
const int exposure_y = 300;
const int exposure_height = 600;
const int exposure_width = 1440;
const int exposure_height = 400;
const int exposure_width = 1152;
const int skip = 2;
#else
const int exposure_x = 290;
const int exposure_y = 322;
const int exposure_height = 314;
const int exposure_width = 560;
const int skip = 1;
#endif
if (cnt % 3 == 0) {
// find median box luminance for AE
uint32_t lum_binning[256] = {0,};
for (int y=0; y<exposure_height; y++) {
for (int x=0; x<exposure_width; x++) {
for (int y=0; y<exposure_height; y+=skip) {
for (int x=0; x<exposure_width; x+=skip) {
uint8_t lum = yuv_ptr_y[((exposure_y+y)*s->yuv_width) + exposure_x + x];
lum_binning[lum]++;
}
}
const unsigned int lum_total = exposure_height * exposure_width;
const unsigned int lum_total = exposure_height * exposure_width / skip / skip;
unsigned int lum_cur = 0;
int lum_med = 0;
for (lum_med=0; lum_med<256; lum_med++) {

View file

@ -0,0 +1,27 @@
#!/usr/bin/env python3
# type: ignore
"""Monitor camera frame streams and report skipped frame ids.

Subscribes to the three camera frame sockets; on every update, compares the
new frameId against the previous one for that socket. Any gap greater than 1
is added to a running skip count and printed as:
    <socket name> <frames skipped this gap> <total frames skipped>
"""
import cereal.messaging as messaging

all_sockets = ['frame', 'frontFrame', 'wideFrame']

if __name__ == "__main__":
  sm = messaging.SubMaster(all_sockets)
  # per-socket state keyed by socket name (replaces parallel index-aligned lists)
  prev_id = {sock: None for sock in all_sockets}
  num_skipped = {sock: 0 for sock in all_sockets}
  while True:
    sm.update()
    for sock in all_sockets:
      if not sm.updated[sock]:
        continue
      this_id = sm[sock].frameId
      if prev_id[sock] is None:
        # first frame seen on this socket: nothing to compare yet
        prev_id[sock] = this_id
        continue
      dt = this_id - prev_id[sock]
      if dt != 1:
        num_skipped[sock] += dt - 1
        print(sock, dt - 1, num_skipped[sock])
      prev_id[sock] = this_id

View file

@ -53,7 +53,6 @@ DMonitoringResult dmonitoring_eval_frame(DMonitoringModelState* s, void* stream_
const int crop_x_offset = width - cropped_width;
const int crop_y_offset = 0;
#else
const int full_width_tici = 1928;
const int full_height_tici = 1208;
const int adapt_width_tici = 808;
@ -84,7 +83,7 @@ DMonitoringResult dmonitoring_eval_frame(DMonitoringModelState* s, void* stream_
uint8_t *premirror_cropped_y_buf = get_buffer(s->premirror_cropped_buf, cropped_width*cropped_height*3/2);
uint8_t *premirror_cropped_u_buf = premirror_cropped_y_buf + (cropped_width * cropped_height);
uint8_t *premirror_cropped_v_buf = premirror_cropped_u_buf + ((cropped_width/2) * (cropped_height/2));
for (int r = 0; r < height/2; r++) {
for (int r = 0; r < cropped_height/2; r++) {
memcpy(premirror_cropped_y_buf + (2*r)*cropped_width, raw_y_buf + (2*r + global_y_offset + crop_y_offset)*width + global_x_offset, cropped_width);
memcpy(premirror_cropped_y_buf + (2*r+1)*cropped_width, raw_y_buf + (2*r + global_y_offset + crop_y_offset + 1)*width + global_x_offset, cropped_width);
memcpy(premirror_cropped_u_buf + r*cropped_width/2, raw_u_buf + (r + (global_y_offset + crop_y_offset)/2)*width/2 + global_x_offset/2, cropped_width/2);