Clean up and test camerad refactor (#2310)

* refactor

add function get_camerabuf_by_type

remove cl_free

use camera_id

move front stuff into frontview_thread

bigger refactor

more

typo

rename camerabuf *c to *b

cleanup header files

remove wideview_thread

camera_qcom.c to c++

fix SConscript

camera_process_buf

space

sendrgb same as before

do autoexposure in processing_thread

correct sendrgb

add camera_common.cc

move yuv_transform to CameraBuf

* use C to keep consistent

* rebase master

* rebase master

* continue refactor

* refactor all camera-related data & functions out of main

 done

* cleanup

* -use buf.rgb_width&height

* remove ,

* rebase master

* remove unused std::vector

* add func common_camera_process_front

* fix max

* fix sendrgb

* less crap being logged

* fix indents

* add back thumbnails

* thumbnail fixed on tici

* junk junk junk

* unify api

* fix ptr ref

* lol!

* cleanup

* more simplified

* typos

* remove terminate

* use standard polling

* ops_thread refactor, too much code!

* fix build

* ok

* assert

* listen

* incl more

* wq!

* fix more build

* macos

* fix exp targets

* front is different

* const

* bye

* remath

* remove irrelevant

* do ops in a thread

* fix init

* return null

* QCOM2 ops

* qcom2 build

* missing comma

Co-authored-by: deanlee <deanlee3@gmail.com>
Co-authored-by: Comma Device <device@comma.ai>
ZwX1616 2020-10-16 21:58:05 -07:00 committed by GitHub
parent c5ec96680f
commit 6b020241c9
21 changed files with 1234 additions and 1967 deletions
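
The core of the refactor is visible in camera_common.{h,cc} below: per-camera buffering moves into a CameraBuf, and every camera is driven by the same acquire/callback/release loop. A minimal sketch of the resulting call pattern, assuming only the CameraBuf and start_process_thread API added in this diff (my_process_cb is a hypothetical callback; the PR's real ones are camera_process_frame and camera_process_front):

#include <thread>
#include "camera_common.h"  // CameraBuf, start_process_thread (added in this PR)

// hypothetical per-frame callback, invoked once per acquired frame
void my_process_cb(MultiCameraState *s, CameraState *c, int cnt) {
  // between buf.acquire() and buf.release() in processing_thread(),
  // c->buf.cur_rgb_buf and c->buf.cur_frame_data refer to the current frame
}

void run_cameras(MultiCameraState *s) {
  std::thread t = start_process_thread(s, "processing", &s->rear, 51, my_process_cb);
  // cameras_run(s) would pump frames into s->rear.buf here
  t.join();
}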

release/files_common

@@ -360,11 +360,11 @@ selfdrive/ui/android/*.hpp
selfdrive/camerad/SConscript
selfdrive/camerad/main.cc
selfdrive/camerad/bufs.h
selfdrive/camerad/snapshot/*
selfdrive/camerad/include/*
selfdrive/camerad/cameras/camera_common.h
selfdrive/camerad/cameras/camera_common.cc
selfdrive/camerad/cameras/camera_frame_stream.cc
selfdrive/camerad/cameras/camera_frame_stream.h
selfdrive/camerad/cameras/camera_qcom.cc

selfdrive/camerad/SConscript

@@ -1,6 +1,6 @@
Import('env', 'arch', 'cereal', 'messaging', 'common', 'gpucommon', 'visionipc', 'USE_WEBCAM', 'QCOM_REPLAY')
libs = ['m', 'pthread', common, 'jpeg', 'OpenCL', cereal, messaging, 'czmq', 'zmq', 'capnp', 'kj', visionipc, gpucommon]
libs = ['m', 'pthread', common, 'jpeg', 'OpenCL', cereal, messaging, 'zmq', 'capnp', 'kj', visionipc, gpucommon]
if arch == "aarch64":
libs += ['gsl', 'CB', 'adreno_utils', 'EGL', 'GLESv3', 'cutils', 'ui']
@@ -9,8 +9,8 @@ if arch == "aarch64":
else:
cameras = ['cameras/camera_qcom.cc']
elif arch == "larch64":
libs += []
cameras = ['cameras/camera_qcom2.c']
libs += ['atomic']
cameras = ['cameras/camera_qcom2.cc']
# no screen
# env = env.Clone()
# env.Append(CXXFLAGS = '-DNOSCREEN')
@@ -36,6 +36,7 @@ env.SharedLibrary('snapshot/visionipc',
env.Program('camerad', [
'main.cc',
'cameras/camera_common.cc',
'transforms/rgb_to_yuv.c',
'imgproc/utils.cc',
cameras,

selfdrive/camerad/bufs.h

@@ -1,8 +0,0 @@
#ifndef _SELFDRIVE_VISIOND_VISIOND_H_
#define _SELFDRIVE_VISIOND_VISIOND_H_
#include <inttypes.h>
typedef struct { uint8_t *y, *u, *v; } YUVBuf;
#endif // _SELFDRIVE_VISIOND_VISIOND_H_

selfdrive/camerad/cameras/camera_common.cc

@@ -0,0 +1,413 @@
#include <thread>
#include <stdio.h>
#include <signal.h>
#include <assert.h>
#include <unistd.h>
#if defined(QCOM) && !defined(QCOM_REPLAY)
#include "cameras/camera_qcom.h"
#elif QCOM2
#include "cameras/camera_qcom2.h"
#elif WEBCAM
#include "cameras/camera_webcam.h"
#else
#include "cameras/camera_frame_stream.h"
#endif
#include "camera_common.h"
#include <libyuv.h>
#include <jpeglib.h>
#include "clutil.h"
#include "common/params.h"
#include "common/swaglog.h"
#include "common/util.h"
#include "imgproc/utils.h"
static cl_program build_debayer_program(cl_device_id device_id, cl_context context, const CameraInfo *ci, const CameraBuf *b) {
char args[4096];
snprintf(args, sizeof(args),
"-cl-fast-relaxed-math -cl-denorms-are-zero "
"-DFRAME_WIDTH=%d -DFRAME_HEIGHT=%d -DFRAME_STRIDE=%d "
"-DRGB_WIDTH=%d -DRGB_HEIGHT=%d -DRGB_STRIDE=%d "
"-DBAYER_FLIP=%d -DHDR=%d",
ci->frame_width, ci->frame_height, ci->frame_stride,
b->rgb_width, b->rgb_height, b->rgb_stride,
ci->bayer_flip, ci->hdr);
#ifdef QCOM2
return CLU_LOAD_FROM_FILE(context, device_id, "cameras/real_debayer.cl", args);
#else
return CLU_LOAD_FROM_FILE(context, device_id, "cameras/debayer.cl", args);
#endif
}
void CameraBuf::init(cl_device_id device_id, cl_context context, CameraState *s, int frame_cnt,
const char *name, release_cb release_callback) {
const CameraInfo *ci = &s->ci;
camera_state = s;
frame_buf_count = frame_cnt;
frame_size = ci->frame_height * ci->frame_stride;
camera_bufs = std::make_unique<VisionBuf[]>(frame_buf_count);
camera_bufs_metadata = std::make_unique<FrameMetadata[]>(frame_buf_count);
for (int i = 0; i < frame_buf_count; i++) {
camera_bufs[i] = visionbuf_allocate_cl(frame_size, device_id, context);
}
rgb_width = ci->frame_width;
rgb_height = ci->frame_height;
#ifndef QCOM2
// debayering does a 2x downscale
if (ci->bayer) {
rgb_width = ci->frame_width / 2;
rgb_height = ci->frame_height / 2;
}
float db_s = 0.5;
#else
float db_s = 1.0;
#endif
if (ci->bayer) {
yuv_transform = transform_scale_buffer(s->transform, db_s);
} else {
yuv_transform = s->transform;
}
for (int i = 0; i < UI_BUF_COUNT; i++) {
VisionImg img = visionimg_alloc_rgb24(device_id, context, rgb_width, rgb_height, &rgb_bufs[i]);
if (i == 0) {
rgb_stride = img.stride;
}
}
tbuffer_init(&ui_tb, UI_BUF_COUNT, name);
tbuffer_init2(&camera_tb, frame_buf_count, "frame", release_callback, s);
// yuv back for recording and orbd
pool_init(&yuv_pool, YUV_COUNT);
yuv_tb = pool_get_tbuffer(&yuv_pool);
yuv_width = rgb_width;
yuv_height = rgb_height;
yuv_buf_size = rgb_width * rgb_height * 3 / 2;
for (int i = 0; i < YUV_COUNT; i++) {
yuv_ion[i] = visionbuf_allocate_cl(yuv_buf_size, device_id, context);
yuv_bufs[i].y = (uint8_t *)yuv_ion[i].addr;
yuv_bufs[i].u = yuv_bufs[i].y + (yuv_width * yuv_height);
yuv_bufs[i].v = yuv_bufs[i].u + (yuv_width / 2 * yuv_height / 2);
}
int err;
if (ci->bayer) {
cl_program prg_debayer = build_debayer_program(device_id, context, ci, this);
krnl_debayer = clCreateKernel(prg_debayer, "debayer10", &err);
assert(err == 0);
assert(clReleaseProgram(prg_debayer) == 0);
}
rgb_to_yuv_init(&rgb_to_yuv_state, context, device_id, yuv_width, yuv_height, rgb_stride);
#ifdef __APPLE__
q = clCreateCommandQueue(context, device_id, 0, &err);
#else
const cl_queue_properties props[] = {0}; //CL_QUEUE_PRIORITY_KHR, CL_QUEUE_PRIORITY_HIGH_KHR, 0};
q = clCreateCommandQueueWithProperties(context, device_id, props, &err);
#endif
assert(err == 0);
}
CameraBuf::~CameraBuf() {
for (int i = 0; i < frame_buf_count; i++) {
visionbuf_free(&camera_bufs[i]);
}
for (int i = 0; i < UI_BUF_COUNT; i++) {
visionbuf_free(&rgb_bufs[i]);
}
for (int i = 0; i < YUV_COUNT; i++) {
visionbuf_free(&yuv_ion[i]);
}
clReleaseKernel(krnl_debayer);
clReleaseCommandQueue(q);
}
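// acquire() below is the per-frame pipeline: take a filled camera buffer,
// debayer it on the GPU (or plain-copy it for non-Bayer sensors) into an RGB
// buffer, then queue the RGB-to-YUV conversion for downstream consumers.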
bool CameraBuf::acquire() {
const int buf_idx = tbuffer_acquire(&camera_tb);
if (buf_idx < 0) {
return false;
}
const FrameMetadata &frame_data = camera_bufs_metadata[buf_idx];
if (frame_data.frame_id == -1) {
LOGE("no frame data? wtf");
tbuffer_release(&camera_tb, buf_idx);
return false;
}
cur_frame_data = frame_data;
cur_rgb_idx = tbuffer_select(&ui_tb);
cur_rgb_buf = &rgb_bufs[cur_rgb_idx];
cl_event debayer_event;
cl_mem camerabuf_cl = camera_bufs[buf_idx].buf_cl;
if (camera_state->ci.bayer) {
assert(clSetKernelArg(krnl_debayer, 0, sizeof(cl_mem), &camerabuf_cl) == 0);
assert(clSetKernelArg(krnl_debayer, 1, sizeof(cl_mem), &cur_rgb_buf->buf_cl) == 0);
#ifdef QCOM2
assert(clSetKernelArg(krnl_debayer, 2, camera_state->debayer_cl_localMemSize, 0) == 0);
assert(clEnqueueNDRangeKernel(q, krnl_debayer, 2, NULL,
camera_state->debayer_cl_globalWorkSize, camera_state->debayer_cl_localWorkSize,
0, 0, &debayer_event) == 0);
#else
float digital_gain = camera_state->digital_gain;
if ((int)digital_gain == 0) {
digital_gain = 1.0;
}
assert(clSetKernelArg(krnl_debayer, 2, sizeof(float), &digital_gain) == 0);
const size_t debayer_work_size = rgb_height; // doesn't divide evenly, is this okay?
const size_t debayer_local_work_size = 128;
assert(clEnqueueNDRangeKernel(q, krnl_debayer, 1, NULL,
&debayer_work_size, &debayer_local_work_size, 0, 0, &debayer_event) == 0);
#endif
} else {
assert(cur_rgb_buf->len >= frame_size);
assert(rgb_stride == camera_state->ci.frame_stride);
assert(clEnqueueCopyBuffer(q, camerabuf_cl, cur_rgb_buf->buf_cl, 0, 0,
cur_rgb_buf->len, 0, 0, &debayer_event) == 0);
}
clWaitForEvents(1, &debayer_event);
clReleaseEvent(debayer_event);
tbuffer_release(&camera_tb, buf_idx);
visionbuf_sync(cur_rgb_buf, VISIONBUF_SYNC_FROM_DEVICE);
cur_yuv_idx = pool_select(&yuv_pool);
yuv_metas[cur_yuv_idx] = frame_data;
rgb_to_yuv_queue(&rgb_to_yuv_state, q, cur_rgb_buf->buf_cl, yuv_ion[cur_yuv_idx].buf_cl);
visionbuf_sync(&yuv_ion[cur_yuv_idx], VISIONBUF_SYNC_FROM_DEVICE);
// keep another reference around till we're done processing
pool_acquire(&yuv_pool, cur_yuv_idx);
pool_push(&yuv_pool, cur_yuv_idx);
return true;
}
void CameraBuf::release() {
tbuffer_dispatch(&ui_tb, cur_rgb_idx);
pool_release(&yuv_pool, cur_yuv_idx);
}
void CameraBuf::stop() {
tbuffer_stop(&ui_tb);
tbuffer_stop(&camera_tb);
pool_stop(&yuv_pool);
}
// common functions
void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data, uint32_t cnt) {
framed.setFrameId(frame_data.frame_id);
framed.setEncodeId(cnt);
framed.setTimestampEof(frame_data.timestamp_eof);
framed.setFrameLength(frame_data.frame_length);
framed.setIntegLines(frame_data.integ_lines);
framed.setGlobalGain(frame_data.global_gain);
framed.setLensPos(frame_data.lens_pos);
framed.setLensSag(frame_data.lens_sag);
framed.setLensErr(frame_data.lens_err);
framed.setLensTruePos(frame_data.lens_true_pos);
framed.setGainFrac(frame_data.gain_frac);
}
void create_thumbnail(MultiCameraState *s, CameraState *c, uint8_t *bgr_ptr) {
const CameraBuf *b = &c->buf;
uint8_t* thumbnail_buffer = NULL;
unsigned long thumbnail_len = 0;
unsigned char *row = (unsigned char *)malloc(b->rgb_width/4*3);
struct jpeg_compress_struct cinfo;
struct jpeg_error_mgr jerr;
cinfo.err = jpeg_std_error(&jerr);
jpeg_create_compress(&cinfo);
jpeg_mem_dest(&cinfo, &thumbnail_buffer, &thumbnail_len);
cinfo.image_width = b->rgb_width / 4;
cinfo.image_height = b->rgb_height / 4;
cinfo.input_components = 3;
cinfo.in_color_space = JCS_RGB;
jpeg_set_defaults(&cinfo);
#ifndef __APPLE__
jpeg_set_quality(&cinfo, 50, true);
jpeg_start_compress(&cinfo, true);
#else
jpeg_set_quality(&cinfo, 50, static_cast<boolean>(true) );
jpeg_start_compress(&cinfo, static_cast<boolean>(true) );
#endif
JSAMPROW row_pointer[1];
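// 4x box downscale: each output pixel averages 8 of the 16 source pixels in
// its 4x4 block (two columns x four rows); (2-k) swaps BGR to RGB.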
for (int ii = 0; ii < b->rgb_height/4; ii+=1) {
for (int j = 0; j < b->rgb_width*3; j+=12) {
for (int k = 0; k < 3; k++) {
uint16_t dat = 0;
int i = ii * 4;
dat += bgr_ptr[b->rgb_stride*i + j + k];
dat += bgr_ptr[b->rgb_stride*i + j+3 + k];
dat += bgr_ptr[b->rgb_stride*(i+1) + j + k];
dat += bgr_ptr[b->rgb_stride*(i+1) + j+3 + k];
dat += bgr_ptr[b->rgb_stride*(i+2) + j + k];
dat += bgr_ptr[b->rgb_stride*(i+2) + j+3 + k];
dat += bgr_ptr[b->rgb_stride*(i+3) + j + k];
dat += bgr_ptr[b->rgb_stride*(i+3) + j+3 + k];
row[(j/4) + (2-k)] = dat/8;
}
}
row_pointer[0] = row;
jpeg_write_scanlines(&cinfo, row_pointer, 1);
}
free(row);
jpeg_finish_compress(&cinfo);
MessageBuilder msg;
auto thumbnaild = msg.initEvent().initThumbnail();
thumbnaild.setFrameId(b->cur_frame_data.frame_id);
thumbnaild.setTimestampEof(b->cur_frame_data.timestamp_eof);
thumbnaild.setThumbnail(kj::arrayPtr((const uint8_t*)thumbnail_buffer, thumbnail_len));
if (s->pm != NULL) {
s->pm->send("thumbnail", msg);
}
}
void set_exposure_target(CameraState *c, const uint8_t *pix_ptr, bool front, int x_start, int x_end, int x_skip, int y_start, int y_end, int y_skip) {
const CameraBuf *b = &c->buf;
uint32_t lum_binning[256] = {0};
for (int y = y_start; y < y_end; y += y_skip) {
for (int x = x_start; x < x_end; x += x_skip) {
if (!front) {
uint8_t lum = pix_ptr[((y_start + y) * b->yuv_width) + x_start + x];
lum_binning[lum]++;
} else {
const uint8_t *pix = &pix_ptr[y * b->rgb_stride + x * 3];
unsigned int lum = (unsigned int)(pix[0] + pix[1] + pix[2]);
lum_binning[std::min(lum / 3, 255u)]++;
}
}
}
unsigned int lum_total = (y_end - y_start) * (x_end - x_start) / x_skip / y_skip;
unsigned int lum_cur = 0;
int lum_med = 0;
int lum_med_alt = 0;
for (lum_med=255; lum_med>=0; lum_med--) {
lum_cur += lum_binning[lum_med];
#ifdef QCOM2
bool reach_hlc_perc = false;
if (c->camera_num == 0) { // wide
reach_hlc_perc = lum_cur > 2*lum_total / (3*HLC_A);
} else {
reach_hlc_perc = lum_cur > lum_total / HLC_A;
}
if (reach_hlc_perc && lum_med > HLC_THRESH) {
lum_med_alt = 86;
}
#endif
if (lum_cur >= lum_total / 2) {
break;
}
}
lum_med = lum_med_alt>lum_med?lum_med_alt:lum_med;
camera_autoexposure(c, lum_med / 256.0);
}
extern volatile sig_atomic_t do_exit;
void *processing_thread(MultiCameraState *cameras, const char *tname,
CameraState *cs, int priority, process_thread_cb callback) {
set_thread_name(tname);
int err = set_realtime_priority(priority);
LOG("%s start! setpriority returns %d", tname, err);
for (int cnt = 0; !do_exit; cnt++) {
if (!cs->buf.acquire()) continue;
callback(cameras, cs, cnt);
cs->buf.release();
}
return NULL;
}
std::thread start_process_thread(MultiCameraState *cameras, const char *tname,
CameraState *cs, int priority, process_thread_cb callback) {
return std::thread(processing_thread, cameras, tname, cs, priority, callback);
}
void common_camera_process_front(SubMaster *sm, PubMaster *pm, CameraState *c, int cnt) {
const CameraBuf *b = &c->buf;
static int meteringbox_xmin = 0, meteringbox_xmax = 0;
static int meteringbox_ymin = 0, meteringbox_ymax = 0;
static const bool rhd_front = Params().read_db_bool("IsRHD");
sm->update(0);
if (sm->updated("driverState")) {
auto state = (*sm)["driverState"].getDriverState();
float face_prob = state.getFaceProb();
float face_position[2];
face_position[0] = state.getFacePosition()[0];
face_position[1] = state.getFacePosition()[1];
// set front camera metering target
if (face_prob > 0.4) {
int x_offset = rhd_front ? 0:b->rgb_width - 0.5 * b->rgb_height;
meteringbox_xmin = x_offset + (face_position[0] + 0.5) * (0.5 * b->rgb_height) - 72;
meteringbox_xmax = x_offset + (face_position[0] + 0.5) * (0.5 * b->rgb_height) + 72;
meteringbox_ymin = (face_position[1] + 0.5) * (b->rgb_height) - 72;
meteringbox_ymax = (face_position[1] + 0.5) * (b->rgb_height) + 72;
} else { // use default setting if no face
meteringbox_ymin = b->rgb_height * 1 / 3;
meteringbox_ymax = b->rgb_height * 1;
meteringbox_xmin = rhd_front ? 0:b->rgb_width * 3 / 5;
meteringbox_xmax = rhd_front ? b->rgb_width * 2 / 5:b->rgb_width;
}
}
// auto exposure
if (cnt % 3 == 0) {
// use driver face crop for AE
int x_start, x_end, y_start, y_end;
int skip = 1;
if (meteringbox_xmax > 0) {
x_start = std::max(0, meteringbox_xmin);
x_end = std::min(b->rgb_width - 1, meteringbox_xmax);
y_start = std::max(0, meteringbox_ymin);
y_end = std::min(b->rgb_height - 1, meteringbox_ymax);
} else {
y_start = b->rgb_height * 1 / 3;
y_end = b->rgb_height * 1;
x_start = rhd_front ? 0 : b->rgb_width * 3 / 5;
x_end = rhd_front ? b->rgb_width * 2 / 5 : b->rgb_width;
}
#ifdef QCOM2
x_start = 96;
x_end = 1832;
y_start = 242;
y_end = 1148;
skip = 4;
#endif
set_exposure_target(c, (const uint8_t *)b->cur_rgb_buf->addr, 1, x_start, x_end, 2, y_start, y_end, skip);
}
MessageBuilder msg;
auto framed = msg.initEvent().initFrontFrame();
framed.setFrameType(cereal::FrameData::FrameType::FRONT);
fill_frame_data(framed, b->cur_frame_data, cnt);
pm->send("frontFrame", msg);
}

selfdrive/camerad/cameras/camera_common.h

@@ -1,8 +1,17 @@
#ifndef CAMERA_COMMON_H
#define CAMERA_COMMON_H
#pragma once
#include <stdint.h>
#include <stdbool.h>
#include <stdint.h>
#include <memory>
#include <thread>
#include "common/buffering.h"
#include "common/mat.h"
#include "common/swaglog.h"
#include "common/visionbuf.h"
#include "common/visionimg.h"
#include "imgproc/utils.h"
#include "messaging.hpp"
#include "transforms/rgb_to_yuv.h"
#include "common/visionipc.h"
@@ -17,16 +26,14 @@
#define CAMERA_ID_AR0231 8
#define CAMERA_ID_MAX 9
#define UI_BUF_COUNT 4
#define YUV_COUNT 40
#define LOG_CAMERA_ID_FCAMERA 0
#define LOG_CAMERA_ID_DCAMERA 1
#define LOG_CAMERA_ID_ECAMERA 2
#define LOG_CAMERA_ID_QCAMERA 3
#define LOG_CAMERA_ID_MAX 4
#ifdef __cplusplus
extern "C" {
#endif
typedef struct CameraInfo {
const char* name;
int frame_width, frame_height;
@@ -62,10 +69,67 @@ typedef struct FrameMetadata {
float gain_frac;
} FrameMetadata;
typedef struct CameraExpInfo {
int op_id;
float grey_frac;
} CameraExpInfo;
extern CameraInfo cameras_supported[CAMERA_ID_MAX];
#ifdef __cplusplus
}
#endif
typedef struct {
uint8_t *y, *u, *v;
} YUVBuf;
#endif
struct MultiCameraState;
struct CameraState;
typedef void (*release_cb)(void *cookie, int buf_idx);
class CameraBuf {
public:
CameraState *camera_state;
cl_kernel krnl_debayer;
cl_command_queue q;
Pool yuv_pool;
VisionBuf yuv_ion[YUV_COUNT];
YUVBuf yuv_bufs[YUV_COUNT];
FrameMetadata yuv_metas[YUV_COUNT];
size_t yuv_buf_size;
int yuv_width, yuv_height;
RGBToYUVState rgb_to_yuv_state;
int rgb_width, rgb_height, rgb_stride;
VisionBuf rgb_bufs[UI_BUF_COUNT];
mat3 yuv_transform;
int cur_yuv_idx, cur_rgb_idx;
FrameMetadata cur_frame_data;
VisionBuf *cur_rgb_buf;
std::unique_ptr<VisionBuf[]> camera_bufs;
std::unique_ptr<FrameMetadata[]> camera_bufs_metadata;
TBuffer camera_tb, ui_tb;
TBuffer *yuv_tb; // only for visionserver
CameraBuf() = default;
~CameraBuf();
void init(cl_device_id device_id, cl_context context, CameraState *s, int frame_cnt,
const char *name = "frame", release_cb relase_callback = nullptr);
bool acquire();
void release();
void stop();
int frame_buf_count;
int frame_size;
};
typedef void (*process_thread_cb)(MultiCameraState *s, CameraState *c, int cnt);
void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data, uint32_t cnt);
void create_thumbnail(MultiCameraState *s, CameraState *c, uint8_t *bgr_ptr);
void set_exposure_target(CameraState *c, const uint8_t *pix_ptr, bool front, int x_start, int x_end, int x_skip, int y_start, int y_end, int y_skip);
std::thread start_process_thread(MultiCameraState *cameras, const char *tname,
CameraState *cs, int priority, process_thread_cb callback);
void common_camera_process_front(SubMaster *sm, PubMaster *pm, CameraState *c, int cnt);

selfdrive/camerad/cameras/camera_frame_stream.cc

@@ -23,41 +23,35 @@ extern volatile sig_atomic_t do_exit;
#define FRAME_HEIGHT 874
namespace {
void camera_open(CameraState *s, VisionBuf *camera_bufs, bool rear) {
assert(camera_bufs);
s->camera_bufs = camera_bufs;
void camera_open(CameraState *s, bool rear) {
}
void camera_close(CameraState *s) {
tbuffer_stop(&s->camera_tb);
s->buf.stop();
}
void camera_release_buffer(void *cookie, int buf_idx) {}
void camera_init(CameraState *s, int camera_id, unsigned int fps) {
void camera_init(CameraState *s, int camera_id, unsigned int fps, cl_device_id device_id, cl_context ctx) {
assert(camera_id < ARRAYSIZE(cameras_supported));
s->ci = cameras_supported[camera_id];
assert(s->ci.frame_width != 0);
s->frame_size = s->ci.frame_height * s->ci.frame_stride;
s->fps = fps;
tbuffer_init2(&s->camera_tb, FRAME_BUF_COUNT, "frame", camera_release_buffer, s);
s->buf.init(device_id, ctx, s, FRAME_BUF_COUNT, "camera");
}
void run_frame_stream(MultiCameraState *s) {
SubMaster sm({"frame"});
s->sm = new SubMaster({"frame"});
CameraState *const rear_camera = &s->rear;
auto *tb = &rear_camera->camera_tb;
auto *tb = &rear_camera->buf.camera_tb;
while (!do_exit) {
if (sm.update(1000) == 0) continue;
if (s->sm->update(1000) == 0) continue;
auto frame = sm["frame"].getFrame();
auto frame = (*(s->sm))["frame"].getFrame();
const int buf_idx = tbuffer_select(tb);
rear_camera->camera_bufs_metadata[buf_idx] = {
rear_camera->buf.camera_bufs_metadata[buf_idx] = {
.frame_id = frame.getFrameId(),
.timestamp_eof = frame.getTimestampEof(),
.frame_length = static_cast<unsigned>(frame.getFrameLength()),
@@ -65,8 +59,8 @@ void run_frame_stream(MultiCameraState *s) {
.global_gain = static_cast<unsigned>(frame.getGlobalGain()),
};
cl_command_queue q = rear_camera->camera_bufs[buf_idx].copy_q;
cl_mem yuv_cl = rear_camera->camera_bufs[buf_idx].buf_cl;
cl_command_queue q = rear_camera->buf.camera_bufs[buf_idx].copy_q;
cl_mem yuv_cl = rear_camera->buf.camera_bufs[buf_idx].buf_cl;
clEnqueueWriteBuffer(q, yuv_cl, CL_TRUE, 0, frame.getImage().size(), frame.getImage().begin(), 0, NULL, NULL);
tbuffer_dispatch(tb, buf_idx);
@@ -93,15 +87,15 @@ CameraInfo cameras_supported[CAMERA_ID_MAX] = {
},
};
void cameras_init(MultiCameraState *s) {
camera_init(&s->rear, CAMERA_ID_IMX298, 20);
void cameras_init(MultiCameraState *s, cl_device_id device_id, cl_context ctx) {
camera_init(&s->rear, CAMERA_ID_IMX298, 20, device_id, ctx);
s->rear.transform = (mat3){{
1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0,
}};
camera_init(&s->front, CAMERA_ID_OV8865, 10);
camera_init(&s->front, CAMERA_ID_OV8865, 10, device_id, ctx);
s->front.transform = (mat3){{
1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
@@ -111,25 +105,27 @@ void cameras_init(MultiCameraState *s) {
void camera_autoexposure(CameraState *s, float grey_frac) {}
void cameras_open(MultiCameraState *s, VisionBuf *camera_bufs_rear,
VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats,
VisionBuf *camera_bufs_front) {
assert(camera_bufs_rear);
assert(camera_bufs_front);
void cameras_open(MultiCameraState *s) {
// LOG("*** open front ***");
camera_open(&s->front, camera_bufs_front, false);
camera_open(&s->front, false);
// LOG("*** open rear ***");
camera_open(&s->rear, camera_bufs_rear, true);
camera_open(&s->rear, true);
}
void cameras_close(MultiCameraState *s) {
camera_close(&s->rear);
}
// called by processing_thread
void camera_process_rear(MultiCameraState *s, CameraState *c, int cnt) {
// empty
}
void cameras_run(MultiCameraState *s) {
std::thread t = start_process_thread(s, "processing", &s->rear, 51, camera_process_rear);
set_thread_name("frame_streaming");
run_frame_stream(s);
cameras_close(s);
t.join();
}

selfdrive/camerad/cameras/camera_frame_stream.h

@@ -1,5 +1,4 @@
#ifndef CAMERA_FRAME_STREAM_H
#define CAMERA_FRAME_STREAM_H
#pragma once
#include <stdbool.h>
@@ -10,50 +9,35 @@
#include <CL/cl.h>
#endif
#include "common/mat.h"
#include "buffering.h"
#include "common/visionbuf.h"
#include "camera_common.h"
#define FRAME_BUF_COUNT 16
#ifdef __cplusplus
extern "C" {
#endif
typedef struct CameraState {
int camera_id;
CameraInfo ci;
int frame_size;
VisionBuf *camera_bufs;
FrameMetadata camera_bufs_metadata[FRAME_BUF_COUNT];
TBuffer camera_tb;
int fps;
float digital_gain;
float cur_gain_frac;
mat3 transform;
} CameraState;
CameraBuf buf;
} CameraState;
typedef struct MultiCameraState {
int ispif_fd;
CameraState rear;
CameraState front;
SubMaster *sm;
PubMaster *pm;
} MultiCameraState;
void cameras_init(MultiCameraState *s);
void cameras_open(MultiCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front);
void cameras_init(MultiCameraState *s, cl_device_id device_id, cl_context ctx);
void cameras_open(MultiCameraState *s);
void cameras_run(MultiCameraState *s);
void cameras_close(MultiCameraState *s);
void camera_autoexposure(CameraState *s, float grey_frac);
#ifdef __cplusplus
} // extern "C"
#endif
#endif

selfdrive/camerad/cameras/camera_qcom.cc

@@ -4,8 +4,10 @@
#include <assert.h>
#include <unistd.h>
#include <fcntl.h>
#include <math.h>
#include <poll.h>
#include <sys/ioctl.h>
#include <atomic>
#include <linux/media.h>
@@ -22,6 +24,7 @@
#include "common/timing.h"
#include "common/swaglog.h"
#include "common/params.h"
#include "clutil.h"
#include "cereal/gen/cpp/log.capnp.h"
@@ -30,21 +33,12 @@
#include "camera_qcom.h"
// enable this to run the camera at 60fps and sample every third frame
// supposed to reduce 33ms of lag, but no results
//#define HIGH_FPS
#define CAMERA_MSG_AUTOEXPOSE 0
typedef struct CameraMsg {
int type;
int camera_num;
float grey_frac;
} CameraMsg;
extern volatile sig_atomic_t do_exit;
// global var for AE/AF ops
std::atomic<CameraExpInfo> rear_exp{{0}};
std::atomic<CameraExpInfo> front_exp{{0}};
CameraInfo cameras_supported[CAMERA_ID_MAX] = {
[CAMERA_ID_IMX298] = {
.frame_width = 2328,
@@ -106,14 +100,13 @@ static void camera_release_buffer(void* cookie, int buf_idx) {
static void camera_init(CameraState *s, int camera_id, int camera_num,
uint32_t pixel_clock, uint32_t line_length_pclk,
unsigned int max_gain, unsigned int fps) {
unsigned int max_gain, unsigned int fps, cl_device_id device_id, cl_context ctx) {
s->camera_num = camera_num;
s->camera_id = camera_id;
assert(camera_id < ARRAYSIZE(cameras_supported));
s->ci = cameras_supported[camera_id];
assert(s->ci.frame_width != 0);
s->frame_size = s->ci.frame_height * s->ci.frame_stride;
s->pixel_clock = pixel_clock;
s->line_length_pclk = line_length_pclk;
@@ -122,12 +115,7 @@ static void camera_init(CameraState *s, int camera_id, int camera_num,
s->self_recover = 0;
s->ops_sock = zsock_new_push(">inproc://cameraops");
assert(s->ops_sock);
s->ops_sock_handle = zsock_resolve(s->ops_sock);
tbuffer_init2(&s->camera_tb, FRAME_BUF_COUNT, "frame",
camera_release_buffer, s);
s->buf.init(device_id, ctx, s, FRAME_BUF_COUNT, "frame", camera_release_buffer);
pthread_mutex_init(&s->frame_info_lock, NULL);
}
@@ -233,12 +221,6 @@ static int imx179_s5k3p8sp_apply_exposure(CameraState *s, int gain, int integ_li
//printf("front camera: %d %d %d\n", gain, integ_lines, frame_length);
int err;
if (gain > 448) {
s->digital_gain = (512.0/(512-(gain))) / 8.0;
} else {
s->digital_gain = 1.0;
}
struct msm_camera_i2c_reg_array reg_array[] = {
{0x104,0x1,0},
@@ -258,7 +240,18 @@ static int imx179_s5k3p8sp_apply_exposure(CameraState *s, int gain, int integ_li
return err;
}
void cameras_init(MultiCameraState *s) {
cl_program build_conv_program(cl_device_id device_id, cl_context context, int image_w, int image_h, int filter_size) {
char args[4096];
snprintf(args, sizeof(args),
"-cl-fast-relaxed-math -cl-denorms-are-zero "
"-DIMAGE_W=%d -DIMAGE_H=%d -DFLIP_RB=%d "
"-DFILTER_SIZE=%d -DHALF_FILTER_SIZE=%d -DTWICE_HALF_FILTER_SIZE=%d -DHALF_FILTER_SIZE_IMAGE_W=%d",
image_w, image_h, 1,
filter_size, filter_size/2, (filter_size/2)*2, (filter_size/2)*image_w);
return CLU_LOAD_FROM_FILE(context, device_id, "imgproc/conv.cl", args);
}
void cameras_init(MultiCameraState *s, cl_device_id device_id, cl_context ctx) {
char project_name[1024] = {0};
property_get("ro.boot.project_name", project_name, "");
@@ -296,29 +289,29 @@ void cameras_init(MultiCameraState *s) {
camera_init(&s->rear, CAMERA_ID_IMX298, 0,
/*pixel_clock=*/600000000, /*line_length_pclk=*/5536,
/*max_gain=*/510, //0 (ISO 100)- 448 (ISO 800, max analog gain) - 511 (super noisy)
/*max_gain=*/510, //0 (ISO 100)- 448 (ISO 800, max analog gain) - 511 (super noisy)
#ifdef HIGH_FPS
/*fps*/60
/*fps*/ 60,
#else
/*fps*/20
/*fps*/ 20,
#endif
);
device_id, ctx);
s->rear.apply_exposure = imx298_apply_exposure;
if (s->device == DEVICE_OP3T) {
camera_init(&s->front, CAMERA_ID_S5K3P8SP, 1,
/*pixel_clock=*/560000000, /*line_length_pclk=*/5120,
/*max_gain=*/510, 10);
/*max_gain=*/510, 10, device_id, ctx);
s->front.apply_exposure = imx179_s5k3p8sp_apply_exposure;
} else if (s->device == DEVICE_LP3) {
camera_init(&s->front, CAMERA_ID_OV8865, 1,
/*pixel_clock=*/72000000, /*line_length_pclk=*/1602,
/*max_gain=*/510, 10);
/*max_gain=*/510, 10, device_id, ctx);
s->front.apply_exposure = ov8865_apply_exposure;
} else {
camera_init(&s->front, CAMERA_ID_IMX179, 1,
/*pixel_clock=*/251200000, /*line_length_pclk=*/3440,
/*max_gain=*/224, 20);
/*max_gain=*/224, 20, device_id, ctx);
s->front.apply_exposure = imx179_s5k3p8sp_apply_exposure;
}
@@ -338,6 +331,40 @@ void cameras_init(MultiCameraState *s) {
s->rear.device = s->device;
s->front.device = s->device;
s->sm = new SubMaster({"driverState", "sensorEvents"});
s->pm = new PubMaster({"frame", "frontFrame", "thumbnail"});
int err;
const int rgb_width = s->rear.buf.rgb_width;
const int rgb_height = s->rear.buf.rgb_height;
for (int i = 0; i < FRAME_BUF_COUNT; i++) {
// TODO: make lengths correct
s->focus_bufs[i] = visionbuf_allocate(0xb80);
s->stats_bufs[i] = visionbuf_allocate(0xb80);
}
s->prg_rgb_laplacian = build_conv_program(device_id, ctx, rgb_width/NUM_SEGMENTS_X, rgb_height/NUM_SEGMENTS_Y, 3);
s->krnl_rgb_laplacian = clCreateKernel(s->prg_rgb_laplacian, "rgb2gray_conv2d", &err);
assert(err == 0);
// TODO: Removed CL_MEM_SVM_FINE_GRAIN_BUFFER, confirm it doesn't matter
s->rgb_conv_roi_cl = clCreateBuffer(ctx, CL_MEM_READ_WRITE,
rgb_width/NUM_SEGMENTS_X * rgb_height/NUM_SEGMENTS_Y * 3 * sizeof(uint8_t), NULL, NULL);
s->rgb_conv_result_cl = clCreateBuffer(ctx, CL_MEM_READ_WRITE,
rgb_width/NUM_SEGMENTS_X * rgb_height/NUM_SEGMENTS_Y * sizeof(int16_t), NULL, NULL);
s->rgb_conv_filter_cl = clCreateBuffer(ctx, CL_MEM_READ_ONLY | CL_MEM_COPY_HOST_PTR,
9 * sizeof(int16_t), (void*)&lapl_conv_krnl, NULL);
s->conv_cl_localMemSize = ( CONV_LOCAL_WORKSIZE + 2 * (3 / 2) ) * ( CONV_LOCAL_WORKSIZE + 2 * (3 / 2) );
s->conv_cl_localMemSize *= 3 * sizeof(uint8_t);
s->conv_cl_globalWorkSize[0] = rgb_width/NUM_SEGMENTS_X;
s->conv_cl_globalWorkSize[1] = rgb_height/NUM_SEGMENTS_Y;
s->conv_cl_localWorkSize[0] = CONV_LOCAL_WORKSIZE;
s->conv_cl_localWorkSize[1] = CONV_LOCAL_WORKSIZE;
for (int i=0; i<ARRAYSIZE(s->lapres); i++) {s->lapres[i] = 16160;}
const size_t size = (rgb_width/NUM_SEGMENTS_X)*(rgb_height/NUM_SEGMENTS_Y);
s->rgb_roi_buf = std::make_unique<uint8_t[]>(size*3);
s->conv_result = std::make_unique<int16_t[]>(size);
}
static void set_exposure(CameraState *s, float exposure_frac, float gain_frac) {
@@ -442,21 +469,10 @@ static void do_autoexposure(CameraState *s, float grey_frac) {
}
}
void camera_autoexposure(CameraState *s, float grey_frac) {
CameraMsg msg = {
.type = CAMERA_MSG_AUTOEXPOSE,
.camera_num = s->camera_num,
.grey_frac = grey_frac,
};
zmq_send(s->ops_sock_handle, &msg, sizeof(msg), ZMQ_DONTWAIT);
}
static uint8_t* get_eeprom(int eeprom_fd, size_t *out_len) {
int err;
struct msm_eeprom_cfg_data cfg;
memset(&cfg, 0, sizeof(struct msm_eeprom_cfg_data));
struct msm_eeprom_cfg_data cfg = {};
cfg.cfgtype = CFG_EEPROM_GET_CAL_DATA;
err = ioctl(eeprom_fd, VIDIOC_MSM_EEPROM_CFG, &cfg);
assert(err >= 0);
@@ -556,8 +572,7 @@ static void sensors_init(MultiCameraState *s) {
}
assert(sensorinit_fd >= 0);
struct sensor_init_cfg_data sensor_init_cfg;
memset(&sensor_init_cfg, 0, sizeof(struct sensor_init_cfg_data));
struct sensor_init_cfg_data sensor_init_cfg = {};
// init rear sensor
@@ -1073,31 +1088,17 @@ static void sensors_init(MultiCameraState *s) {
static void camera_open(CameraState *s, bool rear) {
int err;
struct sensorb_cfg_data sensorb_cfg_data;
memset(&sensorb_cfg_data, 0, sizeof(struct sensorb_cfg_data));
struct csid_cfg_data csid_cfg_data;
memset(&csid_cfg_data, 0, sizeof(struct csid_cfg_data));
struct csiphy_cfg_data csiphy_cfg_data;
memset(&csiphy_cfg_data, 0, sizeof(struct csiphy_cfg_data));
struct msm_camera_csiphy_params csiphy_params;
memset(&csiphy_params, 0, sizeof(struct msm_camera_csiphy_params));
struct msm_camera_csid_params csid_params;
memset(&csid_params, 0, sizeof(struct msm_camera_csid_params));
struct msm_vfe_input_cfg input_cfg;
memset(&input_cfg, 0, sizeof(struct msm_vfe_input_cfg));
struct msm_vfe_axi_stream_update_cmd update_cmd;
memset(&update_cmd, 0, sizeof(struct msm_vfe_axi_stream_update_cmd));
struct v4l2_event_subscription sub;
memset(&sub, 0, sizeof(struct v4l2_event_subscription));
struct ispif_cfg_data ispif_cfg_data;
memset(&ispif_cfg_data, 0, sizeof(struct ispif_cfg_data));
struct msm_vfe_cfg_cmd_list cfg_cmd_list;
memset(&cfg_cmd_list, 0, sizeof(struct msm_vfe_cfg_cmd_list));
struct sensorb_cfg_data sensorb_cfg_data = {};
struct csid_cfg_data csid_cfg_data = {};
struct csiphy_cfg_data csiphy_cfg_data = {};
struct msm_camera_csiphy_params csiphy_params = {};
struct msm_camera_csid_params csid_params = {};
struct msm_vfe_input_cfg input_cfg = {};
struct msm_vfe_axi_stream_update_cmd update_cmd = {};
struct v4l2_event_subscription sub = {};
struct msm_actuator_cfg_data actuator_cfg_data;
memset(&actuator_cfg_data, 0, sizeof(struct msm_actuator_cfg_data));
struct msm_ois_cfg_data ois_cfg_data;
memset(&ois_cfg_data, 0, sizeof(struct msm_ois_cfg_data));
struct msm_actuator_cfg_data actuator_cfg_data = {};
struct msm_ois_cfg_data ois_cfg_data = {};
// open devices
const char *sensor_dev;
@@ -1817,6 +1818,19 @@ static void do_autofocus(CameraState *s) {
actuator_move(s, target);
}
void camera_autoexposure(CameraState *s, float grey_frac) {
if (s->camera_num == 0) {
CameraExpInfo tmp = rear_exp.load();
tmp.op_id++;
tmp.grey_frac = grey_frac;
rear_exp.store(tmp);
} else {
CameraExpInfo tmp = front_exp.load();
tmp.op_id++;
tmp.grey_frac = grey_frac;
front_exp.store(tmp);
}
}
static void front_start(CameraState *s) {
int err;
@@ -1827,16 +1841,10 @@ static void front_start(CameraState *s) {
LOG("sensor start regs: %d", err);
}
void cameras_open(MultiCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front) {
void cameras_open(MultiCameraState *s) {
int err;
struct ispif_cfg_data ispif_cfg_data;
memset(&ispif_cfg_data, 0, sizeof(struct ispif_cfg_data));
struct msm_ispif_param_data ispif_params;
memset(&ispif_params, 0, sizeof(struct msm_ispif_param_data));
struct ispif_cfg_data ispif_cfg_data = {};
struct msm_ispif_param_data ispif_params = {};
ispif_params.num = 4;
// rear camera
ispif_params.entries[0].vfe_intf = VFE0;
@@ -1863,9 +1871,6 @@ void cameras_open(MultiCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *c
ispif_params.entries[3].cids[0] = CID2;
ispif_params.entries[3].csid = CSID0;
assert(camera_bufs_rear);
assert(camera_bufs_front);
s->msmcfg_fd = open("/dev/media0", O_RDWR | O_NONBLOCK);
assert(s->msmcfg_fd >= 0);
@@ -1889,13 +1894,13 @@ void cameras_open(MultiCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *c
// LOG("ispif stop: %d", err);
LOG("*** open front ***");
s->front.ss[0].bufs = camera_bufs_front;
s->front.ss[0].bufs = s->front.buf.camera_bufs.get();
camera_open(&s->front, false);
LOG("*** open rear ***");
s->rear.ss[0].bufs = camera_bufs_rear;
s->rear.ss[1].bufs = camera_bufs_focus;
s->rear.ss[2].bufs = camera_bufs_stats;
s->rear.ss[0].bufs = s->rear.buf.camera_bufs.get();
s->rear.ss[1].bufs = s->focus_bufs;
s->rear.ss[2].bufs = s->stats_bufs;
camera_open(&s->rear, true);
if (getenv("CAMERA_TEST")) {
@@ -1936,7 +1941,7 @@ void cameras_open(MultiCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *c
static void camera_close(CameraState *s) {
int err;
tbuffer_stop(&s->camera_tb);
s->buf.stop();
// ISP: STOP_STREAM
s->stream_cfg.cmd = STOP_STREAM;
@@ -1958,8 +1963,6 @@ static void camera_close(CameraState *s) {
}
free(s->eeprom);
zsock_destroy(&s->ops_sock);
}
@@ -2006,107 +2009,152 @@ static FrameMetadata get_frame_metadata(CameraState *s, uint32_t frame_id) {
};
}
static void ops_term() {
zsock_t *ops_sock = zsock_new_push(">inproc://cameraops");
assert(ops_sock);
CameraMsg msg = {.type = -1};
zmq_send(zsock_resolve(ops_sock), &msg, sizeof(msg), ZMQ_DONTWAIT);
zsock_destroy(&ops_sock);
}
static void* ops_thread(void* arg) {
int err;
MultiCameraState *s = (MultiCameraState*)arg;
int rear_op_id_last = 0;
int front_op_id_last = 0;
CameraExpInfo rear_op;
CameraExpInfo front_op;
set_thread_name("camera_settings");
zsock_t *cameraops = zsock_new_pull("@inproc://cameraops");
assert(cameraops);
while(!do_exit) {
rear_op = rear_exp.load();
if (rear_op.op_id != rear_op_id_last) {
do_autoexposure(&s->rear, rear_op.grey_frac);
do_autofocus(&s->rear);
rear_op_id_last = rear_op.op_id;
}
zsock_t *terminate = zsock_new_sub(">inproc://terminate", "");
assert(terminate);
front_op = front_exp.load();
if (front_op.op_id != front_op_id_last) {
do_autoexposure(&s->front, front_op.grey_frac);
front_op_id_last = front_op.op_id;
}
zpoller_t *poller = zpoller_new(cameraops, terminate, NULL);
assert(poller);
usleep(50000);
}
SubMaster sm({"sensorEvents"}); // msgq submaster
return NULL;
}
while (!do_exit) {
// zmq handling
zsock_t *which = (zsock_t*)zpoller_wait(poller, -1);
if (which == terminate) {
break;
} else if (which != NULL) {
void* sockraw = zsock_resolve(which);
void camera_process_front(MultiCameraState *s, CameraState *c, int cnt) {
common_camera_process_front(s->sm, s->pm, c, cnt);
}
if (which == cameraops) {
zmq_msg_t msg;
err = zmq_msg_init(&msg);
assert(err == 0);
// called by processing_thread
void camera_process_frame(MultiCameraState *s, CameraState *c, int cnt) {
const CameraBuf *b = &c->buf;
// cache rgb roi and write to cl
err = zmq_msg_recv(&msg, sockraw, 0);
if (err >= 0) {
CameraMsg cmsg;
if (zmq_msg_size(&msg) == sizeof(cmsg)) {
memcpy(&cmsg, zmq_msg_data(&msg), zmq_msg_size(&msg));
//LOGD("cameraops %d", cmsg.type);
if (cmsg.type == CAMERA_MSG_AUTOEXPOSE) {
if (cmsg.camera_num == 0) {
do_autoexposure(&s->rear, cmsg.grey_frac);
do_autofocus(&s->rear);
} else {
do_autoexposure(&s->front, cmsg.grey_frac);
}
} else if (cmsg.type == -1) {
break;
}
}
} else {
// skip if zmq is interrupted by msgq
int err_no = zmq_errno();
assert(err_no == EINTR || err_no == EAGAIN);
// gz compensation
s->sm->update(0);
if (s->sm->updated("sensorEvents")) {
float vals[3] = {0.0};
bool got_accel = false;
auto sensor_events = (*(s->sm))["sensorEvents"].getSensorEvents();
for (auto sensor_event : sensor_events) {
if (sensor_event.which() == cereal::SensorEventData::ACCELERATION) {
auto v = sensor_event.getAcceleration().getV();
if (v.size() < 3) {
continue;
}
zmq_msg_close(&msg);
for (int j = 0; j < 3; j++) {
vals[j] = v[j];
}
got_accel = true;
break;
}
}
// msgq handling
if (sm.update(0) > 0) {
float vals[3] = {0.0};
bool got_accel = false;
auto sensor_events = sm["sensorEvents"].getSensorEvents();
for (auto sensor_event : sensor_events) {
if (sensor_event.which() == cereal::SensorEventData::ACCELERATION) {
auto v = sensor_event.getAcceleration().getV();
if (v.size() < 3) {
continue; //wtf
}
for (int j = 0; j < 3; j++) {
vals[j] = v[j];
}
got_accel = true;
break;
}
}
uint64_t ts = nanos_since_boot();
if (got_accel && ts - s->rear.last_sag_ts > 10000000) { // 10 ms
s->rear.last_sag_ts = ts;
s->rear.last_sag_acc_z = -vals[2];
}
uint64_t ts = nanos_since_boot();
if (got_accel && ts - s->rear.last_sag_ts > 10000000) { // 10 ms
s->rear.last_sag_ts = ts;
s->rear.last_sag_acc_z = -vals[2];
}
}
zpoller_destroy(&poller);
zsock_destroy(&cameraops);
zsock_destroy(&terminate);
// sharpness scores
int roi_id = cnt % ARRAYSIZE(s->lapres); // rolling roi
int roi_x_offset = roi_id % (ROI_X_MAX-ROI_X_MIN+1);
int roi_y_offset = roi_id / (ROI_X_MAX-ROI_X_MIN+1);
return NULL;
for (int r=0;r<(b->rgb_height/NUM_SEGMENTS_Y);r++) {
memcpy(s->rgb_roi_buf.get() + r * (b->rgb_width/NUM_SEGMENTS_X) * 3,
(uint8_t *) b->cur_rgb_buf->addr + \
(ROI_Y_MIN + roi_y_offset) * b->rgb_height/NUM_SEGMENTS_Y * FULL_STRIDE_X * 3 + \
(ROI_X_MIN + roi_x_offset) * b->rgb_width/NUM_SEGMENTS_X * 3 + r * FULL_STRIDE_X * 3,
b->rgb_width/NUM_SEGMENTS_X * 3);
}
assert(clEnqueueWriteBuffer(b->q, s->rgb_conv_roi_cl, true, 0,
b->rgb_width / NUM_SEGMENTS_X * b->rgb_height / NUM_SEGMENTS_Y * 3 * sizeof(uint8_t), s->rgb_roi_buf.get(), 0, 0, 0) == 0);
assert(clSetKernelArg(s->krnl_rgb_laplacian, 0, sizeof(cl_mem), (void *)&s->rgb_conv_roi_cl) == 0);
assert(clSetKernelArg(s->krnl_rgb_laplacian, 1, sizeof(cl_mem), (void *)&s->rgb_conv_result_cl) == 0);
assert(clSetKernelArg(s->krnl_rgb_laplacian, 2, sizeof(cl_mem), (void *)&s->rgb_conv_filter_cl) == 0);
assert(clSetKernelArg(s->krnl_rgb_laplacian, 3, s->conv_cl_localMemSize, 0) == 0);
cl_event conv_event;
assert(clEnqueueNDRangeKernel(b->q, s->krnl_rgb_laplacian, 2, NULL,
s->conv_cl_globalWorkSize, s->conv_cl_localWorkSize, 0, 0, &conv_event) == 0);
clWaitForEvents(1, &conv_event);
clReleaseEvent(conv_event);
assert(clEnqueueReadBuffer(b->q, s->rgb_conv_result_cl, true, 0,
b->rgb_width / NUM_SEGMENTS_X * b->rgb_height / NUM_SEGMENTS_Y * sizeof(int16_t), s->conv_result.get(), 0, 0, 0) == 0);
get_lapmap_one(s->conv_result.get(), &s->lapres[roi_id], b->rgb_width / NUM_SEGMENTS_X, b->rgb_height / NUM_SEGMENTS_Y);
// setup self recover
const float lens_true_pos = s->rear.lens_true_pos;
std::atomic<int>& self_recover = s->rear.self_recover;
if (is_blur(&s->lapres[0]) &&
(lens_true_pos < (s->rear.device == DEVICE_LP3 ? LP3_AF_DAC_DOWN : OP3T_AF_DAC_DOWN) + 1 ||
lens_true_pos > (s->rear.device == DEVICE_LP3 ? LP3_AF_DAC_UP : OP3T_AF_DAC_UP) - 1) &&
self_recover < 2) {
// truly stuck, needs help
self_recover -= 1;
if (self_recover < -FOCUS_RECOVER_PATIENCE) {
LOGD("rear camera bad state detected. attempting recovery from %.1f, recover state is %d",
lens_true_pos, self_recover.load());
self_recover = FOCUS_RECOVER_STEPS + ((lens_true_pos < (s->rear.device == DEVICE_LP3 ? LP3_AF_DAC_M : OP3T_AF_DAC_M)) ? 1 : 0); // parity determined by which end is stuck at
}
} else if ((lens_true_pos < (s->rear.device == DEVICE_LP3 ? LP3_AF_DAC_M - LP3_AF_DAC_3SIG : OP3T_AF_DAC_M - OP3T_AF_DAC_3SIG) ||
lens_true_pos > (s->rear.device == DEVICE_LP3 ? LP3_AF_DAC_M + LP3_AF_DAC_3SIG : OP3T_AF_DAC_M + OP3T_AF_DAC_3SIG)) &&
self_recover < 2) {
// in suboptimal position with high prob, but may still recover by itself
self_recover -= 1;
if (self_recover < -(FOCUS_RECOVER_PATIENCE * 3)) {
self_recover = FOCUS_RECOVER_STEPS / 2 + ((lens_true_pos < (s->rear.device == DEVICE_LP3 ? LP3_AF_DAC_M : OP3T_AF_DAC_M)) ? 1 : 0);
}
} else if (self_recover < 0) {
self_recover += 1; // reset if fine
}
{
MessageBuilder msg;
auto framed = msg.initEvent().initFrame();
fill_frame_data(framed, b->cur_frame_data, cnt);
framed.setFocusVal(kj::ArrayPtr<const int16_t>(&s->rear.focus[0], NUM_FOCUS));
framed.setFocusConf(kj::ArrayPtr<const uint8_t>(&s->rear.confidence[0], NUM_FOCUS));
framed.setSharpnessScore(kj::ArrayPtr<const uint16_t>(&s->lapres[0], ARRAYSIZE(s->lapres)));
framed.setRecoverState(self_recover);
framed.setTransform(kj::ArrayPtr<const float>(&b->yuv_transform.v[0], 9));
s->pm->send("frame", msg);
}
if (cnt % 100 == 3) {
create_thumbnail(s, c, (uint8_t*)b->cur_rgb_buf->addr);
}
const int exposure_x = 290;
const int exposure_y = 322;
const int exposure_width = 560;
const int exposure_height = 314;
const int skip = 1;
if (cnt % 3 == 0) {
set_exposure_target(c, (const uint8_t *)b->yuv_bufs[b->cur_yuv_idx].y, 0, exposure_x, exposure_x + exposure_width, skip, exposure_y, exposure_y + exposure_height, skip);
}
}
void cameras_run(MultiCameraState *s) {
@@ -2116,6 +2164,9 @@ void cameras_run(MultiCameraState *s) {
err = pthread_create(&ops_thread_handle, NULL,
ops_thread, s);
assert(err == 0);
std::vector<std::thread> threads;
threads.push_back(start_process_thread(s, "processing", &s->rear, 51, camera_process_frame));
threads.push_back(start_process_thread(s, "frontview", &s->front, 51, camera_process_front));
CameraState* cameras[2] = {&s->rear, &s->front};
@@ -2173,8 +2224,8 @@ void cameras_run(MultiCameraState *s) {
//printf("divert: %d %d %d\n", i, buffer, buf_idx);
if (buffer == 0) {
c->camera_bufs_metadata[buf_idx] = get_frame_metadata(c, isp_event_data->frame_id);
tbuffer_dispatch(&c->camera_tb, buf_idx);
c->buf.camera_bufs_metadata[buf_idx] = get_frame_metadata(c, isp_event_data->frame_id);
tbuffer_dispatch(&c->buf.camera_tb, buf_idx);
} else {
uint8_t *d = (uint8_t*)(c->ss[buffer].bufs[buf_idx].addr);
if (buffer == 1) {
@@ -2214,14 +2265,27 @@ void cameras_run(MultiCameraState *s) {
LOG(" ************** STOPPING **************");
ops_term();
err = pthread_join(ops_thread_handle, NULL);
assert(err == 0);
cameras_close(s);
for (auto &t : threads) t.join();
}
void cameras_close(MultiCameraState *s) {
camera_close(&s->rear);
camera_close(&s->front);
for (int i = 0; i < FRAME_BUF_COUNT; i++) {
visionbuf_free(&s->focus_bufs[i]);
visionbuf_free(&s->stats_bufs[i]);
}
clReleaseMemObject(s->rgb_conv_roi_cl);
clReleaseMemObject(s->rgb_conv_result_cl);
clReleaseMemObject(s->rgb_conv_filter_cl);
clReleaseProgram(s->prg_rgb_laplacian);
clReleaseKernel(s->krnl_rgb_laplacian);
delete s->sm;
delete s->pm;
}
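
The removed zmq "cameraops" socket is replaced by a lock-free handoff: camera_autoexposure() bumps an op counter inside a std::atomic<CameraExpInfo>, and ops_thread() polls every 50 ms, acting only when the counter changed, so a burst of requests collapses to the newest one. A standalone sketch of the pattern, assuming a single producer per atomic as in this file (likely also why the larch64 build gains the 'atomic' library in the SConscript above):

#include <atomic>

struct ExpInfo { int op_id; float grey_frac; };  // trivially copyable, like CameraExpInfo
static std::atomic<ExpInfo> exp_state{ExpInfo{0, 0.0f}};

// producer (cf. camera_autoexposure): publish the newest request; last writer wins
void publish_exposure(float grey_frac) {
  ExpInfo tmp = exp_state.load();  // safe: this thread is the only writer
  tmp.op_id++;                     // a changed op_id marks the value as fresh
  tmp.grey_frac = grey_frac;
  exp_state.store(tmp);
}

// consumer (cf. ops_thread): act only when op_id has moved since the last poll
void poll_exposure_once(int *last_op_id) {
  ExpInfo cur = exp_state.load();
  if (cur.op_id != *last_op_id) {
    // do_autoexposure(&s->rear, cur.grey_frac) runs here in the PR
    *last_op_id = cur.op_id;
  }
}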

selfdrive/camerad/cameras/camera_qcom.h

@@ -3,7 +3,7 @@
#include <stdint.h>
#include <stdbool.h>
#include <pthread.h>
#include <czmq.h>
#include <memory>
#include <atomic>
#include "messaging.hpp"
@@ -40,10 +40,6 @@
#define FOCUS_RECOVER_PATIENCE 50 // 2.5 seconds of complete blur
#define FOCUS_RECOVER_STEPS 240 // 6 seconds
#ifdef __cplusplus
extern "C" {
#endif
typedef struct CameraState CameraState;
typedef int (*camera_apply_exposure_func)(CameraState *s, int gain, int integ_lines, int frame_length);
@@ -59,13 +55,9 @@ typedef struct CameraState {
int camera_num;
int camera_id;
CameraInfo ci;
int frame_size;
int device;
void* ops_sock_handle;
zsock_t * ops_sock;
uint32_t pixel_clock;
uint32_t line_length_pclk;
unsigned int max_gain;
@@ -85,8 +77,6 @@ typedef struct CameraState {
uint8_t *eeprom;
// uint32_t camera_bufs_ids[FRAME_BUF_COUNT];
FrameMetadata camera_bufs_metadata[FRAME_BUF_COUNT];
TBuffer camera_tb;
pthread_mutex_t frame_info_lock;
FrameMetadata frame_metadata[METADATA_BUF_COUNT];
@@ -121,6 +111,8 @@ typedef struct CameraState {
int fps;
mat3 transform;
CameraBuf buf;
} CameraState;
@@ -131,19 +123,35 @@ typedef struct MultiCameraState {
unique_fd msmcfg_fd;
unique_fd v4l_fd;
cl_mem rgb_conv_roi_cl, rgb_conv_result_cl, rgb_conv_filter_cl;
uint16_t lapres[(ROI_X_MAX-ROI_X_MIN+1)*(ROI_Y_MAX-ROI_Y_MIN+1)];
VisionBuf focus_bufs[FRAME_BUF_COUNT];
VisionBuf stats_bufs[FRAME_BUF_COUNT];
cl_program prg_rgb_laplacian;
cl_kernel krnl_rgb_laplacian;
std::unique_ptr<uint8_t[]> rgb_roi_buf;
std::unique_ptr<int16_t[]> conv_result;
int conv_cl_localMemSize;
size_t conv_cl_globalWorkSize[2];
size_t conv_cl_localWorkSize[2];
CameraState rear;
CameraState front;
SubMaster *sm;
PubMaster *pm;
} MultiCameraState;
void cameras_init(MultiCameraState *s);
void cameras_open(MultiCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front);
void cameras_init(MultiCameraState *s, cl_device_id device_id, cl_context ctx);
void cameras_open(MultiCameraState *s);
void cameras_run(MultiCameraState *s);
void cameras_close(MultiCameraState *s);
void camera_autoexposure(CameraState *s, float grey_frac);
void actuator_move(CameraState *s, uint16_t target);
int sensor_write_regs(CameraState *s, struct msm_camera_i2c_reg_array* arr, size_t size, int data_type);
#ifdef __cplusplus
} // extern "C"
#endif

selfdrive/camerad/cameras/camera_qcom2.cc

@@ -9,6 +9,8 @@
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <poll.h>
#include <math.h>
#include <atomic>
#include "common/util.h"
#include "common/swaglog.h"
@@ -23,24 +25,19 @@
#include "sensor2_i2c.h"
#define DEBAYER_LOCAL_WORKSIZE 16
#define FRAME_WIDTH 1928
#define FRAME_HEIGHT 1208
//#define FRAME_STRIDE 1936 // for 8 bit output
#define FRAME_STRIDE 2416 // for 10 bit output
/*
static void hexdump(uint8_t *data, int len) {
for (int i = 0; i < len; i++) {
if (i!=0&&i%0x10==0) printf("\n");
printf("%02X ", data[i]);
}
printf("\n");
}
*/
extern volatile sig_atomic_t do_exit;
// global var for AE ops
std::atomic<CameraExpInfo> cam_exp[3] = {{{0}}};
CameraInfo cameras_supported[CAMERA_ID_MAX] = {
[CAMERA_ID_AR0231] = {
.frame_width = FRAME_WIDTH,
@@ -149,7 +146,7 @@ void clear_req_queue(int fd, int32_t session_hdl, int32_t link_hdl) {
void sensors_poke(struct CameraState *s, int request_id) {
uint32_t cam_packet_handle = 0;
int size = sizeof(struct cam_packet);
struct cam_packet *pkt = alloc(s->video0_fd, size, 8,
struct cam_packet *pkt = (struct cam_packet *)alloc(s->video0_fd, size, 8,
CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle);
pkt->num_cmd_buf = 1;
pkt->kmd_cmd_buf_index = -1;
@@ -175,7 +172,7 @@ void sensors_i2c(struct CameraState *s, struct i2c_random_wr_payload* dat, int l
// LOGD("sensors_i2c: %d", len);
uint32_t cam_packet_handle = 0;
int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1;
struct cam_packet *pkt = alloc(s->video0_fd, size, 8,
struct cam_packet *pkt = (struct cam_packet *)alloc(s->video0_fd, size, 8,
CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle);
pkt->num_cmd_buf = 1;
pkt->kmd_cmd_buf_index = -1;
@@ -185,8 +182,8 @@ void sensors_i2c(struct CameraState *s, struct i2c_random_wr_payload* dat, int l
buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_cmd_i2c_random_wr) + (len-1)*sizeof(struct i2c_random_wr_payload);
buf_desc[0].type = CAM_CMD_BUF_I2C;
struct cam_cmd_power *power = alloc(s->video0_fd, buf_desc[0].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, (uint32_t*)&buf_desc[0].mem_handle);
struct cam_cmd_i2c_random_wr *i2c_random_wr = (void*)power;
struct cam_cmd_power *power = (struct cam_cmd_power *)alloc(s->video0_fd, buf_desc[0].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, (uint32_t*)&buf_desc[0].mem_handle);
struct cam_cmd_i2c_random_wr *i2c_random_wr = (struct cam_cmd_i2c_random_wr *)power;
i2c_random_wr->header.count = len;
i2c_random_wr->header.op_code = 1;
i2c_random_wr->header.cmd_type = CAMERA_SENSOR_CMD_TYPE_I2C_RNDM_WR;
@@ -212,7 +209,7 @@ void sensors_i2c(struct CameraState *s, struct i2c_random_wr_payload* dat, int l
void sensors_init(int video0_fd, int sensor_fd, int camera_num) {
uint32_t cam_packet_handle = 0;
int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2;
struct cam_packet *pkt = alloc(video0_fd, size, 8,
struct cam_packet *pkt = (struct cam_packet *)alloc(video0_fd, size, 8,
CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle);
pkt->num_cmd_buf = 2;
pkt->kmd_cmd_buf_index = -1;
@@ -222,7 +219,7 @@ void sensors_init(int video0_fd, int sensor_fd, int camera_num) {
buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_cmd_i2c_info) + sizeof(struct cam_cmd_probe);
buf_desc[0].type = CAM_CMD_BUF_LEGACY;
struct cam_cmd_i2c_info *i2c_info = alloc(video0_fd, buf_desc[0].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, (uint32_t*)&buf_desc[0].mem_handle);
struct cam_cmd_i2c_info *i2c_info = (struct cam_cmd_i2c_info *)alloc(video0_fd, buf_desc[0].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, (uint32_t*)&buf_desc[0].mem_handle);
struct cam_cmd_probe *probe = (struct cam_cmd_probe *)((uint8_t *)i2c_info) + sizeof(struct cam_cmd_i2c_info);
switch (camera_num) {
@@ -259,7 +256,7 @@ void sensors_init(int video0_fd, int sensor_fd, int camera_num) {
//buf_desc[1].size = buf_desc[1].length = 148;
buf_desc[1].size = buf_desc[1].length = 196;
buf_desc[1].type = CAM_CMD_BUF_I2C;
struct cam_cmd_power *power = alloc(video0_fd, buf_desc[1].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, (uint32_t*)&buf_desc[1].mem_handle);
struct cam_cmd_power *power = (struct cam_cmd_power *)alloc(video0_fd, buf_desc[1].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, (uint32_t*)&buf_desc[1].mem_handle);
memset(power, 0, buf_desc[1].size);
struct cam_cmd_unconditional_wait *unconditional_wait;
@@ -278,39 +275,39 @@ void sensors_init(int video0_fd, int sensor_fd, int camera_num) {
power->power_settings[1].power_seq_type = 1; // analog
power->power_settings[2].power_seq_type = 2; // digital
power->power_settings[3].power_seq_type = 8; // reset low
power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));
power = (struct cam_cmd_power *)((char*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)));
unconditional_wait = (void*)power;
unconditional_wait = (struct cam_cmd_unconditional_wait *)power;
unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT;
unconditional_wait->delay = 5;
unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND;
power = (void*)power + sizeof(struct cam_cmd_unconditional_wait);
power = (struct cam_cmd_power *)((char*)power + sizeof(struct cam_cmd_unconditional_wait));
// set clock
power->count = 1;
power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP;
power->power_settings[0].power_seq_type = 0;
power->power_settings[0].config_val_low = 24000000; //Hz
power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));
power = (struct cam_cmd_power *)((char*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)));
unconditional_wait = (void*)power;
unconditional_wait = (struct cam_cmd_unconditional_wait *)power;
unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT;
unconditional_wait->delay = 10; // ms
unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND;
power = (void*)power + sizeof(struct cam_cmd_unconditional_wait);
power = (struct cam_cmd_power *)((char*)power + sizeof(struct cam_cmd_unconditional_wait));
// 8,1 is this reset?
power->count = 1;
power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP;
power->power_settings[0].power_seq_type = 8;
power->power_settings[0].config_val_low = 1;
power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));
power = (struct cam_cmd_power *)((char*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)));
unconditional_wait = (void*)power;
unconditional_wait = (struct cam_cmd_unconditional_wait *)power;
unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT;
unconditional_wait->delay = 100; // ms
unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND;
power = (void*)power + sizeof(struct cam_cmd_unconditional_wait);
power = (struct cam_cmd_power *)((char*)power + sizeof(struct cam_cmd_unconditional_wait));
// probe happens here
@@ -319,39 +316,39 @@ void sensors_init(int video0_fd, int sensor_fd, int camera_num) {
power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN;
power->power_settings[0].power_seq_type = 0;
power->power_settings[0].config_val_low = 0;
power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));
power = (struct cam_cmd_power *)((char*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)));
unconditional_wait = (void*)power;
unconditional_wait = (struct cam_cmd_unconditional_wait *)power;
unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT;
unconditional_wait->delay = 1;
unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND;
power = (void*)power + sizeof(struct cam_cmd_unconditional_wait);
power = (struct cam_cmd_power *)((char*)power + sizeof(struct cam_cmd_unconditional_wait));
// reset high
power->count = 1;
power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN;
power->power_settings[0].power_seq_type = 8;
power->power_settings[0].config_val_low = 1;
power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));
power = (struct cam_cmd_power *)((char*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)));
unconditional_wait = (void*)power;
unconditional_wait = (struct cam_cmd_unconditional_wait *)power;
unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT;
unconditional_wait->delay = 1;
unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND;
power = (void*)power + sizeof(struct cam_cmd_unconditional_wait);
power = (struct cam_cmd_power *)((char*)power + sizeof(struct cam_cmd_unconditional_wait));
// reset low
power->count = 1;
power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN;
power->power_settings[0].power_seq_type = 8;
power->power_settings[0].config_val_low = 0;
power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));
power = (struct cam_cmd_power *)((char*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)));
unconditional_wait = (void*)power;
unconditional_wait = (struct cam_cmd_unconditional_wait *)power;
unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT;
unconditional_wait->delay = 1;
unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND;
power = (void*)power + sizeof(struct cam_cmd_unconditional_wait);
power = (struct cam_cmd_power *)((char*)power + sizeof(struct cam_cmd_unconditional_wait));
// 7750
/*power->count = 1;
@@ -365,7 +362,7 @@ void sensors_init(int video0_fd, int sensor_fd, int camera_num) {
power->power_settings[0].power_seq_type = 2;
power->power_settings[1].power_seq_type = 1;
power->power_settings[2].power_seq_type = 3;
power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));
power = (struct cam_cmd_power *)((char*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)));
LOGD("probing the sensor");
int ret = cam_control(sensor_fd, CAM_SENSOR_PROBE_CMD, (void *)(uintptr_t)cam_packet_handle, 0);
@ -385,7 +382,7 @@ void config_isp(struct CameraState *s, int io_mem_handle, int fence, int request
if (io_mem_handle != 0) {
size += sizeof(struct cam_buf_io_cfg);
}
struct cam_packet *pkt = alloc(s->video0_fd, size, 8,
struct cam_packet *pkt = (struct cam_packet *)alloc(s->video0_fd, size, 8,
CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle);
pkt->num_cmd_buf = 2;
pkt->kmd_cmd_buf_index = 0;
@ -403,7 +400,7 @@ void config_isp(struct CameraState *s, int io_mem_handle, int fence, int request
}
pkt->header.size = size;
struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload;
struct cam_buf_io_cfg *io_cfg = (void*)&pkt->payload + pkt->io_configs_offset;
struct cam_buf_io_cfg *io_cfg = (struct cam_buf_io_cfg *)((char*)&pkt->payload + pkt->io_configs_offset);
// TODO: support MMU
buf_desc[0].size = 65624;
@ -422,7 +419,7 @@ void config_isp(struct CameraState *s, int io_mem_handle, int fence, int request
}
buf_desc[1].type = CAM_CMD_BUF_GENERIC;
buf_desc[1].meta_data = CAM_ISP_PACKET_META_GENERIC_BLOB_COMMON;
uint32_t *buf2 = alloc(s->video0_fd, buf_desc[1].size, 0x20, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, (uint32_t*)&buf_desc[1].mem_handle);
uint32_t *buf2 = (uint32_t *)alloc(s->video0_fd, buf_desc[1].size, 0x20, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, (uint32_t*)&buf_desc[1].mem_handle);
// cam_isp_packet_generic_blob_handler
uint32_t tmp[] = {
@ -533,7 +530,7 @@ void enqueue_buffer(struct CameraState *s, int i) {
mem_mgr_map_cmd.mmu_hdls[0] = s->device_iommu;
mem_mgr_map_cmd.num_hdl = 1;
mem_mgr_map_cmd.flags = 1;
mem_mgr_map_cmd.fd = s->bufs[i].fd;
mem_mgr_map_cmd.fd = s->buf.camera_bufs[i].fd;
ret = cam_control(s->video0_fd, CAM_REQ_MGR_MAP_BUF, &mem_mgr_map_cmd, sizeof(mem_mgr_map_cmd));
// LOGD("map buf req: (fd: %d) 0x%x %d", s->bufs[i].fd, mem_mgr_map_cmd.out.buf_handle, ret);
s->buf_handle[i] = mem_mgr_map_cmd.out.buf_handle;
@ -555,7 +552,7 @@ void enqueue_req_multi(struct CameraState *s, int start, int n) {
// ******************* camera *******************
static void camera_init(CameraState *s, int camera_id, int camera_num, unsigned int fps) {
static void camera_init(CameraState *s, int camera_id, int camera_num, unsigned int fps, cl_device_id device_id, cl_context ctx) {
LOGD("camera init %d", camera_num);
assert(camera_id < ARRAYSIZE(cameras_supported));
@ -563,9 +560,6 @@ static void camera_init(CameraState *s, int camera_id, int camera_num, unsigned
assert(s->ci.frame_width != 0);
s->camera_num = camera_num;
s->frame_size = s->ci.frame_height * s->ci.frame_stride;
tbuffer_init2(&s->camera_tb, FRAME_BUF_COUNT, "frame", NULL, s);
s->transform = (mat3){{
1.0, 0.0, 0.0,
@ -582,12 +576,18 @@ static void camera_init(CameraState *s, int camera_id, int camera_num, unsigned
s->request_id_last = 0;
s->skipped = true;
s->ef_filtered = 1.0;
s->debayer_cl_localMemSize = (DEBAYER_LOCAL_WORKSIZE + 2 * (3 / 2)) * (DEBAYER_LOCAL_WORKSIZE + 2 * (3 / 2)) * sizeof(float);
s->debayer_cl_globalWorkSize[0] = s->ci.frame_width;
s->debayer_cl_globalWorkSize[1] = s->ci.frame_height;
s->debayer_cl_localWorkSize[0] = DEBAYER_LOCAL_WORKSIZE;
s->debayer_cl_localWorkSize[1] = DEBAYER_LOCAL_WORKSIZE;
s->buf.init(device_id, ctx, s, FRAME_BUF_COUNT, "frame");
}
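camera_init no longer allocates frame buffers or debayer work sizes itself; that state moved into the CameraBuf member, initialized with the OpenCL device and context threaded down from main. camera_common.cc is not part of this excerpt, so the following is only a sketch of what CameraBuf::init plausibly does, reconstructed from the call sites (frame_buf_count is an assumed member name):

// Hypothetical reconstruction; the real implementation lives in camera_common.cc.
void CameraBuf::init(cl_device_id device_id, cl_context ctx, CameraState *s,
                     int frame_cnt, const char *name) {
  frame_buf_count = frame_cnt;
  for (int i = 0; i < frame_cnt; i++) {
    // ION/CL-backed slots replace the VisionBuf arrays main.cc used to pass in
    camera_bufs[i] = visionbuf_allocate_cl(s->frame_size, device_id, ctx);
  }
  tbuffer_init2(&camera_tb, frame_cnt, name, nullptr, s);
}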
static void camera_open(CameraState *s, VisionBuf* b) {
static void camera_open(CameraState *s) {
int ret;
s->bufs = b;
// /dev/v4l-subdev10 is sensor, 11, 12, 13 are the other sensors
switch (s->camera_num) {
case 0:
@ -646,7 +646,7 @@ static void camera_open(CameraState *s, VisionBuf* b) {
isp_resource.length = sizeof(struct cam_isp_in_port_info) + sizeof(struct cam_isp_out_port_info)*(1-1);
isp_resource.handle_type = CAM_HANDLE_USER_POINTER;
struct cam_isp_in_port_info *in_port_info = malloc(isp_resource.length);
struct cam_isp_in_port_info *in_port_info = (struct cam_isp_in_port_info *)malloc(isp_resource.length);
isp_resource.res_hdl = (uint64_t)in_port_info;
switch (s->camera_num) {
@ -739,7 +739,7 @@ static void camera_open(CameraState *s, VisionBuf* b) {
LOG("-- Config CSI PHY");
{
uint32_t cam_packet_handle = 0;
struct cam_packet *pkt = alloc(s->video0_fd, sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1, 8,
struct cam_packet *pkt = (struct cam_packet *)alloc(s->video0_fd, sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1, 8,
CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle);
pkt->num_cmd_buf = 1;
pkt->kmd_cmd_buf_index = -1;
@ -747,7 +747,7 @@ static void camera_open(CameraState *s, VisionBuf* b) {
buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_csiphy_info);
buf_desc[0].type = CAM_CMD_BUF_GENERIC;
struct cam_csiphy_info *csiphy_info = alloc(s->video0_fd, buf_desc[0].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, (uint32_t*)&buf_desc[0].mem_handle);
struct cam_csiphy_info *csiphy_info = (struct cam_csiphy_info *)alloc(s->video0_fd, buf_desc[0].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, (uint32_t*)&buf_desc[0].mem_handle);
csiphy_info->lane_mask = 0x1f;
csiphy_info->lane_assign = 0x3210;// skip clk. How is this 16 bit for 5 channels??
@ -800,21 +800,24 @@ static void camera_open(CameraState *s, VisionBuf* b) {
enqueue_req_multi(s, 1, FRAME_BUF_COUNT);
}
void cameras_init(MultiCameraState *s) {
camera_init(&s->rear, CAMERA_ID_AR0231, 1, 20); // swap left/right
void cameras_init(MultiCameraState *s, cl_device_id device_id, cl_context ctx) {
camera_init(&s->rear, CAMERA_ID_AR0231, 1, 20, device_id, ctx); // swap left/right
printf("rear initted \n");
camera_init(&s->wide, CAMERA_ID_AR0231, 0, 20);
camera_init(&s->wide, CAMERA_ID_AR0231, 0, 20, device_id, ctx);
printf("wide initted \n");
camera_init(&s->front, CAMERA_ID_AR0231, 2, 20);
camera_init(&s->front, CAMERA_ID_AR0231, 2, 20, device_id, ctx);
printf("front initted \n");
#ifdef NOSCREEN
zsock_t *rgb_sock = zsock_new_push("tcp://192.168.3.4:7768");
assert(rgb_sock);
s->rgb_sock = rgb_sock;
#endif
s->sm = new SubMaster({"driverState"});
s->pm = new PubMaster({"frame", "frontFrame", "wideFrame", "thumbnail"});
}
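cameras_init now owns the messaging endpoints; cameras_close further down deletes them by hand. Since the two calls are strictly paired, the same contract could be written with owning pointers instead. A sketch of that alternative, not what this commit does (SubMaster/PubMaster as used above):

#include <memory>

struct MultiCameraStateAlt {
  std::unique_ptr<SubMaster> sm;
  std::unique_ptr<PubMaster> pm;  // freed automatically; no delete in cameras_close
};

void cameras_init_alt(MultiCameraStateAlt *s) {
  s->sm.reset(new SubMaster({"driverState"}));
  s->pm.reset(new PubMaster({"frame", "frontFrame", "wideFrame", "thumbnail"}));
}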
void cameras_open(MultiCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front, VisionBuf *camera_bufs_wide) {
void cameras_open(MultiCameraState *s) {
int ret;
LOG("-- Opening devices");
@ -860,13 +863,12 @@ void cameras_open(MultiCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *c
ret = ioctl(s->video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
printf("req mgr subscribe: %d\n", ret);
camera_open(&s->rear, camera_bufs_rear);
camera_open(&s->rear);
printf("rear opened \n");
camera_open(&s->wide, camera_bufs_wide);
camera_open(&s->wide);
printf("wide opened \n");
camera_open(&s->front, camera_bufs_front);
camera_open(&s->front);
printf("front opened \n");
// TODO: refactor this api for compat
}
static void camera_close(CameraState *s) {
@ -909,7 +911,7 @@ static void camera_close(CameraState *s) {
ret = cam_control(s->video0_fd, CAM_REQ_MGR_DESTROY_SESSION, &s->req_mgr_session_info, sizeof(s->req_mgr_session_info));
LOGD("destroyed session: %d", ret);
tbuffer_stop(&s->camera_tb);
s->buf.stop();
}
static void cameras_close(MultiCameraState *s) {
@ -919,8 +921,12 @@ static void cameras_close(MultiCameraState *s) {
#ifdef NOSCREEN
zsock_destroy(&s->rgb_sock);
#endif
delete s->sm;
delete s->pm;
}
// ******************* just a helper *******************
void handle_camera_event(CameraState *s, void *evdat) {
struct cam_req_mgr_message *event_data = (struct cam_req_mgr_message *)evdat;
@ -950,15 +956,15 @@ void handle_camera_event(CameraState *s, void *evdat) {
// metas
s->frame_id_last = main_id;
s->request_id_last = real_id;
s->camera_bufs_metadata[buf_idx].frame_id = main_id - s->idx_offset;
s->camera_bufs_metadata[buf_idx].timestamp_eof = timestamp; // only has sof?
s->camera_bufs_metadata[buf_idx].global_gain = s->analog_gain + (100*s->dc_gain_enabled);
s->camera_bufs_metadata[buf_idx].gain_frac = s->analog_gain_frac;
s->camera_bufs_metadata[buf_idx].integ_lines = s->exposure_time;
s->buf.camera_bufs_metadata[buf_idx].frame_id = main_id - s->idx_offset;
s->buf.camera_bufs_metadata[buf_idx].timestamp_eof = timestamp; // only has sof?
s->buf.camera_bufs_metadata[buf_idx].global_gain = s->analog_gain + (100*s->dc_gain_enabled);
s->buf.camera_bufs_metadata[buf_idx].gain_frac = s->analog_gain_frac;
s->buf.camera_bufs_metadata[buf_idx].integ_lines = s->exposure_time;
// dispatch
enqueue_req_multi(s, real_id + FRAME_BUF_COUNT, 1);
tbuffer_dispatch(&s->camera_tb, buf_idx);
tbuffer_dispatch(&s->buf.camera_tb, buf_idx);
} else { // not ready
// reset after half second of no response
if (main_id > s->frame_id_last + 10) {
@ -970,9 +976,203 @@ void handle_camera_event(CameraState *s, void *evdat) {
}
}
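handle_camera_event stamps the slot's metadata onto the shared CameraBuf, then tbuffer_dispatch hands the index to whichever processing thread is blocked on the ring. The consumer half of that contract is not visible in this excerpt; a sketch under the assumption that common/buffering.h exposes the usual acquire/release pair (treat those signatures as assumptions):

// Sketch of the consumer loop paired with tbuffer_dispatch() above.
void process_loop(CameraState *s) {
  while (!do_exit) {
    int buf_idx = tbuffer_acquire(&s->buf.camera_tb);  // blocks for a dispatched slot
    if (buf_idx < 0) break;                            // tbuffer_stop() unblocks the consumer
    const FrameMetadata &meta = s->buf.camera_bufs_metadata[buf_idx];
    // ... debayer, autoexposure, publish using meta ...
    tbuffer_release(&s->buf.camera_tb, buf_idx);
  }
}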
// ******************* exposure control helpers *******************
void set_exposure_time_bounds(CameraState *s) {
switch (s->analog_gain) {
case 0: {
s->exposure_time_min = EXPOSURE_TIME_MIN;
s->exposure_time_max = EXPOSURE_TIME_MAX; // EXPOSURE_TIME_MIN * 4;
break;
}
case ANALOG_GAIN_MAX_IDX - 1: {
s->exposure_time_min = EXPOSURE_TIME_MIN; // EXPOSURE_TIME_MAX / 4;
s->exposure_time_max = EXPOSURE_TIME_MAX;
break;
}
default: {
// finetune margins on both ends
float k_up = sensor_analog_gains[s->analog_gain+1] / sensor_analog_gains[s->analog_gain];
float k_down = sensor_analog_gains[s->analog_gain-1] / sensor_analog_gains[s->analog_gain];
s->exposure_time_min = k_down * EXPOSURE_TIME_MIN * 2;
s->exposure_time_max = k_up * EXPOSURE_TIME_MAX / 2;
}
}
}
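The interior-gain case scales both bounds by the ratio to the neighboring gain, so a later one-step gain change can always be compensated in exposure time without leaving the valid range. A worked sketch with an illustrative gain table (the real sensor_analog_gains and EXPOSURE_TIME_* values live in the header and are not shown here):

#include <cstdio>

int main() {
  const float gains[] = {0.25f, 0.30f, 0.35f, 0.40f};  // illustrative only
  const int T_MIN = 2, T_MAX = 1904;                   // stand-ins for EXPOSURE_TIME_MIN/MAX
  const int idx = 2;                                   // some interior gain index
  float k_up = gains[idx + 1] / gains[idx];            // ~1.14
  float k_down = gains[idx - 1] / gains[idx];          // ~0.86
  // margins leave headroom so a +/-1 gain step keeps exposure representable
  printf("bounds: [%d, %d]\n", (int)(k_down * T_MIN * 2), (int)(k_up * T_MAX / 2));
  return 0;
}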
void switch_conversion_gain(CameraState *s) {
if (!s->dc_gain_enabled) {
s->dc_gain_enabled = true;
s->analog_gain -= 5;
} else {
s->dc_gain_enabled = false;
s->analog_gain += 4;
}
}
static void set_camera_exposure(CameraState *s, float grey_frac) {
// TODO: get stats from sensor?
float target_grey = 0.3 - (s->analog_gain / 105.0);
float exposure_factor = 1 + 30 * pow((target_grey - grey_frac), 3);
if (s->camera_num != 1) {
s->ef_filtered = (1 - EF_LOWPASS_K) * s->ef_filtered + EF_LOWPASS_K * exposure_factor;
exposure_factor = s->ef_filtered;
}
// always prioritize exposure time adjust
s->exposure_time *= exposure_factor;
// switch gain if max/min exposure time is reached
// or always switch down to a lower gain when possible
bool kd = false;
if (s->analog_gain > 0) {
kd = 1.1 * s->exposure_time / (sensor_analog_gains[s->analog_gain-1] / sensor_analog_gains[s->analog_gain]) < EXPOSURE_TIME_MAX / 2;
}
if (s->exposure_time > s->exposure_time_max) {
if (s->analog_gain < ANALOG_GAIN_MAX_IDX - 1) {
s->exposure_time = EXPOSURE_TIME_MAX / 2;
s->analog_gain += 1;
if (!s->dc_gain_enabled && sensor_analog_gains[s->analog_gain] == 1.0) { // switch to HCG at iso 800
switch_conversion_gain(s);
}
set_exposure_time_bounds(s);
} else {
s->exposure_time = s->exposure_time_max;
}
} else if (s->exposure_time < s->exposure_time_min || kd) {
if (s->analog_gain > 0) {
s->exposure_time = std::max(EXPOSURE_TIME_MIN * 2, (int)(s->exposure_time / (sensor_analog_gains[s->analog_gain-1] / sensor_analog_gains[s->analog_gain])));
s->analog_gain -= 1;
if (s->dc_gain_enabled && sensor_analog_gains[s->analog_gain] == 0.25) { // switch back to LCG at iso 200
switch_conversion_gain(s);
}
set_exposure_time_bounds(s);
} else {
s->exposure_time = s->exposure_time_min;
}
}
// set up config
uint16_t AG = s->analog_gain;
AG = AG * 4096 + AG * 256 + AG * 16 + AG;
s->analog_gain_frac = sensor_analog_gains[s->analog_gain];
// printf("cam %d, min %d, max %d \n", s->camera_num, s->exposure_time_min, s->exposure_time_max);
// printf("cam %d, set AG to 0x%X, S to %d, dc %d \n", s->camera_num, AG, s->exposure_time, s->dc_gain_enabled);
struct i2c_random_wr_payload exp_reg_array[] = {{0x3366, AG}, // analog gain
{0x3362, (uint16_t)(s->dc_gain_enabled?0x1:0x0)}, // DC_GAIN
{0x305A, 0x00D8}, // red gain
{0x3058, 0x011B}, // blue gain
{0x3056, 0x009A}, // g1 gain
{0x305C, 0x009A}, // g2 gain
{0x3012, (uint16_t)s->exposure_time}}; // integ time
//{0x301A, 0x091C}}; // reset
sensors_i2c(s, exp_reg_array, sizeof(exp_reg_array)/sizeof(struct i2c_random_wr_payload),
CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
}
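The AG arithmetic above packs the 4-bit analog gain index into all four nibbles of the 16-bit register 0x3366, which is simply a multiply by 0x1111. A self-checking sketch of the equivalence:

#include <cassert>
#include <cstdint>

int main() {
  for (uint16_t ag = 0; ag <= 0xF; ag++) {
    // 0x1111 == 4096 + 256 + 16 + 1: copies ag into every nibble
    uint16_t packed = ag * 4096 + ag * 256 + ag * 16 + ag;
    assert(packed == ag * 0x1111);
  }
  return 0;
}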
void camera_autoexposure(CameraState *s, float grey_frac) {
CameraExpInfo tmp = cam_exp[s->camera_num].load();
tmp.op_id++;
tmp.grey_frac = grey_frac;
cam_exp[s->camera_num].store(tmp);
}
static void* ae_thread(void* arg) {
MultiCameraState *s = (MultiCameraState*)arg;
CameraState *c_handles[3] = {&s->wide, &s->rear, &s->front};
int op_id_last[3] = {0};
CameraExpInfo cam_op[3];
set_thread_name("camera_settings");
while(!do_exit) {
for (int i=0;i<3;i++) {
cam_op[i] = cam_exp[i].load();
if (cam_op[i].op_id != op_id_last[i]) {
set_camera_exposure(c_handles[i], cam_op[i].grey_frac);
op_id_last[i] = cam_op[i].op_id;
}
}
usleep(50000);
}
return NULL;
}
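The handshake between the frame threads and ae_thread is a per-camera atomic slot: camera_autoexposure bumps op_id and stores the latest grey fraction, and ae_thread polls at 20 Hz, reprogramming the sensor only when op_id changed. Slow I2C writes therefore never block frame handling, and intermediate requests are deliberately dropped. A generic sketch of the same pattern (names here are illustrative):

#include <atomic>

struct ExpReq { int op_id; float grey_frac; };
static std::atomic<ExpReq> g_req{ExpReq{0, 0.0f}};  // one slot per writer, as above

void publish(float grey) {        // frame thread: never waits on I2C
  ExpReq r = g_req.load();
  r.op_id++;
  r.grey_frac = grey;
  g_req.store(r);                 // last write wins; stale requests are skipped
}

bool poll_latest(ExpReq *out, int *last_op_id) {  // ops thread, ~20 Hz
  ExpReq r = g_req.load();
  if (r.op_id == *last_op_id) return false;
  *last_op_id = r.op_id;
  *out = r;
  return true;
}

The load/increment/store is only safe because each slot has a single writer; note also that an 8-byte std::atomic struct may not be lock-free on every toolchain, which can require linking libatomic.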
void camera_process_front(MultiCameraState *s, CameraState *c, int cnt) {
common_camera_process_front(s->sm, s->pm, c, cnt);
#ifdef NOSCREEN
const CameraBuf *b = &c->buf;
if (b->cur_frame_data.frame_id % 4 == 2) {
sendrgb(s, (uint8_t *)b->cur_rgb_buf->addr, b->cur_rgb_buf->len, 2);
}
#endif
}
// called by processing_thread
void camera_process_frame(MultiCameraState *s, CameraState *c, int cnt) {
const CameraBuf *b = &c->buf;
#ifdef NOSCREEN
if (b->cur_frame_data.frame_id % 4 == (c == &s->rear ? 1 : 0)) {
sendrgb(s, (uint8_t *)b->cur_rgb_buf->addr, b->cur_rgb_buf->len, c == &s->rear ? 0 : 1);
}
#endif
MessageBuilder msg;
auto framed = c == &s->rear ? msg.initEvent().initFrame() : msg.initEvent().initWideFrame();
fill_frame_data(framed, b->cur_frame_data, cnt);
if (c == &s->rear) {
framed.setTransform(kj::ArrayPtr<const float>(&b->yuv_transform.v[0], 9));
}
s->pm->send(c == &s->rear ? "frame" : "wideFrame", msg);
if (c == &s->rear && cnt % 100 == 3) {
create_thumbnail(s, c, (uint8_t*)b->cur_rgb_buf->addr);
}
if (cnt % 3 == 0) {
int exposure_x;
int exposure_y;
int exposure_width;
int exposure_height;
if (c == &s->rear) {
exposure_x = 96;
exposure_y = 160;
exposure_width = 1734;
exposure_height = 986;
} else { // c == &s->wide
exposure_x = 96;
exposure_y = 250;
exposure_width = 1734;
exposure_height = 524;
}
int skip = 2;
set_exposure_target(c, (const uint8_t *)b->yuv_bufs[b->cur_yuv_idx].y, 0, exposure_x, exposure_x + exposure_width, skip, exposure_y, exposure_y + exposure_height, skip);
}
}
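Autoexposure samples a fixed center crop of the Y plane every third frame, with a wider but shorter window on the wide camera, decimating by skip=2 in both axes. set_exposure_target itself lives in camera_common.cc and is not shown here; a sketch of the kind of sampling it plausibly performs (signature and internals are assumptions):

#include <cstdint>

// Mean luma over [x1,x2) x [y1,y2), sampling every `skip` pixels,
// returned as a grey fraction in [0,1].
static float roi_grey_frac(const uint8_t *y_plane, int stride,
                           int x1, int x2, int y1, int y2, int skip) {
  uint64_t sum = 0, n = 0;
  for (int y = y1; y < y2; y += skip) {
    for (int x = x1; x < x2; x += skip) {
      sum += y_plane[(uint64_t)y * stride + x];
      n++;
    }
  }
  return n ? (float)sum / (255.0f * (float)n) : 0.0f;
}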
void cameras_run(MultiCameraState *s) {
int err;
// start threads
LOG("-- Starting threads");
pthread_t ae_thread_handle;
err = pthread_create(&ae_thread_handle, NULL,
ae_thread, s);
assert(err == 0);
std::vector<std::thread> threads;
threads.push_back(start_process_thread(s, "processing", &s->rear, 51, camera_process_frame));
threads.push_back(start_process_thread(s, "frontview", &s->front, 51, camera_process_front));
threads.push_back(start_process_thread(s, "wideview", &s->wide, 51, camera_process_frame));
// start devices
LOG("-- Start devices");
LOG("-- Starting devices");
int start_reg_len = sizeof(start_reg_array) / sizeof(struct i2c_random_wr_payload);
sensors_i2c(&s->rear, start_reg_array, start_reg_len, CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
sensors_i2c(&s->wide, start_reg_array, start_reg_len, CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
@ -1016,106 +1216,13 @@ void cameras_run(MultiCameraState *s) {
}
LOG(" ************** STOPPING **************");
err = pthread_join(ae_thread_handle, NULL);
assert(err == 0);
cameras_close(s);
}
// ******************* exposure control helpers *******************
void set_exposure_time_bounds(CameraState *s) {
switch (s->analog_gain) {
case 0: {
s->exposure_time_min = EXPOSURE_TIME_MIN;
s->exposure_time_max = EXPOSURE_TIME_MAX; // EXPOSURE_TIME_MIN * 4;
break;
}
case ANALOG_GAIN_MAX_IDX - 1: {
s->exposure_time_min = EXPOSURE_TIME_MIN; // EXPOSURE_TIME_MAX / 4;
s->exposure_time_max = EXPOSURE_TIME_MAX;
break;
}
default: {
// finetune margins on both ends
float k_up = sensor_analog_gains[s->analog_gain+1] / sensor_analog_gains[s->analog_gain];
float k_down = sensor_analog_gains[s->analog_gain-1] / sensor_analog_gains[s->analog_gain];
s->exposure_time_min = k_down * EXPOSURE_TIME_MIN * 2;
s->exposure_time_max = k_up * EXPOSURE_TIME_MAX / 2;
}
}
}
void switch_conversion_gain(CameraState *s) {
if (!s->dc_gain_enabled) {
s->dc_gain_enabled = true;
s->analog_gain -= 5;
} else {
s->dc_gain_enabled = false;
s->analog_gain += 4;
}
}
void camera_autoexposure(CameraState *s, float grey_frac) {
// TODO: get stats from sensor?
float target_grey = 0.3 - (s->analog_gain / 105.0);
float exposure_factor = 1 + 30 * pow((target_grey - grey_frac), 3);
if (s->camera_num != 1) {
s->ef_filtered = (1 - EF_LOWPASS_K) * s->ef_filtered + EF_LOWPASS_K * exposure_factor;
exposure_factor = s->ef_filtered;
}
// always prioritize exposure time adjust
s->exposure_time *= exposure_factor;
// switch gain if max/min exposure time is reached
// or always switch down to a lower gain when possible
bool kd = false;
if (s->analog_gain > 0) {
kd = 1.1 * s->exposure_time / (sensor_analog_gains[s->analog_gain-1] / sensor_analog_gains[s->analog_gain]) < EXPOSURE_TIME_MAX / 2;
}
if (s->exposure_time > s->exposure_time_max) {
if (s->analog_gain < ANALOG_GAIN_MAX_IDX - 1) {
s->exposure_time = EXPOSURE_TIME_MAX / 2;
s->analog_gain += 1;
if (!s->dc_gain_enabled && sensor_analog_gains[s->analog_gain] == 1.0) { // switch to HCG at iso 800
switch_conversion_gain(s);
}
set_exposure_time_bounds(s);
} else {
s->exposure_time = s->exposure_time_max;
}
} else if (s->exposure_time < s->exposure_time_min || kd) {
if (s->analog_gain > 0) {
s->exposure_time = max(EXPOSURE_TIME_MIN * 2, s->exposure_time / (sensor_analog_gains[s->analog_gain-1] / sensor_analog_gains[s->analog_gain]));
s->analog_gain -= 1;
if (s->dc_gain_enabled && sensor_analog_gains[s->analog_gain] == 0.25) { // switch back to LCG at iso 200
switch_conversion_gain(s);
}
set_exposure_time_bounds(s);
} else {
s->exposure_time = s->exposure_time_min;
}
}
// set up config
uint16_t AG = s->analog_gain;
AG = AG * 4096 + AG * 256 + AG * 16 + AG;
s->analog_gain_frac = sensor_analog_gains[s->analog_gain];
// printf("cam %d, min %d, max %d \n", s->camera_num, s->exposure_time_min, s->exposure_time_max);
// printf("cam %d, set AG to 0x%X, S to %d, dc %d \n", s->camera_num, AG, s->exposure_time, s->dc_gain_enabled);
struct i2c_random_wr_payload exp_reg_array[] = {{0x3366, AG}, // analog gain
{0x3362, s->dc_gain_enabled?0x1:0x0}, // DC_GAIN
{0x305A, 0x00D8}, // red gain
{0x3058, 0x011B}, // blue gain
{0x3056, 0x009A}, // g1 gain
{0x305C, 0x009A}, // g2 gain
{0x3012, s->exposure_time}}; // integ time
//{0x301A, 0x091C}}; // reset
sensors_i2c(s, exp_reg_array, sizeof(exp_reg_array)/sizeof(struct i2c_random_wr_payload),
CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG);
for (auto &t : threads) t.join();
}
#ifdef NOSCREEN


@ -1,17 +1,10 @@
#ifndef CAMERA_H
#define CAMERA_H
#pragma once
#include <stdint.h>
#include <stdbool.h>
#include <pthread.h>
#include <czmq.h>
#include "common/mat.h"
#include "common/visionbuf.h"
#include "common/buffering.h"
#include "camera_common.h"
#include "media/cam_req_mgr.h"
#define FRAME_BUF_COUNT 4
@ -25,16 +18,10 @@
#define EF_LOWPASS_K 0.35
#ifdef __cplusplus
extern "C" {
#endif
typedef struct CameraState {
CameraInfo ci;
FrameMetadata camera_bufs_metadata[FRAME_BUF_COUNT];
TBuffer camera_tb;
int frame_size;
float analog_gain_frac;
uint16_t analog_gain;
bool dc_gain_enabled;
@ -57,7 +44,6 @@ typedef struct CameraState {
int camera_num;
VisionBuf *bufs;
uint32_t session_handle;
@ -76,8 +62,13 @@ typedef struct CameraState {
int idx_offset;
bool skipped;
int debayer_cl_localMemSize;
size_t debayer_cl_globalWorkSize[2];
size_t debayer_cl_localWorkSize[2];
struct cam_req_mgr_session_info req_mgr_session_info;
CameraBuf buf;
} CameraState;
typedef struct MultiCameraState {
@ -95,19 +86,15 @@ typedef struct MultiCameraState {
#endif
pthread_mutex_t isp_lock;
SubMaster *sm;
PubMaster *pm;
} MultiCameraState;
void cameras_init(MultiCameraState *s);
void cameras_open(MultiCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front, VisionBuf *camera_bufs_wide);
void cameras_init(MultiCameraState *s, cl_device_id device_id, cl_context ctx);
void cameras_open(MultiCameraState *s);
void cameras_run(MultiCameraState *s);
void camera_autoexposure(CameraState *s, float grey_frac);
#ifdef NOSCREEN
void sendrgb(MultiCameraState *s, uint8_t* dat, int len, uint8_t cam_id);
#endif
#ifdef __cplusplus
} // extern "C"
#endif
#endif


@ -1,6 +1,7 @@
#include "camera_webcam.h"
#include <unistd.h>
#include <assert.h>
#include <string.h>
#include <signal.h>
#include <pthread.h>
@ -8,7 +9,6 @@
#include "common/util.h"
#include "common/timing.h"
#include "common/swaglog.h"
#include "buffering.h"
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wundefined-inline"
@ -27,27 +27,21 @@ extern volatile sig_atomic_t do_exit;
#define FRAME_HEIGHT_FRONT 864
namespace {
void camera_open(CameraState *s, VisionBuf *camera_bufs, bool rear) {
assert(camera_bufs);
s->camera_bufs = camera_bufs;
void camera_open(CameraState *s, bool rear) {
}
void camera_close(CameraState *s) {
tbuffer_stop(&s->camera_tb);
s->buf.stop();
}
void camera_release_buffer(void *cookie, int buf_idx) {
}
void camera_init(CameraState *s, int camera_id, unsigned int fps) {
void camera_init(CameraState *s, int camera_id, unsigned int fps, cl_device_id device_id, cl_context ctx) {
assert(camera_id < ARRAYSIZE(cameras_supported));
s->ci = cameras_supported[camera_id];
assert(s->ci.frame_width != 0);
s->frame_size = s->ci.frame_height * s->ci.frame_stride;
s->fps = fps;
tbuffer_init2(&s->camera_tb, FRAME_BUF_COUNT, "frame", camera_release_buffer, s);
s->buf.init(device_id, ctx, s, FRAME_BUF_COUNT, "frame");
}
static void* rear_thread(void *arg) {
@ -83,7 +77,7 @@ static void* rear_thread(void *arg) {
}
uint32_t frame_id = 0;
TBuffer* tb = &s->camera_tb;
TBuffer* tb = &s->buf.camera_tb;
while (!do_exit) {
cv::Mat frame_mat;
@ -100,12 +94,12 @@ static void* rear_thread(void *arg) {
int transformed_size = transformed_mat.total() * transformed_mat.elemSize();
const int buf_idx = tbuffer_select(tb);
s->camera_bufs_metadata[buf_idx] = {
s->buf.camera_bufs_metadata[buf_idx] = {
.frame_id = frame_id,
};
cl_command_queue q = s->camera_bufs[buf_idx].copy_q;
cl_mem yuv_cl = s->camera_bufs[buf_idx].buf_cl;
cl_command_queue q = s->buf.camera_bufs[buf_idx].copy_q;
cl_mem yuv_cl = s->buf.camera_bufs[buf_idx].buf_cl;
cl_event map_event;
void *yuv_buf = (void *)clEnqueueMapBuffer(q, yuv_cl, CL_TRUE,
CL_MAP_WRITE, 0, transformed_size,
@ -157,7 +151,7 @@ void front_thread(CameraState *s) {
}
uint32_t frame_id = 0;
TBuffer* tb = &s->camera_tb;
TBuffer* tb = &s->buf.camera_tb;
while (!do_exit) {
cv::Mat frame_mat;
@ -174,12 +168,12 @@ void front_thread(CameraState *s) {
int transformed_size = transformed_mat.total() * transformed_mat.elemSize();
const int buf_idx = tbuffer_select(tb);
s->camera_bufs_metadata[buf_idx] = {
s->buf.camera_bufs_metadata[buf_idx] = {
.frame_id = frame_id,
};
cl_command_queue q = s->camera_bufs[buf_idx].copy_q;
cl_mem yuv_cl = s->camera_bufs[buf_idx].buf_cl;
cl_command_queue q = s->buf.camera_bufs[buf_idx].copy_q;
cl_mem yuv_cl = s->buf.camera_bufs[buf_idx].buf_cl;
cl_event map_event;
void *yuv_buf = (void *)clEnqueueMapBuffer(q, yuv_cl, CL_TRUE,
CL_MAP_WRITE, 0, transformed_size,
@ -224,54 +218,69 @@ CameraInfo cameras_supported[CAMERA_ID_MAX] = {
},
};
void cameras_init(MultiCameraState *s) {
void cameras_init(MultiCameraState *s, cl_device_id device_id, cl_context ctx) {
camera_init(&s->rear, CAMERA_ID_LGC920, 20);
camera_init(&s->rear, CAMERA_ID_LGC920, 20, device_id, ctx);
s->rear.transform = (mat3){{
1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0,
}};
camera_init(&s->front, CAMERA_ID_LGC615, 10);
camera_init(&s->front, CAMERA_ID_LGC615, 10, device_id, ctx);
s->front.transform = (mat3){{
1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0,
}};
s->pm = new PubMaster({"frame", "frontFrame"});
}
void camera_autoexposure(CameraState *s, float grey_frac) {}
void cameras_open(MultiCameraState *s, VisionBuf *camera_bufs_rear,
VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats,
VisionBuf *camera_bufs_front) {
assert(camera_bufs_rear);
assert(camera_bufs_front);
void cameras_open(MultiCameraState *s) {
// LOG("*** open front ***");
camera_open(&s->front, camera_bufs_front, false);
camera_open(&s->front, false);
// LOG("*** open rear ***");
camera_open(&s->rear, camera_bufs_rear, true);
camera_open(&s->rear, true);
}
void cameras_close(MultiCameraState *s) {
camera_close(&s->rear);
camera_close(&s->front);
delete s->pm;
}
void camera_process_front(MultiCameraState *s, CameraState *c, int cnt) {
MessageBuilder msg;
auto framed = msg.initEvent().initFrontFrame();
framed.setFrameType(cereal::FrameData::FrameType::FRONT);
fill_frame_data(framed, c->buf.cur_frame_data, cnt);
s->pm->send("frontFrame", msg);
}
void camera_process_rear(MultiCameraState *s, CameraState *c, int cnt) {
const CameraBuf *b = &c->buf;
MessageBuilder msg;
auto framed = msg.initEvent().initFrame();
fill_frame_data(framed, b->cur_frame_data, cnt);
framed.setImage(kj::arrayPtr((const uint8_t *)b->yuv_ion[b->cur_yuv_idx].addr, b->yuv_buf_size));
framed.setTransform(kj::ArrayPtr<const float>(&b->yuv_transform.v[0], 9));
s->pm->send("frame", msg);
}
void cameras_run(MultiCameraState *s) {
std::vector<std::thread> threads;
threads.push_back(start_process_thread(s, "processing", &s->rear, 51, camera_process_rear));
threads.push_back(start_process_thread(s, "frontview", &s->front, 51, camera_process_front));
std::thread t_rear = std::thread(rear_thread, &s->rear);
set_thread_name("webcam_thread");
int err;
pthread_t rear_thread_handle;
err = pthread_create(&rear_thread_handle, NULL,
rear_thread, &s->rear);
assert(err == 0);
front_thread(&s->front);
err = pthread_join(rear_thread_handle, NULL);
assert(err == 0);
t_rear.join();
cameras_close(s);
for (auto &t : threads) t.join();
}
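The webcam backend now follows the same orchestration shape as the qcom2 one: every long-running loop is a std::thread (via start_process_thread or directly), shutdown is signaled through do_exit, and teardown is a single join pass. A condensed sketch of the shared pattern:

#include <atomic>
#include <thread>
#include <vector>

static std::atomic<bool> do_exit_flag{false};  // stand-in for the global do_exit

void run_all() {
  std::vector<std::thread> threads;
  threads.emplace_back([] { while (!do_exit_flag) { /* capture */ } });
  threads.emplace_back([] { while (!do_exit_flag) { /* process + publish */ } });
  // ... main loop ...
  do_exit_flag = true;
  for (auto &t : threads) t.join();  // single teardown point
}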


@ -1,5 +1,4 @@
#ifndef CAMERA_WEBCAM
#define CAMERA_WEBCAM
#pragma once
#include <stdbool.h>
@ -9,26 +8,13 @@
#include <CL/cl.h>
#endif
#include "common/mat.h"
#include "buffering.h"
#include "common/visionbuf.h"
#include "camera_common.h"
#define FRAME_BUF_COUNT 16
#ifdef __cplusplus
extern "C" {
#endif
typedef struct CameraState {
int camera_id;
CameraInfo ci;
int frame_size;
VisionBuf *camera_bufs;
FrameMetadata camera_bufs_metadata[FRAME_BUF_COUNT];
TBuffer camera_tb;
int fps;
float digital_gain;
@ -36,6 +22,8 @@ typedef struct CameraState {
float cur_gain_frac;
mat3 transform;
CameraBuf buf;
} CameraState;
@ -44,15 +32,13 @@ typedef struct MultiCameraState {
CameraState rear;
CameraState front;
SubMaster *sm;
PubMaster *pm;
} MultiCameraState;
void cameras_init(MultiCameraState *s);
void cameras_open(MultiCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front);
void cameras_init(MultiCameraState *s, cl_device_id device_id, cl_context ctx);
void cameras_open(MultiCameraState *s);
void cameras_run(MultiCameraState *s);
void cameras_close(MultiCameraState *s);
void camera_autoexposure(CameraState *s, float grey_frac);
#ifdef __cplusplus
} // extern "C"
#endif
#endif

File diff suppressed because it is too large

@ -1,5 +1,4 @@
#ifndef IONBUF_H
#define IONBUF_H
#pragma once
#define CL_USE_DEPRECATED_OPENCL_1_2_APIS
#ifdef __APPLE__
@ -29,13 +28,10 @@ typedef struct VisionBuf {
#define VISIONBUF_SYNC_TO_DEVICE 1
VisionBuf visionbuf_allocate(size_t len);
VisionBuf visionbuf_allocate_cl(size_t len, cl_device_id device_id, cl_context ctx, cl_mem *out_mem);
cl_mem visionbuf_to_cl(const VisionBuf* buf, cl_device_id device_id, cl_context ctx);
VisionBuf visionbuf_allocate_cl(size_t len, cl_device_id device_id, cl_context ctx);
void visionbuf_sync(const VisionBuf* buf, int dir);
void visionbuf_free(const VisionBuf* buf);
#ifdef __cplusplus
}
#endif
#endif


@ -45,17 +45,8 @@ VisionBuf visionbuf_allocate(size_t len) {
};
}
cl_mem visionbuf_to_cl(const VisionBuf* buf, cl_device_id device_id, cl_context ctx) {
// HACK because this platform is just for convenience
VisionBuf *w_buf = (VisionBuf*)buf;
cl_mem ret;
*w_buf = visionbuf_allocate_cl(buf->len, device_id, ctx, &ret);
return ret;
}
VisionBuf visionbuf_allocate_cl(size_t len, cl_device_id device_id, cl_context ctx, cl_mem *out_mem) {
VisionBuf visionbuf_allocate_cl(size_t len, cl_device_id device_id, cl_context ctx) {
int err;
assert(out_mem);
#if __OPENCL_VERSION__ >= 200
void* host_ptr =
@ -72,8 +63,6 @@ VisionBuf visionbuf_allocate_cl(size_t len, cl_device_id device_id, cl_context c
cl_mem mem = clCreateBuffer(ctx, CL_MEM_READ_WRITE | CL_MEM_USE_HOST_PTR, len, host_ptr, &err);
assert(err == 0);
*out_mem = mem;
return (VisionBuf){
.len = len, .addr = host_ptr, .handle = 0, .fd = fd,
.device_id = device_id, .ctx = ctx, .buf_cl = mem,


@ -71,32 +71,27 @@ VisionBuf visionbuf_allocate(size_t len) {
};
}
VisionBuf visionbuf_allocate_cl(size_t len, cl_device_id device_id, cl_context ctx, cl_mem *out_mem) {
VisionBuf r = visionbuf_allocate(len);
*out_mem = visionbuf_to_cl(&r, device_id, ctx);
r.buf_cl = *out_mem;
return r;
}
VisionBuf visionbuf_allocate_cl(size_t len, cl_device_id device_id, cl_context ctx) {
VisionBuf buf = visionbuf_allocate(len);
int err = 0;
cl_mem visionbuf_to_cl(const VisionBuf* buf, cl_device_id device_id, cl_context ctx) {
int err = 0;
assert(((uintptr_t)buf->addr % DEVICE_PAGE_SIZE_CL) == 0);
assert(((uintptr_t)buf.addr % DEVICE_PAGE_SIZE_CL) == 0);
cl_mem_ion_host_ptr ion_cl = {0};
ion_cl.ext_host_ptr.allocation_type = CL_MEM_ION_HOST_PTR_QCOM;
ion_cl.ext_host_ptr.host_cache_policy = CL_MEM_HOST_UNCACHED_QCOM;
ion_cl.ion_filedesc = buf->fd;
ion_cl.ion_hostptr = buf->addr;
ion_cl.ion_filedesc = buf.fd;
ion_cl.ion_hostptr = buf.addr;
cl_mem mem = clCreateBuffer(ctx,
buf.buf_cl = clCreateBuffer(ctx,
CL_MEM_USE_HOST_PTR | CL_MEM_EXT_HOST_PTR_QCOM,
buf->len, &ion_cl, &err);
buf.len, &ion_cl, &err);
assert(err == 0);
return mem;
return buf;
}
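These two hunks fold visionbuf_to_cl into visionbuf_allocate_cl and drop the cl_mem out-parameter; the handle now travels on the VisionBuf itself, which is what the modeld hunk below relies on. A caller-side sketch of the change (len/device_id/ctx assumed in scope):

// before: cl_mem came back through an out-parameter
//   cl_mem yuv_cl;
//   VisionBuf yuv_ion = visionbuf_allocate_cl(len, device_id, ctx, &yuv_cl);
// after: read it off the returned buffer
VisionBuf yuv_ion = visionbuf_allocate_cl(len, device_id, ctx);
cl_mem yuv_cl = yuv_ion.buf_cl;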
void visionbuf_sync(const VisionBuf* buf, int dir) {
int err;


@ -44,19 +44,18 @@ void visionimg_compute_aligned_width_and_height(int width, int height, int *alig
#endif
}
VisionImg visionimg_alloc_rgb24(int width, int height, VisionBuf *out_buf) {
VisionImg visionimg_alloc_rgb24(cl_device_id device_id, cl_context ctx, int width, int height, VisionBuf *out_buf) {
assert(out_buf != nullptr);
int aligned_w = 0, aligned_h = 0;
visionimg_compute_aligned_width_and_height(width, height, &aligned_w, &aligned_h);
int stride = aligned_w * 3;
size_t size = (size_t) aligned_w * aligned_h * 3;
VisionBuf buf = visionbuf_allocate(size);
*out_buf = buf;
*out_buf = visionbuf_allocate_cl(size, device_id, ctx);
return (VisionImg){
.fd = buf.fd,
.fd = out_buf->fd,
.format = VISIONIMG_FORMAT_RGB24,
.width = width,
.height = height,


@ -28,7 +28,7 @@ typedef struct VisionImg {
} VisionImg;
void visionimg_compute_aligned_width_and_height(int width, int height, int *aligned_w, int *aligned_h);
VisionImg visionimg_alloc_rgb24(int width, int height, VisionBuf *out_buf);
VisionImg visionimg_alloc_rgb24(cl_device_id device_id, cl_context ctx, int width, int height, VisionBuf *out_buf);
EGLClientBuffer visionimg_to_egl(const VisionImg *img, void **pph);
GLuint visionimg_to_gl(const VisionImg *img, EGLImageKHR *pkhr, void **pph);
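visionimg_alloc_rgb24 now allocates a CL-mapped buffer directly rather than a plain ION buffer, so callers must thread the device and context through. A caller-side sketch (identifiers assumed in scope):

VisionBuf rgb_buf;
VisionImg img = visionimg_alloc_rgb24(device_id, ctx, width, height, &rgb_buf);
cl_mem rgb_cl = rgb_buf.buf_cl;  // CL handle available immediately; no separate visionbuf_to_cl step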


@ -153,8 +153,7 @@ int main(int argc, char **argv) {
float frames_dropped = 0;
// one frame in memory
cl_mem yuv_cl;
VisionBuf yuv_ion = visionbuf_allocate_cl(buf_info.buf_len, device_id, context, &yuv_cl);
VisionBuf yuv_ion = visionbuf_allocate_cl(buf_info.buf_len, device_id, context);
uint32_t frame_id = 0, last_vipc_frame_id = 0;
double last = 0;
@ -192,7 +191,7 @@ int main(int argc, char **argv) {
memcpy(yuv_ion.addr, buf->addr, buf_info.buf_len);
ModelDataRaw model_buf =
model_eval_frame(&model, q, yuv_cl, buf_info.width, buf_info.height,
model_eval_frame(&model, q, yuv_ion.buf_cl, buf_info.width, buf_info.height,
model_transform, NULL, vec_desire);
mt2 = millis_since_boot();


@ -27,7 +27,7 @@ def print_cpu_usage(first_proc, last_proc):
("selfdrive.controls.radard", 5.67),
("./boardd", 3.63),
("./_dmonitoringmodeld", 2.67),
("selfdrive.logmessaged", 2.71),
("selfdrive.logmessaged", 1.7),
("selfdrive.thermald.thermald", 2.41),
("selfdrive.locationd.calibrationd", 2.0),
("selfdrive.monitoring.dmonitoringd", 1.90),