send frame in packet on demand (#2567)

* focus lens helper2

* fix

* yes screen

* s

* new/del

* once

* cleanup

Co-authored-by: Comma Device <device@comma.ai>
pull/2573/head
ZwX1616 2020-11-18 13:39:50 -08:00 committed by GitHub
parent cc1bca542d
commit c630cd44da
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 43 additions and 60 deletions

View File

@ -11,10 +11,6 @@ if arch == "aarch64":
elif arch == "larch64":
libs += ['atomic']
cameras = ['cameras/camera_qcom2.cc']
# no screen
# env = env.Clone()
# env.Append(CXXFLAGS = '-DNOSCREEN')
# env.Append(CFLAGS = '-DNOSCREEN')
else:
if USE_WEBCAM:
libs += ['opencv_core', 'opencv_highgui', 'opencv_imgproc', 'opencv_videoio']

View File

@ -24,6 +24,12 @@
#include <cstring>
#include <vector>

#include "common/util.h"
#include "imgproc/utils.h"
const int env_xmin = getenv("XMIN") ? atoi(getenv("XMIN")) : 0;
const int env_xmax = getenv("XMAX") ? atoi(getenv("XMAX")) : -1;
const int env_ymin = getenv("YMIN") ? atoi(getenv("YMIN")) : 0;
const int env_ymax = getenv("YMAX") ? atoi(getenv("YMAX")) : -1;
const int env_scale = getenv("SCALE") ? atoi(getenv("SCALE")) : 1;
static cl_program build_debayer_program(cl_device_id device_id, cl_context context, const CameraInfo *ci, const CameraBuf *b) {
char args[4096];
snprintf(args, sizeof(args),
@ -219,6 +225,27 @@ void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &fr
framed.setGainFrac(frame_data.gain_frac);
}
void fill_frame_image(cereal::FrameData::Builder &framed, uint8_t *dat, int w, int h, int stride) {
if (dat != nullptr) {
int scale = env_scale;
int x_min = env_xmin; int y_min = env_ymin; int x_max = w-1; int y_max = h-1;
if (env_xmax != -1) x_max = env_xmax;
if (env_ymax != -1) y_max = env_ymax;
int new_width = (x_max - x_min + 1) / scale;
int new_height = (y_max - y_min + 1) / scale;
uint8_t *resized_dat = new uint8_t[new_width*new_height*3];
int goff = x_min*3 + y_min*stride;
for (int r=0;r<new_height;r++) {
for (int c=0;c<new_width;c++) {
memcpy(&resized_dat[(r*new_width+c)*3], &dat[goff+r*stride*scale+c*3*scale], 3*sizeof(uint8_t));
}
}
framed.setImage(kj::arrayPtr((const uint8_t*)resized_dat, new_width*new_height*3));
delete[] resized_dat;
}
}
void create_thumbnail(MultiCameraState *s, CameraState *c, uint8_t *bgr_ptr) {
const CameraBuf *b = &c->buf;
@ -402,5 +429,8 @@ void common_camera_process_front(SubMaster *sm, PubMaster *pm, CameraState *c, i
auto framed = msg.initEvent().initFrontFrame();
framed.setFrameType(cereal::FrameData::FrameType::FRONT);
fill_frame_data(framed, b->cur_frame_data, cnt);
if (env_send_front) {
fill_frame_image(framed, (uint8_t*)b->cur_rgb_buf->addr, b->rgb_width, b->rgb_height, b->rgb_stride);
}
pm->send("frontFrame", msg);
}

View File

@ -1,5 +1,6 @@
#pragma once
#include <stdlib.h>
#include <stdbool.h>
#include <stdint.h>
#include <memory>
@ -34,6 +35,10 @@
#define LOG_CAMERA_ID_QCAMERA 3
#define LOG_CAMERA_ID_MAX 4
const bool env_send_front = getenv("SEND_FRONT") != NULL;
const bool env_send_rear = getenv("SEND_REAR") != NULL;
const bool env_send_wide = getenv("SEND_WIDE") != NULL;
typedef struct CameraInfo {
const char* name;
int frame_width, frame_height;
@ -128,6 +133,7 @@ public:
typedef void (*process_thread_cb)(MultiCameraState *s, CameraState *c, int cnt);
void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data, uint32_t cnt);
void fill_frame_image(cereal::FrameData::Builder &framed, uint8_t *dat, int w, int h, int stride);
void create_thumbnail(MultiCameraState *s, CameraState *c, uint8_t *bgr_ptr);
void set_exposure_target(CameraState *c, const uint8_t *pix_ptr, int x_start, int x_end, int x_skip, int y_start, int y_end, int y_skip);
std::thread start_process_thread(MultiCameraState *cameras, const char *tname,

View File

@ -1,4 +1,3 @@
#include <stdlib.h>
#include <stdio.h>
#include <stdbool.h>
#include <assert.h>
@ -2141,6 +2140,9 @@ void camera_process_frame(MultiCameraState *s, CameraState *c, int cnt) {
MessageBuilder msg;
auto framed = msg.initEvent().initFrame();
fill_frame_data(framed, b->cur_frame_data, cnt);
if (env_send_rear) {
fill_frame_image(framed, (uint8_t*)b->cur_rgb_buf->addr, b->rgb_width, b->rgb_height, b->rgb_stride);
}
framed.setFocusVal(kj::ArrayPtr<const int16_t>(&s->rear.focus[0], NUM_FOCUS));
framed.setFocusConf(kj::ArrayPtr<const uint8_t>(&s->rear.confidence[0], NUM_FOCUS));
framed.setSharpnessScore(kj::ArrayPtr<const uint16_t>(&s->lapres[0], ARRAYSIZE(s->lapres)));

View File

@ -807,11 +807,6 @@ void cameras_init(MultiCameraState *s, cl_device_id device_id, cl_context ctx) {
printf("wide initted \n");
camera_init(&s->front, CAMERA_ID_AR0231, 2, 20, device_id, ctx);
printf("front initted \n");
#ifdef NOSCREEN
zsock_t *rgb_sock = zsock_new_push("tcp://192.168.3.4:7768");
assert(rgb_sock);
s->rgb_sock = rgb_sock;
#endif
s->sm = new SubMaster({"driverState"});
s->pm = new PubMaster({"frame", "frontFrame", "wideFrame", "thumbnail"});
@ -918,9 +913,7 @@ static void cameras_close(MultiCameraState *s) {
camera_close(&s->rear);
camera_close(&s->wide);
camera_close(&s->front);
#ifdef NOSCREEN
zsock_destroy(&s->rgb_sock);
#endif
delete s->sm;
delete s->pm;
}
@ -1107,27 +1100,18 @@ static void* ae_thread(void* arg) {
void camera_process_front(MultiCameraState *s, CameraState *c, int cnt) {
common_camera_process_front(s->sm, s->pm, c, cnt);
#ifdef NOSCREEN
const CameraBuf *b = &c->buf;
if (b->cur_frame_data.frame_id % 4 == 2) {
sendrgb(s, (uint8_t *)b->cur_rgb_buf->addr, b->cur_rgb_buf->len, 2);
}
#endif
}
// called by processing_thread
void camera_process_frame(MultiCameraState *s, CameraState *c, int cnt) {
const CameraBuf *b = &c->buf;
#ifdef NOSCREEN
if (b->cur_frame_data.frame_id % 4 == (c == &s->rear ? 1 : 0)) {
sendrgb(s, (uint8_t *)b->cur_rgb_buf->addr, b->cur_rgb_buf->len, c == &s->rear ? 0 : 1);
}
#endif
MessageBuilder msg;
auto framed = c == &s->rear ? msg.initEvent().initFrame() : msg.initEvent().initWideFrame();
fill_frame_data(framed, b->cur_frame_data, cnt);
if ((c == &s->rear && env_send_rear) || (c == &s->wide && env_send_wide)) {
fill_frame_image(framed, (uint8_t*)b->cur_rgb_buf->addr, b->rgb_width, b->rgb_height, b->rgb_stride);
}
if (c == &s->rear) {
framed.setTransform(kj::ArrayPtr<const float>(&b->yuv_transform.v[0], 9));
}
@ -1224,32 +1208,3 @@ void cameras_run(MultiCameraState *s) {
for (auto &t : threads) t.join();
}
#ifdef NOSCREEN
void sendrgb(MultiCameraState *s, uint8_t* dat, int len, uint8_t cam_id) {
int err, err2;
int scale = 6;
int old_width = FRAME_WIDTH;
// int old_height = FRAME_HEIGHT;
int new_width = FRAME_WIDTH / scale;
int new_height = FRAME_HEIGHT / scale;
uint8_t resized_dat[new_width*new_height*3];
// int goff, loff;
// goff = ((old_width*(scale-1)*old_height/scale)/2);
memset(&resized_dat, cam_id, 3);
for (uint32_t r=1;r<new_height;r++) {
for (uint32_t c=1;c<new_width;c++) {
resized_dat[(r*new_width+c)*3] = dat[(r*old_width + c)*3*scale];
resized_dat[(r*new_width+c)*3+1] = dat[(r*old_width + c)*3*scale+1];
resized_dat[(r*new_width+c)*3+2] = dat[(r*old_width + c)*3*scale+2];
// loff = r*old_width + c;
// resized_dat[(r*new_width+c)*3] = dat[(goff+loff)*3];
// resized_dat[(r*new_width+c)*3+1] = dat[(goff+loff)*3+1];
// resized_dat[(r*new_width+c)*3+2] = dat[(goff+loff)*3+2];
}
}
err = zmq_send(zsock_resolve(s->rgb_sock), &resized_dat, new_width*new_height*3, 0);
err2 = zmq_errno();
//printf("zmq errcode %d, %d\n",err,err2);
}
#endif

View File

@ -81,9 +81,6 @@ typedef struct MultiCameraState {
CameraState rear;
CameraState front;
CameraState wide;
#ifdef NOSCREEN
zsock_t *rgb_sock;
#endif
pthread_mutex_t isp_lock;
@ -95,6 +92,3 @@ void cameras_init(MultiCameraState *s, cl_device_id device_id, cl_context ctx);
void cameras_open(MultiCameraState *s);
void cameras_run(MultiCameraState *s);
void camera_autoexposure(CameraState *s, float grey_frac);
#ifdef NOSCREEN
void sendrgb(MultiCameraState *s, uint8_t* dat, int len, uint8_t cam_id);
#endif