Compare commits

...

No commits in common. "v0.7.8" and "spacecruft" have entirely different histories.

2205 changed files with 299764 additions and 115070 deletions

.clang-tidy 100644 (19 changed lines)

@@ -0,0 +1,19 @@
---
Checks: '
bugprone-*,
-bugprone-integer-division,
-bugprone-narrowing-conversions,
performance-*,
clang-analyzer-*,
misc-*,
-misc-unused-parameters,
modernize-*,
-modernize-avoid-c-arrays,
-modernize-deprecated-headers,
-modernize-use-auto,
-modernize-use-using,
-modernize-use-nullptr,
-modernize-use-trailing-return-type,
'
CheckOptions:
...

.dir-locals.el 100644 (3 changed lines)

@@ -0,0 +1,3 @@
((c++-mode (flycheck-gcc-language-standard . "c++11")
(flycheck-clang-language-standard . "c++11")
))

.dockerignore 100644 (40 changed lines)

@@ -0,0 +1,40 @@
**/.git
.DS_Store
*.dylib
*.DSYM
*.d
*.pyc
*.pyo
.*.swp
.*.swo
.*.un~
*.tmp
*.o
*.o-*
*.os
*.os-*
*.so
*.a
notebooks
phone
massivemap
neos
installer
chffr/app2
chffr/backend/env
selfdrive/nav
selfdrive/baseui
chffr/lib/vidindex/vidindex
selfdrive/test/simulator2
**/cache_data
xx/chffr/lib/vidindex/vidindex
xx/plus
xx/community
xx/projects
!xx/projects/eon_testing_master
!xx/projects/map3d
xx/ops
xx/junk
tools/sim/carla
tools/sim/*.tar.gz

.editorconfig 100644 (11 changed lines)

@@ -0,0 +1,11 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
[{*.py, *.pyx, *.pxd}]
charset = utf-8
indent_style = space
indent_size = 2

.gitattributes vendored 100644 (23 changed lines)

@@ -0,0 +1,23 @@
*.keras filter=lfs diff=lfs merge=lfs -text
*.dlc filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.jpg filter=lfs diff=lfs merge=lfs -text
*.ipynb filter=nbstripout -diff
external/ffmpeg/bin/ffmpeg_cuda filter=lfs diff=lfs merge=lfs -text
models/segnet.keras filter=lfs diff=lfs merge=lfs -text
phonelibs/acado/x86_64/lib/libacado_toolkit.a filter=lfs diff=lfs merge=lfs -text
phonelibs/acado/x86_64/lib/libacado_toolkit_s.so.1.2.2beta filter=lfs diff=lfs merge=lfs -text
phonelibs/acado/x86_64/lib/libacado_casadi.a filter=lfs diff=lfs merge=lfs -text
phonelibs/acado/x86_64/lib/libacado_csparse.a filter=lfs diff=lfs merge=lfs -text
phonelibs/acado/x86_64/lib/libacado_qpoases.a filter=lfs diff=lfs merge=lfs -text
phonelibs/acado/aarch64/lib/libacado_toolkit.a filter=lfs diff=lfs merge=lfs -text
phonelibs/acado/aarch64/lib/libacado_toolkit_s.so.1.2.2beta filter=lfs diff=lfs merge=lfs -text
phonelibs/acado/aarch64/lib/libacado_casadi.a filter=lfs diff=lfs merge=lfs -text
phonelibs/acado/aarch64/lib/libacado_csparse.a filter=lfs diff=lfs merge=lfs -text
phonelibs/acado/aarch64/lib/libacado_qpoases.a filter=lfs diff=lfs merge=lfs -text
phonelibs/fastcv/aarch64/libfastcvopt.so filter=lfs diff=lfs merge=lfs -text
phonelibs/fastcv/aarch64/libfastcvadsp_stub.so filter=lfs diff=lfs merge=lfs -text
models/segnet2.keras filter=lfs diff=lfs merge=lfs -text
phonelibs/zmq/aarch64-linux/lib/libzmq.a filter=lfs diff=lfs merge=lfs -text

@@ -0,0 +1,31 @@
---
name: Bug report
about: For issues with running openpilot on your comma device
title: ''
labels: 'bug'
assignees: ''
---
**Describe the bug**
<!-- A clear and concise description of what the bug is. -->
**How to reproduce or log data**
<!-- Steps to reproduce the behavior. -->
**Expected behavior**
<!-- A clear and concise description of what you expected to happen. -->
**Device/Version information (please complete the following information):**
- Device: [e.g. EON/EON Gold/comma two]
- Dongle ID: [e.g. 77611a1fac303767, can be found in Settings -> Device -> Dongle ID or my.comma.ai/useradmin]
- Route: [e.g. 77611a1fac303767|2020-05-11--16-37-07, can be found in my.comma.ai/useradmin]
- Timestamp: [When in the route the bug occurs (e.g. 4min 30s into the drive)]
- Version: [commit hash when on a non-release branch, or version number when on devel or release2 (e.g. 0.7.6)]
- Car make/model: [e.g. Toyota Prius 2016]
**Additional context**
<!-- Add any other context about the problem here. -->

@@ -0,0 +1,31 @@
---
name: Car bug report
about: For issues with a particular car or make
title: ''
labels: 'car bug'
assignees: ''
---
**Describe the bug**
<!-- A clear and concise description of what the bug is. -->
**How to reproduce or log data**
<!-- Steps to reproduce the behavior. -->
**Expected behavior**
<!-- A clear and concise description of what you expected to happen. -->
**Device/Version information (please complete the following information):**
- Device: [e.g. EON/EON Gold/comma two]
- Dongle ID: [e.g. 77611a1fac303767, can be found in Settings -> Device -> Dongle ID or my.comma.ai/useradmin]
- Route: [e.g. 77611a1fac303767|2020-05-11--16-37-07, can be found in my.comma.ai/useradmin]
- Timestamp: [When in the route the bug occurs (e.g. 4min 30s into the drive)]
- Version: [commit hash when on a non-release branch, or version number when on devel or release2 (e.g. 0.7.6)]
- Car make/model: [e.g. Toyota Prius 2016]
**Additional context**
<!-- Add any other context about the problem here. -->

@@ -0,0 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: Discussions
url: https://github.com/commaai/openpilot/discussions
about: For questions and discussion about openpilot
- name: Community Wiki
url: https://github.com/commaai/openpilot/wiki
about: Check out our community wiki
- name: Community Discord
url: https://discord.comma.ai
about: Check out our community discord

@@ -0,0 +1,8 @@
---
name: Enhancement
about: For openpilot enhancement suggestions
title: ''
labels: 'enhancement'
assignees: ''
---

@@ -0,0 +1,25 @@
---
name: PC Bug report
about: For issues with running openpilot on PC
title: ''
labels: 'PC'
assignees: ''
---
**Describe the bug**
<!-- A clear and concise description of what the bug is. Add the `simulation` label if running in an environment like CARLA. -->
**How to reproduce or log data**
<!-- Steps to reproduce the behavior. -->
**Expected behavior**
<!-- A clear and concise description of what you expected to happen. -->
**Additional context**
<!-- Add any other context about the problem here. -->
Operating system: [e.g. Ubuntu 16.04]

@@ -0,0 +1,15 @@
---
name: Bug fix
about: For openpilot bug fixes
title: ''
labels: 'bugfix'
assignees: ''
---
**Description**
<!-- A description of the bug and the fix. Also link the issue if it exists. -->
**Verification**
<!-- Explain how you tested this bug fix. -->

@@ -0,0 +1,19 @@
---
name: Car Bug fix
about: For vehicle/brand specific bug fixes
title: ''
labels: 'car bug fix'
assignees: ''
---
**Description**
<!-- A description of the bug and the fix. Also link the issue if it exists. -->
**Verification**
<!-- Explain how you tested this bug fix. -->
**Route**
Route: [a route with the bug fix]

@@ -0,0 +1,14 @@
---
name: Car port
about: For new car ports
title: ''
labels: 'car port'
assignees: ''
---
**Checklist**
- [ ] added to README
- [ ] test route added to [test_routes.py](../../selfdrive/test/test_routes.py)
- [ ] route with openpilot:
- [ ] route with stock system:

@@ -0,0 +1,11 @@
---
name: Fingerprint
about: For adding fingerprints to existing cars
title: ''
labels: 'fingerprint'
assignees: ''
---
Discord username: []
Route: []

@@ -0,0 +1,15 @@
---
name: Refactor
about: For code refactors
title: ''
labels: 'refactor'
assignees: ''
---
**Description**
<!-- A description of the refactor, including the goals it accomplishes. -->
**Verification**
<!-- Explain how you tested the refactor for regressions. -->

.github/dependabot.yml vendored 100644 (8 changed lines)

@@ -0,0 +1,8 @@
version: 2
updates:
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: '15:00'
open-pull-requests-limit: 10

@@ -0,0 +1,38 @@
<!-- Please copy and paste the relevant template -->
<!--- ***** Template: Car bug fix *****
**Description** [](A description of the bug and the fix. Also link any relevant issues.)
**Verification** [](Explain how you tested this bug fix.)
**Route**
Route: [a route with the bug fix]
-->
<!--- ***** Template: Bug fix *****
**Description** [](A description of the bug and the fix. Also link any relevant issues.)
**Verification** [](Explain how you tested this bug fix.)
-->
<!--- ***** Template: Car port *****
**Checklist**
- [ ] added to README
- [ ] test route added to [test_routes.py](../../selfdrive/test/test_routes.py)
- [ ] route with openpilot:
- [ ] route with stock system:
-->
<!--- ***** Template: Refactor *****
**Description** [](A description of the refactor, including the goals it accomplishes.)
**Verification** [](Explain how you tested the refactor for regressions.)
-->

.github/workflows/prebuilt.yaml vendored 100644 (42 changed lines)

@@ -0,0 +1,42 @@
name: prebuilt
on:
schedule:
- cron: '0 * * * *'
env:
BASE_IMAGE: openpilot-base
DOCKER_REGISTRY: ghcr.io/commaai
DOCKER_LOGIN: docker login ghcr.io -u adeebshihadeh -p ${{ secrets.CONTAINER_TOKEN }}
BUILD: |
docker pull $(grep -iohP '(?<=^from)\s+\S+' Dockerfile.openpilot_base) || true
docker pull $DOCKER_REGISTRY/$BASE_IMAGE:latest || true
docker build --cache-from $DOCKER_REGISTRY/$BASE_IMAGE:latest -t $DOCKER_REGISTRY/$BASE_IMAGE:latest -t $BASE_IMAGE:latest -f Dockerfile.openpilot_base .
jobs:
build_prebuilt:
name: build prebuilt
runs-on: ubuntu-20.04
timeout-minutes: 60
if: github.repository == 'commaai/openpilot'
env:
IMAGE_NAME: openpilot-prebuilt
steps:
- name: Wait for green check mark
uses: lewagon/wait-on-check-action@v0.2
with:
ref: master
wait-interval: 30
running-workflow-name: 'build prebuilt'
- uses: actions/checkout@v2
with:
submodules: true
- name: Build Docker image
run: |
eval "$BUILD"
docker pull $DOCKER_REGISTRY/$IMAGE_NAME:latest || true
docker build --cache-from $DOCKER_REGISTRY/$IMAGE_NAME:latest -t $DOCKER_REGISTRY/$IMAGE_NAME:latest -f Dockerfile.openpilot .
- name: Push to container registry
run: |
$DOCKER_LOGIN
docker push $DOCKER_REGISTRY/$IMAGE_NAME:latest

@@ -0,0 +1,293 @@
name: selfdrive
on:
push:
branches-ignore:
- 'testing-closet*'
pull_request:
env:
BASE_IMAGE: openpilot-base
DOCKER_REGISTRY: ghcr.io/commaai
DOCKER_LOGIN: docker login ghcr.io -u adeebshihadeh -p ${{ secrets.CONTAINER_TOKEN }}
BUILD: |
docker pull $(grep -iohP '(?<=^from)\s+\S+' Dockerfile.openpilot_base) || true
docker pull $DOCKER_REGISTRY/$BASE_IMAGE:latest || true
docker build --cache-from $DOCKER_REGISTRY/$BASE_IMAGE:latest -t $DOCKER_REGISTRY/$BASE_IMAGE:latest -t $BASE_IMAGE:latest -f Dockerfile.openpilot_base .
RUN: docker run --shm-size 1G -v $PWD:/tmp/openpilot -w /tmp/openpilot -e PYTHONPATH=/tmp/openpilot -e SCONS_CACHE=1 -e GITHUB_ACTION -e GITHUB_REF -e GITHUB_HEAD_REF -e GITHUB_SHA -e GITHUB_REPOSITORY -e GITHUB_RUN_ID -v /tmp/scons_cache:/tmp/scons_cache -v /tmp/comma_download_cache:/tmp/comma_download_cache $BASE_IMAGE /bin/sh -c
UNIT_TEST: coverage run --append -m unittest discover
jobs:
# TODO: once actions/cache supports read only mode, use the cache for all jobs
build_release:
name: build release
runs-on: ubuntu-20.04
timeout-minutes: 50
env:
STRIPPED_DIR: tmppilot
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Cache dependencies
id: dependency-cache
uses: actions/cache@v2
with:
path: /tmp/scons_cache
key: scons-cache-${{ hashFiles('selfdrive/**') }}
restore-keys: scons-cache-
- name: Strip non-release files
run: |
mkdir $STRIPPED_DIR
cp -pR --parents $(cat release/files_common) $STRIPPED_DIR
cp Dockerfile.openpilot_base $STRIPPED_DIR
# need this to build on x86
cp -pR --parents phonelibs/libyuv phonelibs/snpe selfdrive/modeld/runners $STRIPPED_DIR
- name: Build Docker image
run: eval "$BUILD"
- name: Build openpilot and run checks
run: |
cd $STRIPPED_DIR
${{ env.RUN }} "python selfdrive/manager/build.py && \
python -m unittest discover selfdrive/car"
- name: Cleanup scons cache
run: |
cd $STRIPPED_DIR
${{ env.RUN }} "scons -j$(nproc) && \
rm -rf /tmp/scons_cache/* && \
scons -j$(nproc) --cache-populate"
build_mac:
name: build macos
runs-on: macos-10.15
timeout-minutes: 60
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Determine pre-existing Homebrew packages
if: steps.dependency-cache.outputs.cache-hit != 'true'
run: |
echo 'EXISTING_CELLAR<<EOF' >> $GITHUB_ENV
ls -1 /usr/local/Cellar >> $GITHUB_ENV
echo 'EOF' >> $GITHUB_ENV
- name: Cache dependencies
id: dependency-cache
uses: actions/cache@v2
with:
path: |
~/.pyenv
~/Library/Caches/pip
~/Library/Caches/pipenv
/usr/local/Cellar
~/github_brew_cache_entries.txt
key: macos-cache-${{ hashFiles('tools/mac_setup.sh') }}
- name: Brew link restored dependencies
if: steps.dependency-cache.outputs.cache-hit == 'true'
run: |
while read pkg; do
brew link --force "$pkg" # `--force` for keg-only packages
done < ~/github_brew_cache_entries.txt
- name: Install dependencies
run: ./tools/mac_setup.sh
- name: Build openpilot
run: eval "$(pyenv init -)" && scons -j$(nproc)
- name: Remove pre-existing Homebrew packages for caching
if: steps.dependency-cache.outputs.cache-hit != 'true'
run: |
cd /usr/local/Cellar
new_cellar=$(ls -1)
comm -12 <(echo "$EXISTING_CELLAR") <(echo "$new_cellar") | while read pkg; do
if [[ $pkg != "zstd" ]]; then # caching step needs zstd
rm -rf "$pkg"
fi
done
comm -13 <(echo "$EXISTING_CELLAR") <(echo "$new_cellar") | tee ~/github_brew_cache_entries.txt
build_webcam:
name: build webcam
runs-on: ubuntu-20.04
timeout-minutes: 90
env:
IMAGE_NAME: openpilotwebcamci
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build Docker image
run: |
eval "$BUILD"
docker pull $DOCKER_REGISTRY/$IMAGE_NAME:latest || true
docker build --cache-from $DOCKER_REGISTRY/$IMAGE_NAME:latest -t $DOCKER_REGISTRY/$IMAGE_NAME:latest -f tools/webcam/Dockerfile .
- name: Build openpilot
run: docker run --shm-size 1G --rm -v $PWD:/tmp/openpilot -e PYTHONPATH=/tmp/openpilot $DOCKER_REGISTRY/$IMAGE_NAME /bin/sh -c "cd /tmp/openpilot && USE_WEBCAM=1 scons -j$(nproc)"
- name: Push to container registry
if: github.ref == 'refs/heads/master' && github.repository == 'commaai/openpilot'
run: |
$DOCKER_LOGIN
docker push $DOCKER_REGISTRY/$IMAGE_NAME:latest
docker_push:
name: docker push
runs-on: ubuntu-20.04
timeout-minutes: 50
if: github.ref == 'refs/heads/master' && github.event_name != 'pull_request' && github.repository == 'commaai/openpilot'
needs: static_analysis # hack to ensure slow tests run first since this and static_analysis are fast
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build Docker image
run: eval "$BUILD"
- name: Push to container registry
run: |
$DOCKER_LOGIN
docker push $DOCKER_REGISTRY/$BASE_IMAGE:latest
static_analysis:
name: static analysis
runs-on: ubuntu-20.04
timeout-minutes: 50
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build Docker image
run: eval "$BUILD"
- name: pre-commit
run: ${{ env.RUN }} "git init && git add -A && pre-commit run --all"
valgrind:
name: valgrind
runs-on: ubuntu-20.04
timeout-minutes: 50
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Cache dependencies
id: dependency-cache
uses: actions/cache@v2
with:
path: /tmp/comma_download_cache
key: ${{ hashFiles('.github/workflows/test.yaml', 'selfdrive/test/test_valgrind_replay.py') }}
- name: Build Docker image
run: eval "$BUILD"
- name: Run valgrind
run: |
${{ env.RUN }} "scons -j$(nproc) && \
FILEREADER_CACHE=1 python selfdrive/test/test_valgrind_replay.py"
- name: Print logs
if: always()
run: cat selfdrive/test/valgrind_logs.txt
unit_tests:
name: unit tests
runs-on: ubuntu-20.04
timeout-minutes: 50
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build Docker image
run: eval "$BUILD"
- name: Run unit tests
run: |
${{ env.RUN }} "scons -j$(nproc) --test && \
coverage run selfdrive/test/test_fingerprints.py && \
$UNIT_TEST common && \
$UNIT_TEST opendbc/can && \
$UNIT_TEST selfdrive/boardd && \
$UNIT_TEST selfdrive/controls && \
$UNIT_TEST selfdrive/monitoring && \
$UNIT_TEST selfdrive/loggerd && \
$UNIT_TEST selfdrive/car && \
$UNIT_TEST selfdrive/locationd && \
$UNIT_TEST selfdrive/athena && \
$UNIT_TEST selfdrive/thermald && \
$UNIT_TEST tools/lib/tests && \
./selfdrive/camerad/test/ae_gray_test"
- name: Upload coverage to Codecov
run: bash <(curl -s https://codecov.io/bash) -v -F unit_tests
process_replay:
name: process replay
runs-on: ubuntu-20.04
timeout-minutes: 50
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Cache dependencies
id: dependency-cache
uses: actions/cache@v2
with:
path: /tmp/comma_download_cache
key: ${{ hashFiles('.github/workflows/test.yaml', 'selfdrive/test/process_replay/test_processes.py') }}
- name: Build Docker image
run: eval "$BUILD"
- name: Run replay
run: |
${{ env.RUN }} "scons -j$(nproc) && \
FILEREADER_CACHE=1 CI=1 coverage run selfdrive/test/process_replay/test_processes.py"
- name: Upload coverage to Codecov
run: bash <(curl -s https://codecov.io/bash) -v -F process_replay
- name: Print diff
if: always()
run: cat selfdrive/test/process_replay/diff.txt
- uses: actions/upload-artifact@v2
if: always()
continue-on-error: true
with:
name: process_replay_diff.txt
path: selfdrive/test/process_replay/diff.txt
test_longitudinal:
name: longitudinal
runs-on: ubuntu-20.04
timeout-minutes: 50
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build Docker image
run: eval "$BUILD"
- name: Test longitudinal
run: |
${{ env.RUN }} "mkdir -p selfdrive/test/out && \
scons -j$(nproc) && \
cd selfdrive/test/longitudinal_maneuvers && \
./test_longitudinal.py"
- uses: actions/upload-artifact@v2
if: always()
continue-on-error: true
with:
name: longitudinal
path: selfdrive/test/longitudinal_maneuvers/out/longitudinal/
test_car_models:
name: car models
runs-on: ubuntu-20.04
timeout-minutes: 50
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Cache dependencies
id: dependency-cache
uses: actions/cache@v2
with:
path: /tmp/comma_download_cache
key: ${{ hashFiles('.github/workflows/test.yaml', 'selfdrive/test/test_routes.py') }}
- name: Build Docker image
run: eval "$BUILD"
- name: Test car models
run: |
${{ env.RUN }} "scons -j$(nproc) && \
FILEREADER_CACHE=1 coverage run --parallel-mode -m nose --processes=4 --process-timeout=60 \
selfdrive/test/test_models.py && \
coverage combine"
- name: Upload coverage to Codecov
run: bash <(curl -s https://codecov.io/bash) -v -F test_car_models

@@ -0,0 +1,58 @@
name: tools
on:
push:
pull_request:
env:
BASE_IMAGE: openpilot-base
DOCKER_REGISTRY: ghcr.io/commaai
DOCKER_LOGIN: docker login ghcr.io -u adeebshihadeh -p ${{ secrets.CONTAINER_TOKEN }}
BUILD: |
docker pull $(grep -iohP '(?<=^from)\s+\S+' Dockerfile.openpilot_base) || true
docker pull $DOCKER_REGISTRY/$BASE_IMAGE:latest || true
docker build --cache-from $DOCKER_REGISTRY/$BASE_IMAGE:latest -t $DOCKER_REGISTRY/$BASE_IMAGE:latest -t $BASE_IMAGE:latest -f Dockerfile.openpilot_base .
RUN: docker run --shm-size 1G -v $PWD:/tmp/openpilot -e PYTHONPATH=/tmp/openpilot -e GITHUB_ACTION -e GITHUB_REF -e GITHUB_HEAD_REF -e GITHUB_SHA -e \
GITHUB_REPOSITORY -e GITHUB_RUN_ID -v /tmp/comma_download_cache:/tmp/comma_download_cache $BASE_IMAGE /bin/sh -c
jobs:
plotjuggler:
name: plotjuggler
runs-on: ubuntu-20.04
timeout-minutes: 30
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build Docker image
run: eval "$BUILD"
- name: Unit test
run: |
${{ env.RUN }} "scons -j$(nproc) --directory=/tmp/openpilot/cereal && \
apt-get update && \
apt-get install -y libdw-dev libqt5svg5-dev libqt5x11extras5-dev && \
cd /tmp/openpilot/tools/plotjuggler && \
./test_plotjuggler.py"
simulator:
name: simulator
runs-on: ubuntu-20.04
timeout-minutes: 50
env:
IMAGE_NAME: openpilot-sim
if: github.repository == 'commaai/openpilot'
steps:
- uses: actions/checkout@v2
with:
submodules: true
lfs: true
- name: Build Docker image
run: |
eval "$BUILD"
docker pull $DOCKER_REGISTRY/$IMAGE_NAME:latest || true
docker build --cache-from $DOCKER_REGISTRY/$IMAGE_NAME:latest -t $DOCKER_REGISTRY/$IMAGE_NAME:latest -f tools/sim/Dockerfile.sim .
- name: Push to container registry
if: github.ref == 'refs/heads/master' && github.repository == 'commaai/openpilot'
run: |
$DOCKER_LOGIN
docker push $DOCKER_REGISTRY/$IMAGE_NAME:latest

.gitignore vendored (15 changed lines)

@@ -1,4 +1,5 @@
venv/
.clang-format
.DS_Store
.tags
.ipynb_checkpoints
@@ -6,9 +7,10 @@ venv/
.overlay_init
.overlay_consistent
.sconsign.dblite
.vscode
.vscode*
model2.png
a.out
.hypothesis
*.dylib
*.DSYM
@@ -31,6 +33,7 @@ a.out
*.vcd
config.json
clcache
compile_commands.json
persist
board/obj/
@@ -42,9 +45,11 @@ selfdrive/ui/_ui
selfdrive/test/longitudinal_maneuvers/out
selfdrive/visiond/visiond
selfdrive/loggerd/loggerd
selfdrive/loggerd/bootlog
selfdrive/sensord/_gpsd
selfdrive/sensord/_sensord
selfdrive/camerad/camerad
selfdrive/camerad/test/ae_gray_test
selfdrive/modeld/_modeld
selfdrive/modeld/_dmonitoringmodeld
/src/
@@ -53,9 +58,9 @@ one
openpilot
notebooks
xx
hyperthneed
panda_jungle
apks
openpilot-apks
provisioning
.coverage*
coverage.xml
@@ -66,3 +71,7 @@ pandaextra
flycheck_*
cppcheck_report.txt
comma*.sh
selfdrive/modeld/thneed/compile
models/*.thneed

.gitmodules vendored 100644 (15 changed lines)

@@ -0,0 +1,15 @@
[submodule "panda"]
path = panda
url = ../../RetroPilot/panda.git
[submodule "opendbc"]
path = opendbc
url = ../../RetroPilot/opendbc.git
[submodule "laika_repo"]
path = laika_repo
url = ../../RetroPilot/laika.git
[submodule "cereal"]
path = cereal
url = ../../RetroPilot/cereal.git
[submodule "rednose_repo"]
path = rednose_repo
url = ../../RetroPilot/rednose.git

@@ -0,0 +1,47 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.4.0
hooks:
- id: check-ast
- id: check-json
- id: check-xml
- id: check-yaml
- id: check-merge-conflict
- id: check-symlinks
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.800
hooks:
- id: mypy
exclude: '^(pyextra)|(cereal)|(rednose)|(panda)|(laika)|(opendbc)|(laika_repo)|(rednose_repo)/'
additional_dependencies: ['git+https://github.com/numpy/numpy-stubs']
- repo: https://github.com/PyCQA/flake8
rev: 3.8.4
hooks:
- id: flake8
exclude: '^(pyextra)|(cereal)|(rednose)|(panda)|(laika)|(opendbc)|(laika_repo)|(rednose_repo)|(selfdrive/debug)/'
args:
- --select=F,E112,E113,E304,E501,E502,E701,E702,E703,E71,E72,E731,W191,W6
- --max-line-length=240
- --statistics
- repo: local
hooks:
- id: pylint
name: pylint
entry: pylint
language: system
types: [python]
exclude: '^(pyextra)|(cereal)|(rednose)|(panda)|(laika)|(laika_repo)|(rednose_repo)/'
- repo: local
hooks:
- id: cppcheck
name: cppcheck
entry: cppcheck
language: system
types: [c++]
exclude: '^(phonelibs)|(cereal)|(opendbc)|(panda)|(tools)|(selfdrive/modeld/thneed/debug)|(selfdrive/modeld/test)|(selfdrive/camerad/test)/|(installer)'
args:
- --error-exitcode=1
- --language=c++
- --quiet
- --force
- -j8

.pylintrc 100644 (471 changed lines)

@@ -0,0 +1,471 @@
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=scipy cereal.messaging.messaging_pyx
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint.
jobs=4
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Specify a configuration file.
#rcfile=
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=C,R,W0613,W0511,W0212,W0201,W0311,W0106,W0603,W0621,W0703,W1201,W1203,E1136
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[REPORTS]
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio).You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages
reports=no
# Activate the evaluation score.
score=yes
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=optparse.Values,sys.exit
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=capnp.* cereal.* pygame.* zmq.* setproctitle.* smbus2.* usb1.* serial.* cv2.* ft4222.*
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=flask setproctitle usb1 flask.ext.socketio smbus2 usb1.*
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=100
# Maximum number of lines in a module
max-module-lines=1000
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,
dict-separator
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[BASIC]
# Naming style matching correct argument names
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style
#argument-rgx=
# Naming style matching correct attribute names
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Naming style matching correct class attribute names
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style
#class-attribute-rgx=
# Naming style matching correct class names
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-style
#class-rgx=
# Naming style matching correct constant names
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style
#function-rgx=
# Good variable names which should always be accepted, separated by a comma
good-names=i,
j,
k,
ex,
Run,
_
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# Naming style matching correct inline iteration names
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style
#inlinevar-rgx=
# Naming style matching correct method names
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style
#method-rgx=
# Naming style matching correct module names
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=abc.abstractproperty
# Naming style matching correct variable names
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style
#variable-rgx=
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of statements in function / method body
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[IMPORTS]
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,
TERMIOS,
Bastion,
rexec
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception

.python-version 100644 (1 changed line)

@@ -0,0 +1 @@
3.8.5

@@ -2,7 +2,7 @@
Our software is open source so you can solve your own problems without needing help from others. And if you solve a problem and are so kind, you can upstream it for the rest of the world to use.
Most open source development activity is coordinated through our [Discord](https://discord.comma.ai). A lot of documentation is available on our [medium](https://medium.com/@comma_ai/).
Most open source development activity is coordinated through our [GitHub Discussions](https://github.com/commaai/openpilot/discussions) and [Discord](https://discord.comma.ai). A lot of documentation is available on our [medium](https://medium.com/@comma_ai/).
## Getting Started
@@ -12,23 +12,19 @@ Most open source development activity is coordinated through our [Discord](https
## Testing
### Local Testing
You can test your changes on your machine by running `run_docker_tests.sh`. This will run some automated tests in docker against your code.
### Automated Testing
All PRs and commits are automatically checked by Github Actions. Check out `.github/workflows/` for what Github Actions runs. Any new tests sould be added to Github Actions.
All PRs and commits are automatically checked by GitHub Actions. Check out `.github/workflows/` for what GitHub Actions runs. Any new tests should be added to GitHub Actions.
### Code Style and Linting
Code is automatically checked for style by Github Actions as part of the automated tests. You can also run these tests yourself by running `pre-commit run --all`.
Code is automatically checked for style by GitHub Actions as part of the automated tests. You can also run these tests yourself by running `pre-commit run --all`.
## Car Ports (openpilot)
We've released a [Model Port guide](https://medium.com/@comma_ai/openpilot-port-guide-for-toyota-models-e5467f4b5fe6) for porting to Toyota/Lexus models.
If you port openpilot to a substantially new car brand, see this more generic [Brand Port guide](https://medium.com/@comma_ai/how-to-write-a-car-port-for-openpilot-7ce0785eda84). You might also be eligible for a bounty. See our bounties at [comma.ai/bounties.html](https://comma.ai/bounties.html)
If you port openpilot to a substantially new car brand, see this more generic [Brand Port guide](https://medium.com/@comma_ai/how-to-write-a-car-port-for-openpilot-7ce0785eda84).
## Pull Requests
@@ -38,12 +34,10 @@ git clone https://github.com/commaai/openpilot.git --recursive
```
Or alternatively, when on the master branch:
```
git submodule init
git submodule update
git submodule update --init
```
The reasons for having submodules on a dedicated repository and our new development philosophy can be found in our [post about externalization](https://medium.com/@comma_ai/a-2020-theme-externalization-13b33326d8b3).
Modules that are in separate repositories include:
* apks
* cereal
* laika
* opendbc

@@ -0,0 +1,32 @@
FROM ghcr.io/commaai/openpilot-base:latest
ENV PYTHONUNBUFFERED 1
ENV OPENPILOT_PATH /home/batman/openpilot/
ENV PYTHONPATH ${OPENPILOT_PATH}:${PYTHONPATH}
RUN mkdir -p ${OPENPILOT_PATH}
WORKDIR ${OPENPILOT_PATH}
COPY Pipfile Pipfile.lock $OPENPILOT_PATH
RUN pip install --no-cache-dir pipenv==2020.8.13 && \
pipenv install --system --deploy --dev --clear && \
pip uninstall -y pipenv
COPY SConstruct ${OPENPILOT_PATH}
COPY ./pyextra ${OPENPILOT_PATH}/pyextra
COPY ./phonelibs ${OPENPILOT_PATH}/phonelibs
COPY ./site_scons ${OPENPILOT_PATH}/site_scons
COPY ./laika ${OPENPILOT_PATH}/laika
COPY ./laika_repo ${OPENPILOT_PATH}/laika_repo
COPY ./rednose ${OPENPILOT_PATH}/rednose
COPY ./tools ${OPENPILOT_PATH}/tools
COPY ./release ${OPENPILOT_PATH}/release
COPY ./common ${OPENPILOT_PATH}/common
COPY ./opendbc ${OPENPILOT_PATH}/opendbc
COPY ./cereal ${OPENPILOT_PATH}/cereal
COPY ./panda ${OPENPILOT_PATH}/panda
COPY ./selfdrive ${OPENPILOT_PATH}/selfdrive
RUN scons -j$(nproc)

@@ -0,0 +1,70 @@
FROM ubuntu:20.04
ENV PYTHONUNBUFFERED 1
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y --no-install-recommends \
autoconf \
build-essential \
bzip2 \
ca-certificates \
capnproto \
clang \
cmake \
cppcheck \
curl \
ffmpeg \
gcc-arm-none-eabi \
git \
iputils-ping \
libarchive-dev \
libbz2-dev \
libcapnp-dev \
libcurl4-openssl-dev \
libeigen3-dev \
libffi-dev \
libgles2-mesa-dev \
libglew-dev \
libglib2.0-0 \
liblzma-dev \
libomp-dev \
libopencv-dev \
libqt5sql5-sqlite \
libqt5svg5-dev \
libsqlite3-dev \
libssl-dev \
libsystemd-dev \
libusb-1.0-0-dev \
libzmq3-dev \
locales \
ocl-icd-libopencl1 \
ocl-icd-opencl-dev \
opencl-headers \
python-dev \
qml-module-qtquick2 \
qt5-default \
qtlocation5-dev \
qtmultimedia5-dev \
qtpositioning5-dev \
qtwebengine5-dev \
sudo \
valgrind \
wget \
&& rm -rf /var/lib/apt/lists/*
RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8
RUN curl -L https://github.com/pyenv/pyenv-installer/raw/master/bin/pyenv-installer | bash
ENV PATH="/root/.pyenv/bin:/root/.pyenv/shims:${PATH}"
COPY Pipfile Pipfile.lock /tmp/
RUN pyenv install 3.8.5 && \
pyenv global 3.8.5 && \
pyenv rehash && \
pip install --no-cache-dir --upgrade pip==20.1.1 && \
pip install --no-cache-dir pipenv==2020.8.13 && \
cd /tmp && \
pipenv install --system --deploy --dev --clear && \
pip uninstall -y pipenv

Jenkinsfile vendored (152 changed lines)

@@ -1,25 +1,42 @@
def phone(String ip, String step_label, String cmd) {
def ci_env = "CI=1 TEST_DIR=${env.TEST_DIR} GIT_BRANCH=${env.GIT_BRANCH} GIT_COMMIT=${env.GIT_COMMIT}"
withCredentials([file(credentialsId: 'id_rsa', variable: 'key_file')]) {
def ssh_cmd = """
ssh -tt -o StrictHostKeyChecking=no -i ${key_file} -p 8022 'comma@${ip}' /usr/bin/bash <<'EOF'
set -e
export CI=1
export TEST_DIR=${env.TEST_DIR}
export GIT_BRANCH=${env.GIT_BRANCH}
export GIT_COMMIT=${env.GIT_COMMIT}
source ~/.bash_profile
if [ -f /TICI ]; then
source /etc/profile
fi
ln -snf ${env.TEST_DIR} /data/pythonpath
if [ -f /EON ]; then
echo \$\$ > /dev/cpuset/app/tasks || true
echo \$PPID > /dev/cpuset/app/tasks || true
mkdir -p /dev/shm
chmod 777 /dev/shm
fi
withCredentials([file(credentialsId: 'id_rsa_public', variable: 'key_file')]) {
sh label: step_label,
script: """
ssh -tt -o StrictHostKeyChecking=no -i ${key_file} -p 8022 root@${ip} '${ci_env} /usr/bin/bash -le' <<'EOF'
echo \$\$ > /dev/cpuset/app/tasks || true
echo \$PPID > /dev/cpuset/app/tasks || true
mkdir -p /dev/shm
chmod 777 /dev/shm
cd ${env.TEST_DIR} || true
${cmd}
exit 0
EOF"""
sh script: ssh_cmd, label: step_label
}
}
def phone_steps(String device_type, steps) {
lock(resource: "", label: device_type, inversePrecedence: true, variable: 'device_ip', quantity: 1) {
timeout(time: 60, unit: 'MINUTES') {
phone(device_ip, "kill old processes", "pkill -f comma || true")
timeout(time: 90, unit: 'MINUTES') {
phone(device_ip, "git checkout", readFile("selfdrive/test/setup_device_ci.sh"),)
steps.each { item ->
phone(device_ip, item[0], item[1])
@@ -34,10 +51,13 @@ pipeline {
COMMA_JWT = credentials('athena-test-jwt')
TEST_DIR = "/data/openpilot"
}
options {
timeout(time: 2, unit: 'HOURS')
}
stages {
stage('Release Build') {
stage('Build release2') {
agent {
docker {
image 'python:3.7.3'
@@ -58,19 +78,18 @@ pipeline {
when {
not {
anyOf {
branch 'master-ci'; branch 'devel'; branch 'devel-staging'; branch 'release2'; branch 'release2-staging'; branch 'dashcam'; branch 'dashcam-staging'
branch 'master-ci'; branch 'devel'; branch 'devel-staging'; branch 'release2'; branch 'release2-staging'; branch 'dashcam'; branch 'dashcam-staging'; branch 'testing-closet*'
}
}
}
stages {
/*
stage('PC tests') {
agent {
dockerfile {
filename 'Dockerfile.openpilot'
filename 'Dockerfile.openpilotci'
args '--privileged --shm-size=1G --user=root'
}
}
@@ -93,6 +112,10 @@ pipeline {
stage('On-device Tests') {
agent {
docker {
/*
filename 'Dockerfile.ondevice_ci'
args "--privileged -v /dev:/dev --shm-size=1G --user=root"
*/
image 'python:3.7.3'
args '--user=root'
}
@@ -101,19 +124,13 @@ pipeline {
stages {
stage('parallel tests') {
parallel {
stage('Devel Build') {
environment {
CI_PUSH = "${env.BRANCH_NAME == 'master' ? 'master-ci' : ' '}"
}
stage('Devel Tests') {
steps {
phone_steps("eon", [
["build devel", "cd release && CI_PUSH=${env.CI_PUSH} ./build_devel.sh"],
["test openpilot", "nosetests -s selfdrive/test/test_openpilot.py"],
["test cpu usage", "cd selfdrive/test/ && ./test_cpu_usage.py"],
phone_steps("eon-build", [
["build devel", "cd release && SCONS_CACHE=1 DEVEL_TEST=1 ./build_devel.sh"],
["test manager", "python selfdrive/manager/test/test_manager.py"],
["onroad tests", "cd selfdrive/test/ && ./test_onroad.py"],
["test car interfaces", "cd selfdrive/car/tests/ && ./test_car_interfaces.py"],
["test spinner build", "cd selfdrive/ui/spinner && make clean && make"],
["test text window build", "cd selfdrive/ui/text && make clean && make"],
])
}
}
@@ -121,6 +138,7 @@ pipeline {
stage('Replay Tests') {
steps {
phone_steps("eon2", [
["build QCOM_REPLAY", "SCONS_CACHE=1 QCOM_REPLAY=1 scons -j4"],
["camerad/modeld replay", "cd selfdrive/test/process_replay && ./camera_replay.py"],
])
}
@@ -129,20 +147,100 @@ pipeline {
stage('HW + Unit Tests') {
steps {
phone_steps("eon", [
["build cereal", "SCONS_CACHE=1 scons -j4 cereal/"],
["build", "SCONS_CACHE=1 scons -j4"],
["test athena", "nosetests -s selfdrive/athena/tests/test_athenad_old.py"],
["test sounds", "nosetests -s selfdrive/test/test_sounds.py"],
["test boardd loopback", "nosetests -s selfdrive/boardd/tests/test_boardd_loopback.py"],
["test loggerd", "python selfdrive/loggerd/tests/test_loggerd.py"],
["test encoder", "python selfdrive/loggerd/tests/test_encoder.py"],
["test logcatd", "python selfdrive/logcatd/tests/test_logcatd_android.py"],
//["test updater", "python installer/updater/test_updater.py"],
])
}
}
/*
stage('Power Consumption Tests') {
steps {
lock(resource: "", label: "c2-zookeeper", inversePrecedence: true, variable: 'device_ip', quantity: 1) {
timeout(time: 90, unit: 'MINUTES') {
sh script: "/home/batman/tools/zookeeper/enable_and_wait.py $device_ip 120", label: "turn on device"
phone(device_ip, "git checkout", readFile("selfdrive/test/setup_device_ci.sh"),)
phone(device_ip, "build", "SCONS_CACHE=1 scons -j4 && sync")
sh script: "/home/batman/tools/zookeeper/disable.py $device_ip", label: "turn off device"
sh script: "/home/batman/tools/zookeeper/enable_and_wait.py $device_ip 120", label: "turn on device"
sh script: "/home/batman/tools/zookeeper/check_consumption.py 60 3", label: "idle power consumption after boot"
sh script: "/home/batman/tools/zookeeper/ignition.py 1", label: "go onroad"
sh script: "/home/batman/tools/zookeeper/check_consumption.py 60 10", label: "onroad power consumption"
sh script: "/home/batman/tools/zookeeper/ignition.py 0", label: "go offroad"
sh script: "/home/batman/tools/zookeeper/check_consumption.py 60 2", label: "idle power consumption offroad"
}
}
}
}
*/
stage('Tici Build') {
environment {
R3_PUSH = "${env.BRANCH_NAME == 'master' ? '1' : ' '}"
}
steps {
phone_steps("tici", [
["build", "SCONS_CACHE=1 scons -j8"],
["test loggerd", "python selfdrive/loggerd/tests/test_loggerd.py"],
["test encoder", "LD_LIBRARY_PATH=/usr/local/lib python selfdrive/loggerd/tests/test_encoder.py"],
["onroad tests", "cd selfdrive/test/ && ./test_onroad.py"],
//["build release3-staging", "cd release && PUSH=${env.R3_PUSH} ./build_release3.sh"],
])
}
}
stage('camerad') {
steps {
phone_steps("eon-party", [
["build", "SCONS_CACHE=1 scons -j8"],
["test camerad", "python selfdrive/camerad/test/test_camerad.py"],
// ["test exposure", "python selfdrive/camerad/test/test_exposure.py"],
])
}
}
stage('Tici camerad') {
steps {
phone_steps("tici-party", [
["build", "SCONS_CACHE=1 scons -j8"],
["test camerad", "python selfdrive/camerad/test/test_camerad.py"],
// ["test exposure", "python selfdrive/camerad/test/test_exposure.py"],
])
}
}
}
}
stage('Push master-ci') {
when {
branch 'master'
}
steps {
phone_steps("eon-build", [
["push devel", "cd release && CI_PUSH='master-ci' ./build_devel.sh"],
])
}
}
}
post {
always {
cleanWs()
}
}
}
}
}
}
}

Pipfile 100644 (119 changed lines)

@@ -0,0 +1,119 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true
[dev-packages]
opencv-python= "*"
ipython = "*"
networkx = "~=2.3"
azure-core = "*"
azure-common = "*"
azure-nspkg = "~=3.0"
azure-storage-blob = "~=2.1"
azure-storage-common = "~=2.1"
azure-storage-nspkg = "~=3.1"
boto = "*"
"boto3" = "*"
control = "*"
datadog = "*"
elasticsearch = "*"
gunicorn = "*"
"h5py" = "*"
hexdump = "*"
imageio = "*"
ipykernel = "*"
joblib = "*"
json-logging-py = "*"
jupyter = "*"
"mpld3" = "*"
msgpack-python = "*"
numpy = "*"
osmium = "*"
pycurl = "*"
git-pylint-commit-hook = "*"
pymongo = "*"
"pynmea2" = "*"
python-logstash = "*"
redis = "*"
"s2sphere" = "*"
"subprocess32" = "*"
tenacity = "*"
keras_applications = "*"
PyMySQL = "~=0.9"
Werkzeug = "*"
"backports.lzma" = "*"
Flask-Cors = "*"
Flask-SocketIO = "*"
"GeoAlchemy2" = "*"
Pygments = "*"
reverse_geocoder = "*"
Shapely = "*"
SQLAlchemy = "*"
scipy = "*"
fastcluster = "*"
simplejson = "*"
seaborn = "*"
pyproj = "*"
mock = "*"
matplotlib = "*"
dictdiffer = "*"
aenum = "*"
coverage = "*"
azure-cli-core = "*"
paramiko = "*"
aiohttp = "*"
lru-dict = "*"
scikit-image = "*"
pygame = "==2.0.0.dev8"
pprofile = "*"
pyprof2calltree = "*"
pre-commit = "*"
mypy = "*"
parameterized = "*"
ft4222 = "*"
hypothesis = "*"
[packages]
atomicwrites = "*"
cffi = "*"
crcmod = "*"
hexdump = "*"
libusb1 = "*"
numpy = "*"
psutil = "*"
pycapnp = "==1.0.0"
cryptography = "*"
python-dateutil = "*"
pyzmq = "*"
requests = "*"
setproctitle = "*"
six = "*"
smbus2 = "*"
sympy = "!=1.6.1"
tqdm = "*"
Cython = "*"
PyYAML = "*"
websocket_client = "*"
urllib3 = "*"
gunicorn = "*"
utm = "*"
json-rpc = "*"
Flask = "*"
nose = "*"
flake8 = "*"
pylint = "*"
pillow = "*"
scons = "*"
cysignals = "*"
pycryptodome = "*"
"Jinja2" = "*"
PyJWT = "*"
pyserial = "*"
onnx = "*"
onnxruntime = "*"
timezonefinder = "*"
sentry-sdk = "*"
[requires]
python_version = "3.8"

Pipfile.lock generated 100644 (3298 changed lines)

File diff suppressed because it is too large.

README.md (152 changed lines)

@@ -64,58 +64,63 @@ Supported Cars
| Make | Model (US Market Reference) | Supported Package | ACC | No ACC accel below | No ALC below |
| ----------| ------------------------------| ------------------| -----------------| -------------------| ------------------|
| Acura | ILX 2016-18 | AcuraWatch Plus | openpilot | 25mph<sup>1</sup> | 25mph |
| Acura | ILX 2016-19 | AcuraWatch Plus | openpilot | 25mph<sup>1</sup> | 25mph |
| Acura | RDX 2016-18 | AcuraWatch Plus | openpilot | 25mph<sup>1</sup> | 12mph |
| Acura | RDX 2019-21 | All | Stock | 0mph | 3mph |
| Honda | Accord 2018-20 | All | Stock | 0mph | 3mph |
| Honda | Accord Hybrid 2018-20 | All | Stock | 0mph | 3mph |
| Honda | Civic Hatchback 2017-19 | Honda Sensing | Stock | 0mph | 12mph |
| Honda | Civic Hatchback 2017-21 | Honda Sensing | Stock | 0mph | 12mph |
| Honda | Civic Sedan/Coupe 2016-18 | Honda Sensing | openpilot | 0mph | 12mph |
| Honda | Civic Sedan/Coupe 2019-20 | Honda Sensing | Stock | 0mph | 2mph<sup>2</sup> |
| Honda | Civic Sedan/Coupe 2019-20 | All | Stock | 0mph | 2mph<sup>2</sup> |
| Honda | CR-V 2015-16 | Touring | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | CR-V 2017-20 | Honda Sensing | Stock | 0mph | 12mph |
| Honda | CR-V Hybrid 2017-2019 | Honda Sensing | Stock | 0mph | 12mph |
| Honda | Fit 2018-19 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | HR-V 2019 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | Insight 2019-20 | Honda Sensing | Stock | 0mph | 3mph |
| Honda | HR-V 2019-20 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | Insight 2019-21 | All | Stock | 0mph | 3mph |
| Honda | Inspire 2018 | All | Stock | 0mph | 3mph |
| Honda | Odyssey 2018-20 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 0mph |
| Honda | Passport 2019 | All | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | Pilot 2016-18 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | Pilot 2019 | All | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | Ridgeline 2017-20 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 12mph |
| Hyundai | Sonata 2020 | All | Stock | 0mph | 0mph |
| Lexus | CT Hybrid 2017-18 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Lexus | ES 2019 | All | openpilot | 0mph | 0mph |
| Lexus | ES Hybrid 2019 | All | openpilot | 0mph | 0mph |
| Honda | Pilot 2016-19 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | Ridgeline 2017-21 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 12mph |
| Hyundai | Palisade 2020-21 | All | Stock | 0mph | 0mph |
| Hyundai | Sonata 2020-21 | All | Stock | 0mph | 0mph |
| Lexus | CT Hybrid 2017-18 | LSS | Stock<sup>3</sup>| 0mph | 0mph |
| Lexus | ES 2019-21 | All | openpilot | 0mph | 0mph |
| Lexus | ES Hybrid 2017-18 | LSS | Stock<sup>3</sup>| 0mph | 0mph |
| Lexus | ES Hybrid 2019-21 | All | openpilot | 0mph | 0mph |
| Lexus | IS 2017-2019 | All | Stock | 22mph | 0mph |
| Lexus | IS Hybrid 2017 | All | Stock | 0mph | 0mph |
| Lexus | NX 2018 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Lexus | NX 2020 | All | openpilot | 0mph | 0mph |
| Lexus | NX Hybrid 2018 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Lexus | RX 2016-17 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Lexus | RX 2020 | All | openpilot | 0mph | 0mph |
| Lexus | RX 2016-18 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Lexus | RX 2020-21 | All | openpilot | 0mph | 0mph |
| Lexus | RX Hybrid 2016-19 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Lexus | RX Hybrid 2020 | All | openpilot | 0mph | 0mph |
| Toyota | Avalon 2016 | TSS-P | Stock<sup>3</sup>| 20mph<sup>1</sup> | 0mph |
| Toyota | Avalon 2017-18 | All | Stock<sup>3</sup>| 20mph<sup>1</sup> | 0mph |
| Toyota | Avalon 2016-21 | TSS-P | Stock<sup>3</sup>| 20mph<sup>1</sup> | 0mph |
| Toyota | Camry 2018-20 | All | Stock | 0mph<sup>4</sup> | 0mph |
| Toyota | Camry Hybrid 2018-19 | All | Stock | 0mph<sup>4</sup> | 0mph |
| Toyota | C-HR 2017-19 | All | Stock | 0mph | 0mph |
| Toyota | Camry 2021 | All | openpilot | 0mph | 0mph |
| Toyota | Camry Hybrid 2018-20 | All | Stock | 0mph<sup>4</sup> | 0mph |
| Toyota | Camry Hybrid 2021 | All | openpilot | 0mph | 0mph |
| Toyota | C-HR 2017-20 | All | Stock | 0mph | 0mph |
| Toyota | C-HR Hybrid 2017-19 | All | Stock | 0mph | 0mph |
| Toyota | Corolla 2017-19 | All | Stock<sup>3</sup>| 20mph<sup>1</sup> | 0mph |
| Toyota | Corolla 2020 | All | openpilot | 0mph | 0mph |
| Toyota | Corolla Hatchback 2019-20 | All | openpilot | 0mph | 0mph |
| Toyota | Corolla Hybrid 2020 | All | openpilot | 0mph | 0mph |
| Toyota | Corolla 2020-21 | All | openpilot | 0mph | 0mph |
| Toyota | Corolla Hatchback 2019-21 | All | openpilot | 0mph | 0mph |
| Toyota | Corolla Hybrid 2020-21 | All | openpilot | 0mph | 0mph |
| Toyota | Highlander 2017-19 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Highlander 2020-21 | All | openpilot | 0mph | 0mph |
| Toyota | Highlander Hybrid 2017-19 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Highlander 2020 | All | openpilot | 0mph | 0mph |
| Toyota | Highlander Hybrid 2020 | All | openpilot | 0mph | 0mph |
| Toyota | Prius 2016 | TSS-P | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Prius 2017-20 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Highlander Hybrid 2020-21 | All | openpilot | 0mph | 0mph |
| Toyota | Mirai 2021 | All | openpilot | 0mph | 0mph |
| Toyota | Prius 2016-20 | TSS-P | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Prius 2021 | All | openpilot | 0mph | 0mph |
| Toyota | Prius Prime 2017-20 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Rav4 2016 | TSS-P | Stock<sup>3</sup>| 20mph<sup>1</sup> | 0mph |
| Toyota | Rav4 2017-18 | All | Stock<sup>3</sup>| 20mph<sup>1</sup> | 0mph |
| Toyota | Rav4 2019-20 | All | openpilot | 0mph | 0mph |
| Toyota | Rav4 Hybrid 2016 | TSS-P | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Rav4 Hybrid 2017-18 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Rav4 Hybrid 2019-20 | All | openpilot | 0mph | 0mph |
| Toyota | Prius Prime 2021 | All | openpilot | 0mph | 0mph |
| Toyota | Rav4 2016-18 | TSS-P | Stock<sup>3</sup>| 20mph<sup>1</sup> | 0mph |
| Toyota | Rav4 2019-21 | All | openpilot | 0mph | 0mph |
| Toyota | Rav4 Hybrid 2016-18 | TSS-P | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Rav4 Hybrid 2019-21 | All | openpilot | 0mph | 0mph |
| Toyota | Sienna 2018-20 | All | Stock<sup>3</sup>| 0mph | 0mph |
<sup>1</sup>[Comma Pedal](https://github.com/commaai/openpilot/wiki/comma-pedal) is used to provide stop-and-go capability to some of the openpilot-supported cars that don't currently support stop-and-go. ***NOTE: The Comma Pedal is not officially supported by [comma](https://comma.ai).*** <br />
@ -128,6 +133,8 @@ Community Maintained Cars and Features
| Make | Model (US Market Reference) | Supported Package | ACC | No ACC accel below | No ALC below |
| ----------| ------------------------------| ------------------| -----------------| -------------------| -------------|
| Audi | A3 2014-17 | Prestige | Stock | 0mph | 0mph |
| Audi | A3 Sportback e-tron 2017-18 | Prestige | Stock | 0mph | 0mph |
| Buick | Regal 2018<sup>1</sup> | Adaptive Cruise | openpilot | 0mph | 7mph |
| Cadillac | ATS 2018<sup>1</sup> | Adaptive Cruise | openpilot | 0mph | 7mph |
| Chevrolet | Malibu 2017<sup>1</sup> | Adaptive Cruise | openpilot | 0mph | 7mph |
@ -135,47 +142,65 @@ Community Maintained Cars and Features
| Chrysler | Pacifica 2017-18 | Adaptive Cruise | Stock | 0mph | 9mph |
| Chrysler | Pacifica 2020 | Adaptive Cruise | Stock | 0mph | 39mph |
| Chrysler | Pacifica Hybrid 2017-18 | Adaptive Cruise | Stock | 0mph | 9mph |
| Chrysler | Pacifica Hybrid 2019-20 | Adaptive Cruise | Stock | 0mph | 39mph |
| Chrysler | Pacifica Hybrid 2019-21 | Adaptive Cruise | Stock | 0mph | 39mph |
| Genesis | G70 2018 | All | Stock | 0mph | 0mph |
| Genesis | G80 2018 | All | Stock | 0mph | 0mph |
| Genesis | G90 2018 | All | Stock | 0mph | 0mph |
| GMC | Acadia Denali 2018<sup>2</sup>| Adaptive Cruise | openpilot | 0mph | 7mph |
| GMC | Acadia 2018<sup>1</sup> | Adaptive Cruise | openpilot | 0mph | 7mph |
| Holden | Astra 2017<sup>1</sup> | Adaptive Cruise | openpilot | 0mph | 7mph |
| Hyundai | Elantra 2017-19 | SCC + LKAS | Stock | 19mph | 34mph |
| Hyundai | Genesis 2015-16 | SCC + LKAS | Stock | 19mph | 37mph |
| Hyundai | Ioniq Electric Premium SE 2020| SCC + LKAS | Stock | 0mph | 32mph |
| Hyundai | Ioniq Electric Limited 2019 | SCC + LKAS | Stock | 0mph | 32mph |
| Hyundai | Ioniq Electric 2019 | SCC + LKAS | Stock | 0mph | 32mph |
| Hyundai | Ioniq Electric 2020 | SCC + LKAS | Stock | 0mph | 0mph |
| Hyundai | Kona 2020 | SCC + LKAS | Stock | 0mph | 0mph |
| Hyundai | Kona EV 2019 | SCC + LKAS | Stock | 0mph | 0mph |
| Hyundai | Palisade 2020 | All | Stock | 0mph | 0mph |
| Hyundai | Santa Fe 2019 | All | Stock | 0mph | 0mph |
| Hyundai | Sonata 2019 | All | Stock | 0mph | 0mph |
| Hyundai | Santa Fe 2019-20 | All | Stock | 0mph | 0mph |
| Hyundai | Sonata 2018-2019 | SCC + LKAS | Stock | 0mph | 0mph |
| Hyundai | Veloster 2019 | SCC + LKAS | Stock | 5mph | 0mph |
| Jeep | Grand Cherokee 2016-18 | Adaptive Cruise | Stock | 0mph | 9mph |
| Jeep | Grand Cherokee 2019-20 | Adaptive Cruise | Stock | 0mph | 39mph |
| Kia | Forte 2018-19 | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Optima 2017 | SCC + LKAS/LDWS | Stock | 0mph | 32mph |
| Kia | Forte 2018-2021 | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Niro EV 2020 | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Optima 2017 | SCC + LKAS | Stock | 0mph | 32mph |
| Kia | Optima 2019 | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Sorento 2018 | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Seltos 2021 | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Sorento 2018-19 | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Stinger 2018 | SCC + LKAS | Stock | 0mph | 0mph |
| Nissan | Leaf 2018-19 | Propilot | Stock | 0mph | 0mph |
| Nissan | Rogue 2019 | Propilot | Stock | 0mph | 0mph |
| Nissan | X-Trail 2017 | Propilot | Stock | 0mph | 0mph |
| Kia | Ceed 2019 | SCC + LKAS | Stock | 0mph | 0mph |
| Nissan | Altima 2020 | ProPILOT | Stock | 0mph | 0mph |
| Nissan | Leaf 2018-20 | ProPILOT | Stock | 0mph | 0mph |
| Nissan | Rogue 2018-20 | ProPILOT | Stock | 0mph | 0mph |
| Nissan | X-Trail 2017 | ProPILOT | Stock | 0mph | 0mph |
| SEAT | Ateca 2018 | Driver Assistance | Stock | 0mph | 0mph |
| Škoda     | Kodiaq 2018                   | Driver Assistance | Stock             | 0mph               | 0mph          |
| Škoda     | Scala 2020                    | Driver Assistance | Stock             | 0mph               | 0mph          |
| Škoda     | Superb 2015-18                | Driver Assistance | Stock             | 0mph               | 0mph          |
| Subaru | Ascent 2019 | EyeSight | Stock | 0mph | 0mph |
| Subaru | Crosstrek 2018-19 | EyeSight | Stock | 0mph | 0mph |
| Subaru | Forester 2019 | EyeSight | Stock | 0mph | 0mph |
| Subaru | Forester 2019-21 | EyeSight | Stock | 0mph | 0mph |
| Subaru | Impreza 2017-19 | EyeSight | Stock | 0mph | 0mph |
| Volkswagen| Atlas 2018-19 | Driver Assistance | Stock | 0mph | 0mph |
| Volkswagen| e-Golf 2014, 2019-20 | Driver Assistance | Stock | 0mph | 0mph |
| Volkswagen| Golf 2015-19 | Driver Assistance | Stock | 0mph | 0mph |
| Volkswagen| Golf Alltrack 2017-18 | Driver Assistance | Stock | 0mph | 0mph |
| Volkswagen| Golf GTE 2016 | Driver Assistance | Stock | 0mph | 0mph |
| Volkswagen| Golf GTI 2018-19 | Driver Assistance | Stock | 0mph | 0mph |
| Volkswagen| Golf R 2016-19 | Driver Assistance | Stock | 0mph | 0mph |
| Volkswagen| Golf SportsVan 2016 | Driver Assistance | Stock | 0mph | 0mph |
| Volkswagen| Jetta 2018-20 | Driver Assistance | Stock | 0mph | 0mph |
| Volkswagen| Jetta GLI 2021 | Driver Assistance | Stock | 0mph | 0mph |
| Volkswagen| Passat 2016-17<sup>2</sup> | Driver Assistance | Stock | 0mph | 0mph |
| Volkswagen| Tiguan 2020 | Driver Assistance | Stock | 0mph | 0mph |
<sup>1</sup>Requires an [OBD-II car harness](https://comma.ai/shop/products/comma-car-harness) and [community built giraffe](https://github.com/commaai/openpilot/wiki/GM). ***NOTE: disconnecting the ASCM disables Automatic Emergency Braking (AEB).*** <br />
<sup>2</sup>Requires a custom connector for the developer [car harness](https://comma.ai/shop/products/car-harness) <br />
Although it's not upstream, there's a community of people getting openpilot to run on Teslas [here](https://tinkla.us/).
<sup>1</sup>Requires an [OBD-II car harness](https://comma.ai/shop/products/comma-car-harness) and [community built ASCM harness](https://github.com/commaai/openpilot/wiki/GM#hardware). ***NOTE: disconnecting the ASCM disables Automatic Emergency Braking (AEB).*** <br />
<sup>2</sup>Only includes the MQB Passat sold outside of North America. The NMS Passat made in Chattanooga TN is not yet supported.
Community Maintained Cars and Features are not verified by comma to meet our [safety model](SAFETY.md). Be extra cautious using them. They are only available after enabling the toggle in `Settings->Developer->Enable Community Features`.
To promote a car from community maintained, it must meet a few requirements: we must own one from the brand, we must sell the harness for it, it must have full ISO26262 coverage in both panda and openpilot, there must be a path forward for longitudinal control, it must still have AEB enabled, and it must support fingerprinting 2.0.
Although they're not upstream, the community has openpilot running on other makes and models. See the 'Community Supported Models' section of each make [on our wiki](https://wiki.comma.ai/).
Installation Instructions
------
@ -244,7 +269,6 @@ Many factors can impact the performance of openpilot DM, causing it to be unable
* Low light conditions, such as driving at night or in dark tunnels.
* Bright light (due to oncoming headlights, direct sunlight, etc.).
* The driver's face is partially or completely outside the field of view of the driver facing camera.
* Right hand driving vehicles.
* The driver facing camera is obstructed, covered, or damaged.
The list above does not represent an exhaustive list of situations that may interfere with proper operation of openpilot components. A driver should not rely on openpilot DM to assess their level of attention.
@ -264,7 +288,7 @@ By using openpilot, you agree to [our Privacy Policy](https://my.comma.ai/privac
Safety and Testing
----
* openpilot observes ISO26262 guidelines, see [SAFETY.md](SAFETY.md) for more detail.
* openpilot observes ISO26262 guidelines, see [SAFETY.md](SAFETY.md) for more details.
* openpilot has software in the loop [tests](.github/workflows/test.yaml) that run on every commit.
* The safety model code lives in panda and is written in C, see [code rigor](https://github.com/commaai/panda#code-rigor) for more details.
* panda has software in the loop [safety tests](https://github.com/commaai/panda/tree/master/tests/safety).
@ -274,8 +298,23 @@ Safety and Testing
Testing on PC
------
For simplified development and experimentation, openpilot can be run in the CARLA driving simulator, which allows you to develop openpilot without a car. The whole setup should only take a few minutes.
Steps:
1) Start the CARLA server in the first terminal
```
bash -c "$(curl https://raw.githubusercontent.com/commaai/openpilot/master/tools/sim/start_carla.sh)"
```
2) Start openpilot in a second terminal
```
bash -c "$(curl https://raw.githubusercontent.com/commaai/openpilot/master/tools/sim/start_openpilot_docker.sh)"
```
3) Press 1 to engage openpilot
See the full [README](tools/sim/README.md)
You should also take a look at the tools directory in master: lots of tools you can use to replay driving data, test, and develop openpilot from your PC.
Check out the tools directory in master: lots of tools you can use to replay driving data, test and develop openpilot from your pc.
Community and Contributing
------
@ -291,16 +330,15 @@ And [follow us on Twitter](https://twitter.com/comma_ai).
Directory Structure
------
.
├── apk # The apk files used for the UI
├── cereal # The messaging spec and libs used for all logs
├── common # Library like functionality we've developed here
├── installer/updater # Manages auto-updates of openpilot
├── installer/updater # Manages auto-updates of NEOS
├── opendbc # Files showing how to interpret data from cars
├── panda # Code used to communicate on CAN
├── phonelibs # Libraries used on NEOS devices
├── pyextra # Libraries used on NEOS devices
└── selfdrive # Code needed to drive the car
├── assets # Fonts, images, and sounds for UI
├── assets # Fonts, images and sounds for UI
├── athena # Allows communication with the app
├── boardd # Daemon to talk to the board
├── camerad # Driver to capture images from the camera sensors
@ -317,8 +355,6 @@ Directory Structure
├── test # Unit tests, system tests and a car simulator
└── ui # The UI
To understand how the services interact, see `cereal/service_list.yaml`.
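As a minimal sketch (not from the README), the same service list can also be inspected from Python via `cereal.services`, assuming the openpilot root is on `PYTHONPATH`:

```python
# Minimal sketch: print every cereal service and its nominal publish rate.
# Assumes the openpilot root is on PYTHONPATH so `cereal` is importable.
from cereal.services import service_list

for name, service in sorted(service_list.items()):
    # frequency is the expected publish rate in Hz (0 for on-demand services)
    print(f"{name:30s} {service.frequency} Hz")
```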
Licensing
------
View File
@ -1,3 +1,87 @@
Version 0.8.5 (2021-XX-XX)
========================
Version 0.8.4 (2021-05-17)
========================
* Delay controls start until system is ready
* Fuzzy car identification, enabled with Community Features toggle
* Localizer optimized for increased precision and less CPU usage
* Retuned lateral control to be more aggressive when model is confident
* Toyota Mirai 2021 support
* Lexus NX 300 2020 support thanks to goesreallyfast!
* Volkswagen Atlas 2018-19 support thanks to jyoung8607!
Version 0.8.3 (2021-04-01)
========================
* New model
* Trained on new diverse dataset from 2000+ users from 30+ countries
* Trained with improved segnet from the comma-pencil community project
* 🥬 Dramatically improved end-to-end lateral performance 🥬
* Toggle added to disable the use of lanelines
* NEOS update: update packages and support for new UI
* New offroad UI based on Qt
* Default SSH key only used for setup
* Kia Ceed 2019 support thanks to ZanZaD13!
* Kia Seltos 2021 support thanks to speedking456!
* Added support for many Volkswagen and Škoda models thanks to jyoung8607!
Version 0.8.2 (2021-02-26)
========================
* Use model points directly in MPC (no more polyfits), making lateral planning more accurate
* Use model heading prediction for smoother lateral control
* Smarter actuator delay compensation
* Improve qcamera resolution for improved video in explorer and connect
* Adjust maximum engagement speed to better fit the model's training distribution
* New driver monitoring model trained with 3x more diverse data
* Improved face detection with masks
* More predictable DM alerts when visibility is bad
* Rewritten video streaming between openpilot processes
* Improved longitudinal tuning on TSS2 Corolla and Rav4 thanks to briskspirit!
* Audi A3 2015 and 2017 support thanks to keeleysam!
* Nissan Altima 2020 support thanks to avolmensky!
* Lexus ES Hybrid 2018 support thanks to TheInventorMan!
* Toyota Camry Hybrid 2021 support thanks to alancyau!
Version 0.8.1 (2020-12-21)
========================
* Original EON is deprecated, upgrade to comma two
* Better model performance in heavy rain
* Better lane positioning in turns
* Fixed bug where model would cut turns on empty roads at night
* Fixed issue where some Toyotas would not completely stop thanks to briskspirit!
* Toyota Camry 2021 with TSS2.5 support
* Hyundai Ioniq Electric 2020 support thanks to baldwalker!
Version 0.8.0 (2020-11-30)
========================
* New driving model: fully 3D and improved cut-in detection
* UI draws 2 road edges, 4 lanelines and paths in 3D
* Major fixes to cut-in detection for openpilot longitudinal
* Grey panda is no longer supported, upgrade to comma two or black panda
* Lexus NX 2018 support thanks to matt12eagles!
* Kia Niro EV 2020 support thanks to nickn17!
* Toyota Prius 2021 support thanks to rav4kumar!
* Improved lane positioning with uncertain lanelines, wide lanes and exits
* Improved lateral control for Prius and Subaru
Version 0.7.10 (2020-10-29)
========================
* Grey panda is deprecated, upgrade to comma two or black panda
* NEOS update: update to Python 3.8.2 and lower CPU frequency
* Improved thermals due to reduced CPU frequency
* Update SNPE to 1.41.0
* Reduced offroad power consumption
* Various system stability improvements
* Acura RDX 2020 support thanks to csouers!
Version 0.7.9 (2020-10-09)
========================
* Improved car battery power management
* Improved updater robustness
* Improved realtime performance
* Reduced UI and modeld lags
* Increased torque on 2020 Hyundai Sonata and Palisade
Version 0.7.8 (2020-08-19)
========================
* New driver monitoring model: improved face detection and better compatibility with sunglasses
@ -21,24 +105,24 @@ Version 0.7.7 (2020-07-20)
Version 0.7.6.1 (2020-06-16)
========================
* Hotfix: update kernel on some comma twos (orders #8570-#8680)
Version 0.7.6 (2020-06-05)
========================
* White panda is deprecated, upgrade to comma two or black panda
* 2017 Nissan X-Trail, 2018-19 Leaf and 2019 Rogue support thanks to avolmensky!
* 2017 Mazda CX-5 support in dashcam mode thanks to Jafaral!
* Huge CPU savings in modeld by using thneed!
* Lots of code cleanup and refactors
Version 0.7.5 (2020-05-13)
========================
* Right-Hand Drive support for both driving and driver monitoring!
* New driving model: improved at sharp turns and lead speed estimation
* New driver monitoring model: overall improvement on comma two
* Driver camera preview in settings to improve mounting position
* Added support for many Hyundai, Kia, Genesis models thanks to xx979xx!
* Improved lateral tuning for 2020 Toyota Rav 4 (hybrid)
Version 0.7.4 (2020-03-20)
========================
@ -474,96 +558,96 @@ Version 0.3.4 (2017-07-28)
Version 0.3.3 (2017-06-28)
===========================
* Improved model trained on more data
* Alpha CR-V support thanks to energee and johnnwvs!
* Using the opendbc project for DBC files
* Minor performance improvements
* UI update thanks to pjlao307
* Power off button
* 6% more torque on the Civic
Version 0.3.2 (2017-05-22)
===========================
* Minor stability bugfixes
* Added metrics and rear view mirror disable to settings
* Update model with more crowdsourced data
Version 0.3.1 (2017-05-17)
===========================
* visiond stability bugfix
* Add logging for angle and flashing
Version 0.3.0 (2017-05-12)
===========================
* Add CarParams struct to improve the abstraction layer
* Refactor visiond IPC to support multiple clients
* Add raw GPS and beginning support for navigation
* Improve model in visiond using crowdsourced data
* Add improved system logging to diagnose instability
* Rewrite baseui in React Native
* Moved calibration to the cloud
Version 0.2.9 (2017-03-01)
===========================
* Retain compatibility with NEOS v1
Version 0.2.8 (2017-02-27)
===========================
* Fix bug where frames were being dropped in minute 71
Version 0.2.7 (2017-02-08)
===========================
* Better performance and pictures at night
* Fix ptr alignment issue in boardd
* Fix brake error light, fix crash if too cold
Version 0.2.6 (2017-01-31)
===========================
* Fix bug in visiond model execution
Version 0.2.5 (2017-01-30)
===========================
* Fix race condition in manager
Version 0.2.4 (2017-01-27)
===========================
* OnePlus 3T support
* Enable installation as NEOS app
* Various minor bugfixes
Version 0.2.3 (2017-01-11)
===========================
* Reduce space usage by 80%
* Add better logging
* Add Travis CI
Version 0.2.2 (2017-01-10)
===========================
* Board triggers started signal on CAN messages
* Improved autoexposure
* Handle out of space, improve upload status
Version 0.2.1 (2016-12-14)
===========================
* Performance improvements, removal of more numpy
* Fix boardd process priority
* Make counter timer reset on use of steering wheel
Version 0.2 (2016-12-12)
=========================
* Car/Radar abstraction layers have shipped, see cereal/car.capnp
* controlsd has been refactored
* Shipped plant model and testing maneuvers
* visiond exits more gracefully now
* Hardware encoder in visiond should always init
* ui now turns off the screen after 30 seconds
* Switch to openpilot release branch for future releases
* Added preliminary Docker container to run tests on PC
Version 0.1 (2016-11-29)
=========================
* Initial release of openpilot
* Adaptive cruise control is working
* Lane keep assist is working
* Support for Acura ILX 2016 with AcuraWatch Plus
* Support for Honda Civic 2016 Touring Edition
View File
@ -1,37 +1,64 @@
import Cython
import distutils
import os
import shutil
import subprocess
import sys
import sysconfig
import platform
import numpy as np
TICI = os.path.isfile('/TICI')
Decider('MD5-timestamp')
AddOption('--test',
action='store_true',
help='build test files')
AddOption('--kaitai',
action='store_true',
help='Regenerate kaitai struct parsers')
AddOption('--asan',
action='store_true',
help='turn on ASAN')
# Rebuild cython extensions if python, distutils, or cython change
cython_dependencies = [Value(v) for v in (sys.version, distutils.__version__, Cython.__version__)]
Export('cython_dependencies')
AddOption('--ubsan',
action='store_true',
help='turn on UBSan')
arch = subprocess.check_output(["uname", "-m"], encoding='utf8').rstrip()
AddOption('--clazy',
action='store_true',
help='build with clazy')
AddOption('--compile_db',
action='store_true',
help='build clang compilation database')
AddOption('--mpc-generate',
action='store_true',
help='regenerates the mpc sources')
AddOption('--external-sconscript',
action='store',
metavar='FILE',
dest='external_sconscript',
help='add an external SConscript to the build')
real_arch = arch = subprocess.check_output(["uname", "-m"], encoding='utf8').rstrip()
if platform.system() == "Darwin":
arch = "Darwin"
if arch == "aarch64" and not os.path.isdir("/system"):
if arch == "aarch64" and TICI:
arch = "larch64"
webcam = bool(ARGUMENTS.get("use_webcam", 0))
USE_WEBCAM = os.getenv("USE_WEBCAM") is not None
QCOM_REPLAY = arch == "aarch64" and os.getenv("QCOM_REPLAY") is not None
lenv = {
"PATH": os.environ['PATH'],
}
if arch == "aarch64" or arch == "larch64":
lenv = {
"LD_LIBRARY_PATH": '/data/data/com.termux/files/usr/lib',
"PATH": os.environ['PATH'],
}
lenv["LD_LIBRARY_PATH"] = '/data/data/com.termux/files/usr/lib'
if arch == "aarch64":
# android
@ -43,8 +70,8 @@ if arch == "aarch64" or arch == "larch64":
]
libpath = [
"/usr/local/lib",
"/usr/lib",
"/data/data/com.termux/files/usr/lib",
"/system/vendor/lib64",
"/system/comma/usr/lib",
"#phonelibs/nanovg",
@ -56,48 +83,52 @@ if arch == "aarch64" or arch == "larch64":
"#phonelibs/libyuv/larch64/lib",
"/usr/lib/aarch64-linux-gnu"
]
cpppath += [
"#selfdrive/camerad/include",
]
cflags = ["-DQCOM2", "-mcpu=cortex-a57"]
cxxflags = ["-DQCOM2", "-mcpu=cortex-a57"]
rpath = ["/usr/local/lib"]
else:
libpath += [
"#phonelibs/snpe/aarch64",
"#phonelibs/libyuv/lib"
"#phonelibs/libyuv/lib",
"/system/vendor/lib64"
]
cflags = ["-DQCOM", "-mcpu=cortex-a57"]
cxxflags = ["-DQCOM", "-mcpu=cortex-a57"]
rpath = ["/system/vendor/lib64"]
rpath = []
if QCOM_REPLAY:
cflags += ["-DQCOM_REPLAY"]
cxxflags += ["-DQCOM_REPLAY"]
else:
cflags = []
cxxflags = []
lenv = {
"PATH": "#external/bin:" + os.environ['PATH'],
}
cpppath = [
"#external/tensorflow/include",
]
cpppath = []
if arch == "Darwin":
yuv_dir = "mac" if real_arch != "arm64" else "mac_arm64"
libpath = [
"#phonelibs/libyuv/mac/lib",
"#cereal",
"#selfdrive/common",
f"#phonelibs/libyuv/{yuv_dir}/lib",
"/usr/local/lib",
"/opt/homebrew/lib",
"/usr/local/opt/openssl/lib",
"/opt/homebrew/opt/openssl/lib",
"/System/Library/Frameworks/OpenGL.framework/Libraries",
]
cflags += ["-DGL_SILENCE_DEPRECATION"]
cxxflags += ["-DGL_SILENCE_DEPRECATION"]
cpppath += [
"/opt/homebrew/include",
"/usr/local/opt/openssl/include",
"/opt/homebrew/opt/openssl/include"
]
else:
libpath = [
"#phonelibs/snpe/x86_64-linux-clang",
"#phonelibs/libyuv/x64/lib",
"#external/tensorflow/lib",
"#phonelibs/mapbox-gl-native-qt/x86_64",
"#cereal",
"#selfdrive/common",
"/usr/lib",
@ -105,7 +136,7 @@ else:
]
rpath = [
"external/tensorflow/lib",
"phonelibs/snpe/x86_64-linux-clang",
"cereal",
"selfdrive/common"
]
@ -114,11 +145,18 @@ else:
rpath = [os.path.join(os.getcwd(), x) for x in rpath]
if GetOption('asan'):
ccflags_asan = ["-fsanitize=address", "-fno-omit-frame-pointer"]
ldflags_asan = ["-fsanitize=address"]
ccflags = ["-fsanitize=address", "-fno-omit-frame-pointer"]
ldflags = ["-fsanitize=address"]
elif GetOption('ubsan'):
ccflags = ["-fsanitize=undefined"]
ldflags = ["-fsanitize=undefined"]
else:
ccflags_asan = []
ldflags_asan = []
ccflags = []
ldflags = []
# no --as-needed on mac linker
if arch != "Darwin":
ldflags += ["-Wl,--as-needed"]
# change pythonpath to this
lenv["PYTHONPATH"] = Dir("#").path
@ -131,64 +169,69 @@ env = Environment(
"-O2",
"-Wunused",
"-Werror",
"-Wno-unknown-warning-option",
"-Wno-deprecated-register",
"-Wno-register",
"-Wno-inconsistent-missing-override",
] + cflags + ccflags_asan,
"-Wno-c99-designator",
"-Wno-reorder-init-list",
] + cflags + ccflags,
CPPPATH=cpppath + [
"#",
"#selfdrive",
"#phonelibs/catch2/include",
"#phonelibs/bzip2",
"#phonelibs/libyuv/include",
"#phonelibs/openmax/include",
"#phonelibs/json11",
"#phonelibs/curl/include",
#"#phonelibs/opencv/include", # use opencv4 instead
"#phonelibs/libgralloc/include",
"#phonelibs/android_frameworks_native/include",
"#phonelibs/android_hardware_libhardware/include",
"#phonelibs/android_system_core/include",
"#phonelibs/linux/include",
"#phonelibs/snpe/include",
"#phonelibs/mapbox-gl-native-qt/include",
"#phonelibs/nanovg",
"#selfdrive/common",
"#selfdrive/camerad",
"#selfdrive/camerad/include",
"#selfdrive/loggerd/include",
"#selfdrive/modeld",
"#cereal/messaging",
"#phonelibs/qrcode",
"#phonelibs",
"#cereal",
"#cereal/messaging",
"#cereal/visionipc",
"#opendbc/can",
],
CC='clang',
CXX='clang++',
LINKFLAGS=ldflags_asan,
LINKFLAGS=ldflags,
RPATH=rpath,
CFLAGS=["-std=gnu11"] + cflags,
CXXFLAGS=["-std=c++14"] + cxxflags,
CXXFLAGS=["-std=c++1z"] + cxxflags,
LIBPATH=libpath + [
"#cereal",
"#selfdrive/common",
"#phonelibs",
]
"#opendbc/can",
"#selfdrive/boardd",
"#selfdrive/common",
],
CYTHONCFILESUFFIX=".cpp",
COMPILATIONDB_USE_ABSPATH=True,
tools=["default", "cython", "compilation_db"],
)
if GetOption('compile_db'):
env.CompilationDatabase('compile_commands.json')
if os.environ.get('SCONS_CACHE'):
cache_dir = '/tmp/scons_cache'
if TICI:
cache_dir = '/data/scons_cache'
if os.getenv('CI'):
branch = os.getenv('GIT_BRANCH')
if QCOM_REPLAY:
cache_dir = '/tmp/scons_cache_qcom_replay'
elif branch is not None and branch != 'master':
cache_dir_branch = '/tmp/scons_cache_' + branch
if not os.path.isdir(cache_dir_branch) and os.path.isdir(cache_dir):
shutil.copytree(cache_dir, cache_dir_branch)
cache_dir = cache_dir_branch
CacheDir(cache_dir)
node_interval = 5
@ -212,9 +255,92 @@ def abspath(x):
# rpath works elsewhere
return x[0].path.rsplit("/", 1)[1][:-3]
# still needed for apks
zmq = 'zmq'
Export('env', 'arch', 'zmq', 'SHARED', 'webcam', 'QCOM_REPLAY')
# Cython build environment
py_include = sysconfig.get_paths()['include']
envCython = env.Clone()
envCython["CPPPATH"] += [py_include, np.get_include()]
envCython["CCFLAGS"] += ["-Wno-#warnings", "-Wno-deprecated-declarations"]
envCython["LIBS"] = []
if arch == "Darwin":
envCython["LINKFLAGS"] = ["-bundle", "-undefined", "dynamic_lookup"]
elif arch == "aarch64":
envCython["LINKFLAGS"] = ["-shared"]
envCython["LIBS"] = [os.path.basename(py_include)]
else:
envCython["LINKFLAGS"] = ["-pthread", "-shared"]
Export('envCython')
# Qt build environment
qt_env = env.Clone()
qt_modules = ["Widgets", "Gui", "Core", "Network", "Concurrent", "Multimedia", "Quick", "Qml", "QuickWidgets", "Location", "Positioning"]
if arch != "aarch64":
qt_modules += ["DBus"]
qt_libs = []
if arch == "Darwin":
if real_arch == "arm64":
qt_env['QTDIR'] = "/opt/homebrew/opt/qt@5"
else:
qt_env['QTDIR'] = "/usr/local/opt/qt@5"
qt_dirs = [
os.path.join(qt_env['QTDIR'], "include"),
]
qt_dirs += [f"{qt_env['QTDIR']}/include/Qt{m}" for m in qt_modules]
qt_env["LINKFLAGS"] += ["-F" + os.path.join(qt_env['QTDIR'], "lib")]
qt_env["FRAMEWORKS"] += [f"Qt{m}" for m in qt_modules] + ["OpenGL"]
elif arch == "aarch64":
qt_env['QTDIR'] = "/system/comma/usr"
qt_dirs = [
f"/system/comma/usr/include/qt",
]
qt_dirs += [f"/system/comma/usr/include/qt/Qt{m}" for m in qt_modules]
qt_libs = [f"Qt5{m}" for m in qt_modules]
qt_libs += ['EGL', 'GLESv3', 'c++_shared']
else:
qt_env['QTDIR'] = "/usr"
qt_dirs = [
f"/usr/include/{real_arch}-linux-gnu/qt5",
f"/usr/include/{real_arch}-linux-gnu/qt5/QtGui/5.12.8/QtGui",
]
qt_dirs += [f"/usr/include/{real_arch}-linux-gnu/qt5/Qt{m}" for m in qt_modules]
qt_libs = [f"Qt5{m}" for m in qt_modules]
if arch == "larch64":
qt_libs += ["GLESv2", "wayland-client"]
elif arch != "Darwin":
qt_libs += ["GL"]
qt_env.Tool('qt')
qt_env['CPPPATH'] += qt_dirs + ["#selfdrive/ui/qt/"]
qt_flags = [
"-D_REENTRANT",
"-DQT_NO_DEBUG",
"-DQT_WIDGETS_LIB",
"-DQT_GUI_LIB",
"-DQT_QUICK_LIB",
"-DQT_QUICKWIDGETS_LIB",
"-DQT_QML_LIB",
"-DQT_CORE_LIB"
]
qt_env['CXXFLAGS'] += qt_flags
qt_env['LIBPATH'] += ['#selfdrive/ui']
qt_env['LIBS'] = qt_libs
if GetOption("clazy"):
checks = [
"level0",
"level1",
"no-range-loop",
"no-non-pod-global-static",
]
qt_env['CXX'] = 'clazy'
qt_env['ENV']['CLAZY_IGNORE_DIRS'] = qt_dirs[0]
qt_env['ENV']['CLAZY_CHECKS'] = ','.join(checks)
Export('env', 'qt_env', 'arch', 'real_arch', 'SHARED', 'USE_WEBCAM', 'QCOM_REPLAY')
# cereal and messaging are shared with the system
SConscript(['cereal/SConscript'])
@ -224,26 +350,55 @@ if SHARED:
else:
cereal = [File('#cereal/libcereal.a')]
messaging = [File('#cereal/libmessaging.a')]
visionipc = [File('#cereal/libvisionipc.a')]
Export('cereal', 'messaging')
SConscript(['selfdrive/common/SConscript'])
Import('_common', '_visionipc', '_gpucommon', '_gpu_libs')
Import('_common', '_gpucommon', '_gpu_libs')
if SHARED:
common, visionipc, gpucommon = abspath(common), abspath(visionipc), abspath(gpucommon)
common, gpucommon = abspath(common), abspath(gpucommon)
else:
common = [_common, 'json11']
visionipc = _visionipc
gpucommon = [_gpucommon] + _gpu_libs
Export('common', 'visionipc', 'gpucommon')
Export('common', 'gpucommon', 'visionipc')
# Build rednose library and ekf models
rednose_config = {
'generated_folder': '#selfdrive/locationd/models/generated',
'to_build': {
'live': ('#selfdrive/locationd/models/live_kf.py', True, ['live_kf_constants.h']),
'car': ('#selfdrive/locationd/models/car_kf.py', True, []),
},
}
if arch != "aarch64":
rednose_config['to_build'].update({
'gnss': ('#selfdrive/locationd/models/gnss_kf.py', True, []),
'loc_4': ('#selfdrive/locationd/models/loc_kf.py', True, []),
'pos_computer_4': ('#rednose/helpers/lst_sq_computer.py', False, []),
'pos_computer_5': ('#rednose/helpers/lst_sq_computer.py', False, []),
'feature_handler_5': ('#rednose/helpers/feature_handler.py', False, []),
'lane': ('#xx/pipeline/lib/ekf/lane_kf.py', True, []),
})
Export('rednose_config')
SConscript(['rednose/SConscript'])
# Build openpilot
SConscript(['cereal/SConscript'])
SConscript(['panda/board/SConscript'])
SConscript(['opendbc/can/SConscript'])
SConscript(['phonelibs/SConscript'])
SConscript(['common/SConscript'])
SConscript(['common/kalman/SConscript'])
SConscript(['common/transformations/SConscript'])
SConscript(['phonelibs/SConscript'])
SConscript(['selfdrive/camerad/SConscript'])
SConscript(['selfdrive/modeld/SConscript'])
@ -255,16 +410,17 @@ SConscript(['selfdrive/controls/lib/longitudinal_mpc_model/SConscript'])
SConscript(['selfdrive/boardd/SConscript'])
SConscript(['selfdrive/proclogd/SConscript'])
SConscript(['selfdrive/clocksd/SConscript'])
SConscript(['selfdrive/ui/SConscript'])
SConscript(['selfdrive/loggerd/SConscript'])
SConscript(['selfdrive/locationd/SConscript'])
SConscript(['selfdrive/locationd/models/SConscript'])
SConscript(['selfdrive/sensord/SConscript'])
SConscript(['selfdrive/ui/SConscript'])
if arch == "aarch64":
if arch != "Darwin":
SConscript(['selfdrive/logcatd/SConscript'])
SConscript(['selfdrive/sensord/SConscript'])
SConscript(['selfdrive/clocksd/SConscript'])
else:
SConscript(['tools/lib/index_log/SConscript'])
external_sconscript = GetOption('external_sconscript')
if external_sconscript:
SConscript([external_sconscript])
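For illustration, a minimal external SConscript consumed by the hook above might look like the following; the directory and target names are hypothetical, and it assumes the variables exported by this SConstruct (such as `env`) are available via `Import`:

```python
# hypothetical_tools/SConscript -- illustrative only, not part of the tree.
# Invoked with: scons --external-sconscript=hypothetical_tools/SConscript
Import('env')  # 'env' is exported by the main SConstruct above

# Reuse the shared compiler/linker settings for a small standalone binary.
env.Program('hypothetical_tools/demo', ['hypothetical_tools/demo.cc'])
```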
1
cereal 160000
@ -0,0 +1 @@
Subproject commit 7c3a2f87196f312529dae9d0ba05d6a449b14482
14
cereal/.gitignore vendored
View File
@ -1,14 +0,0 @@
gen
node_modules
package-lock.json
*.pyc
__pycache__
.*.swp
.*.swo
libcereal*.a
libmessaging.*
libmessaging_shared.*
services.h
.sconsign.dblite
libcereal_shared.*
.mypy_cache/
View File
@ -1,64 +0,0 @@
Import('env', 'arch', 'zmq', 'cython_dependencies')
import shutil
gen_dir = Dir('gen')
messaging_dir = Dir('messaging')
# TODO: remove src-prefix and cereal from command string. can we set working directory?
env.Command(["gen/c/include/c++.capnp.h", "gen/c/include/java.capnp.h"], [], "mkdir -p " + gen_dir.path + "/c/include && touch $TARGETS")
env.Command(['gen/cpp/car.capnp.c++', 'gen/cpp/log.capnp.c++', 'gen/cpp/car.capnp.h', 'gen/cpp/log.capnp.h'],
['car.capnp', 'log.capnp'],
'capnpc $SOURCES --src-prefix=cereal -o c++:' + gen_dir.path + '/cpp/')
if shutil.which('capnpc-java'):
env.Command(['gen/java/Car.java', 'gen/java/Log.java'],
['car.capnp', 'log.capnp'],
'capnpc $SOURCES --src-prefix=cereal -o java:' + gen_dir.path + '/java/')
# TODO: remove non shared cereal and messaging
cereal_objects = env.SharedObject([
'gen/cpp/car.capnp.c++',
'gen/cpp/log.capnp.c++',
])
env.Library('cereal', cereal_objects)
env.SharedLibrary('cereal_shared', cereal_objects)
cereal_dir = Dir('.')
services_h = env.Command(['services.h'],
['service_list.yaml', 'services.py'],
'python3 ' + cereal_dir.path + '/services.py > $TARGET')
messaging_objects = env.SharedObject([
'messaging/messaging.cc',
'messaging/impl_zmq.cc',
'messaging/impl_msgq.cc',
'messaging/msgq.cc',
'messaging/socketmaster.cc',
])
messaging_lib = env.Library('messaging', messaging_objects)
Depends('messaging/impl_zmq.cc', services_h)
# note, this rebuilds the deps shared, zmq is statically linked to make APK happy
# TODO: get APK to load system zmq to remove the static link
if arch == "aarch64":
zmq_static = FindFile("libzmq.a", "/usr/lib")
shared_lib_shared_lib = [zmq_static, 'm', 'stdc++', "gnustl_shared", "kj", "capnp"]
env.SharedLibrary('messaging_shared', messaging_objects, LIBS=shared_lib_shared_lib)
env.Program('messaging/bridge', ['messaging/bridge.cc'], LIBS=[messaging_lib, 'zmq'])
Depends('messaging/bridge.cc', services_h)
# different target?
#env.Program('messaging/demo', ['messaging/demo.cc'], LIBS=[messaging_lib, 'zmq'])
env.Command(['messaging/messaging_pyx.so', 'messaging/messaging_pyx.cpp'],
cython_dependencies + [messaging_lib, 'messaging/messaging_pyx_setup.py', 'messaging/messaging_pyx.pyx', 'messaging/messaging.pxd'],
"cd " + messaging_dir.path + " && python3 messaging_pyx_setup.py build_ext --inplace")
if GetOption('test'):
env.Program('messaging/test_runner', ['messaging/test_runner.cc', 'messaging/msgq_tests.cc'], LIBS=[messaging_lib])
View File
@ -1,9 +0,0 @@
# pylint: skip-file
import os
import capnp
CEREAL_PATH = os.path.dirname(os.path.abspath(__file__))
capnp.remove_import_hook()
log = capnp.load(os.path.join(CEREAL_PATH, "log.capnp"))
car = capnp.load(os.path.join(CEREAL_PATH, "car.capnp"))
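As a hedged usage sketch (the file name is hypothetical, and it assumes an uncompressed log, i.e. a plain stream of serialized `Event` structs), the schemas loaded above can be used to iterate a log with pycapnp:

```python
# Sketch: iterate Events from a raw, uncompressed log using the schema
# loaded above. "example_rlog" is a hypothetical file name.
from cereal import log

with open("example_rlog", "rb") as f:
    for event in log.Event.read_multiple(f):
        # Event is a capnp union; which() names the populated field
        print(event.logMonoTime, event.which())
```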
View File
@ -1,534 +0,0 @@
using Cxx = import "./include/c++.capnp";
$Cxx.namespace("cereal");
using Java = import "./include/java.capnp";
$Java.package("ai.comma.openpilot.cereal");
$Java.outerClassname("Car");
@0x8e2af1e708af8b8d;
# ******* events causing controls state machine transition *******
struct CarEvent @0x9b1657f34caf3ad3 {
name @0 :EventName;
enable @1 :Bool;
noEntry @2 :Bool;
warning @3 :Bool; # alerts presented only when enabled or soft disabling
userDisable @4 :Bool;
softDisable @5 :Bool;
immediateDisable @6 :Bool;
preEnable @7 :Bool;
permanent @8 :Bool; # alerts presented regardless of openpilot state
enum EventName @0xbaa8c5d505f727de {
# TODO: copy from error list
canError @0;
steerUnavailable @1;
brakeUnavailable @2;
gasUnavailable @3;
wrongGear @4;
doorOpen @5;
seatbeltNotLatched @6;
espDisabled @7;
wrongCarMode @8;
steerTempUnavailable @9;
reverseGear @10;
buttonCancel @11;
buttonEnable @12;
pedalPressed @13;
cruiseDisabled @14;
radarCanError @15;
dataNeededDEPRECATED @16;
speedTooLow @17;
outOfSpace @18;
overheat @19;
calibrationIncomplete @20;
calibrationInvalid @21;
controlsMismatch @22;
pcmEnable @23;
pcmDisable @24;
noTarget @25;
radarFault @26;
modelCommIssueDEPRECATED @27;
brakeHold @28;
parkBrake @29;
manualRestart @30;
lowSpeedLockout @31;
plannerError @32;
ipasOverrideDEPRECATED @33;
debugAlert @34;
steerTempUnavailableMute @35;
resumeRequired @36;
preDriverDistracted @37;
promptDriverDistracted @38;
driverDistracted @39;
geofenceDEPRECATED @40;
driverMonitorOnDEPRECATED @41;
driverMonitorOffDEPRECATED @42;
preDriverUnresponsive @43;
promptDriverUnresponsive @44;
driverUnresponsive @45;
belowSteerSpeed @46;
calibrationProgressDEPRECATED @47;
lowBattery @48;
invalidGiraffeHondaDEPRECATED @49;
vehicleModelInvalid @50;
controlsFailed @51;
sensorDataInvalid @52;
commIssue @53;
tooDistracted @54;
posenetInvalid @55;
soundsUnavailable @56;
preLaneChangeLeft @57;
preLaneChangeRight @58;
laneChange @59;
invalidGiraffeToyota @60;
internetConnectivityNeeded @61;
communityFeatureDisallowed @62;
lowMemory @63;
stockAeb @64;
ldw @65;
carUnrecognized @66;
radarCommIssue @67;
driverMonitorLowAcc @68;
invalidLkasSetting @69;
speedTooHigh @70;
laneChangeBlocked @71;
relayMalfunction @72;
gasPressed @73;
stockFcw @74;
startup @75;
startupNoCar @76;
startupNoControl @77;
startupMaster @78;
fcw @79;
steerSaturated @80;
whitePandaUnsupported @81;
startupWhitePanda @82;
canErrorPersistentDEPRECATED @83;
belowEngageSpeed @84;
noGps @85;
focusRecoverActive @86;
wrongCruiseMode @87;
neosUpdateRequired @88;
modeldLagging @89;
deviceFalling @90;
}
}
# ******* main car state @ 100hz *******
# all speeds in m/s
struct CarState {
errorsDEPRECATED @0 :List(CarEvent.EventName);
events @13 :List(CarEvent);
# car speed
vEgo @1 :Float32; # best estimate of speed
aEgo @16 :Float32; # best estimate of acceleration
vEgoRaw @17 :Float32; # unfiltered speed from CAN sensors
yawRate @22 :Float32; # best estimate of yaw rate
standstill @18 :Bool;
wheelSpeeds @2 :WheelSpeeds;
# gas pedal, 0.0-1.0
gas @3 :Float32; # this is user + computer
gasPressed @4 :Bool; # this is user pedal only
# brake pedal, 0.0-1.0
brake @5 :Float32; # this is user pedal only
brakePressed @6 :Bool; # this is user pedal only
brakeLights @19 :Bool;
# steering wheel
steeringAngle @7 :Float32; # deg
steeringRate @15 :Float32; # deg/s
steeringTorque @8 :Float32; # TODO: standardize units
steeringTorqueEps @27 :Float32; # TODO: standardize units
steeringPressed @9 :Bool; # if the user is using the steering wheel
steeringRateLimited @29 :Bool; # if the torque is limited by the rate limiter
steerWarning @35 :Bool; # temporary steer unavailable
steerError @36 :Bool; # permanent steer error
stockAeb @30 :Bool;
stockFcw @31 :Bool;
espDisabled @32 :Bool;
# cruise state
cruiseState @10 :CruiseState;
# gear
gearShifter @14 :GearShifter;
# button presses
buttonEvents @11 :List(ButtonEvent);
leftBlinker @20 :Bool;
rightBlinker @21 :Bool;
genericToggle @23 :Bool;
# lock info
doorOpen @24 :Bool;
seatbeltUnlatched @25 :Bool;
canValid @26 :Bool;
# clutch (manual transmission only)
clutchPressed @28 :Bool;
# which packets this state came from
canMonoTimes @12: List(UInt64);
# blindspot sensors
leftBlindspot @33 :Bool; # Is there something blocking the left lane change
rightBlindspot @34 :Bool; # Is there something blocking the right lane change
struct WheelSpeeds {
# optional wheel speeds
fl @0 :Float32;
fr @1 :Float32;
rl @2 :Float32;
rr @3 :Float32;
}
struct CruiseState {
enabled @0 :Bool;
speed @1 :Float32;
available @2 :Bool;
speedOffset @3 :Float32;
standstill @4 :Bool;
nonAdaptive @5 :Bool;
}
enum GearShifter {
unknown @0;
park @1;
drive @2;
neutral @3;
reverse @4;
sport @5;
low @6;
brake @7;
eco @8;
manumatic @9;
}
# send on change
struct ButtonEvent {
pressed @0 :Bool;
type @1 :Type;
enum Type {
unknown @0;
leftBlinker @1;
rightBlinker @2;
accelCruise @3;
decelCruise @4;
cancel @5;
altButton1 @6;
altButton2 @7;
altButton3 @8;
setCruise @9;
resumeCruise @10;
gapAdjustCruise @11;
}
}
}
# ******* radar state @ 20hz *******
struct RadarData @0x888ad6581cf0aacb {
errors @0 :List(Error);
points @1 :List(RadarPoint);
# which packets this state came from
canMonoTimes @2 :List(UInt64);
enum Error {
canError @0;
fault @1;
wrongConfig @2;
}
# similar to LiveTracks
# is one timestamp valid for all? I think so
struct RadarPoint {
trackId @0 :UInt64; # no trackId reuse
# these 3 are the minimum required
dRel @1 :Float32; # m from the front bumper of the car
yRel @2 :Float32; # m
vRel @3 :Float32; # m/s
# these are optional and valid if they are not NaN
aRel @4 :Float32; # m/s^2
yvRel @5 :Float32; # m/s
# some radars flag measurements VS estimates
measured @6 :Bool;
}
}
# ******* car controls @ 100hz *******
struct CarControl {
# must be true for any actuator commands to work
enabled @0 :Bool;
active @7 :Bool;
gasDEPRECATED @1 :Float32;
brakeDEPRECATED @2 :Float32;
steeringTorqueDEPRECATED @3 :Float32;
actuators @6 :Actuators;
cruiseControl @4 :CruiseControl;
hudControl @5 :HUDControl;
struct Actuators {
# range from 0.0 - 1.0
gas @0: Float32;
brake @1: Float32;
# range from -1.0 - 1.0
steer @2: Float32;
steerAngle @3: Float32;
}
struct CruiseControl {
cancel @0: Bool;
override @1: Bool;
speedOverride @2: Float32;
accelOverride @3: Float32;
}
struct HUDControl {
speedVisible @0: Bool;
setSpeed @1: Float32;
lanesVisible @2: Bool;
leadVisible @3: Bool;
visualAlert @4: VisualAlert;
audibleAlert @5: AudibleAlert;
rightLaneVisible @6: Bool;
leftLaneVisible @7: Bool;
rightLaneDepart @8: Bool;
leftLaneDepart @9: Bool;
enum VisualAlert {
# these are the choices from the Honda
# map as good as you can for your car
none @0;
fcw @1;
steerRequired @2;
brakePressed @3;
wrongGear @4;
seatbeltUnbuckled @5;
speedTooHigh @6;
ldw @7;
}
enum AudibleAlert {
# these are the choices from the Honda
# map as good as you can for your car
none @0;
chimeEngage @1;
chimeDisengage @2;
chimeError @3;
chimeWarning1 @4;
chimeWarning2 @5;
chimeWarningRepeat @6;
chimePrompt @7;
chimeWarning2Repeat @8;
}
}
}
# ****** car param ******
struct CarParams {
carName @0 :Text;
carFingerprint @1 :Text;
enableGasInterceptor @2 :Bool;
enableCruise @3 :Bool;
enableCamera @4 :Bool;
enableDsu @5 :Bool; # driving support unit
enableApgs @6 :Bool; # advanced parking guidance system
minEnableSpeed @7 :Float32;
minSteerSpeed @8 :Float32;
safetyModel @9 :SafetyModel;
safetyModelPassive @42 :SafetyModel = silent;
safetyParam @10 :Int16;
steerMaxBP @11 :List(Float32);
steerMaxV @12 :List(Float32);
gasMaxBP @13 :List(Float32);
gasMaxV @14 :List(Float32);
brakeMaxBP @15 :List(Float32);
brakeMaxV @16 :List(Float32);
# things about the car in the manual
mass @17 :Float32; # [kg] running weight
wheelbase @18 :Float32; # [m] distance from rear to front axle
centerToFront @19 :Float32; # [m] GC distance to front axle
steerRatio @20 :Float32; # [] ratio between front wheels and steering wheel angles
steerRatioRear @21 :Float32; # [] rear steering ratio wrt front steering (usually 0)
# things we can derive
rotationalInertia @22 :Float32; # [kg*m2] body rotational inertia
tireStiffnessFront @23 :Float32; # [N/rad] front tire coeff of stiff
tireStiffnessRear @24 :Float32; # [N/rad] rear tire coeff of stiff
longitudinalTuning @25 :LongitudinalPIDTuning;
lateralParams @48 :LateralParams;
lateralTuning :union {
pid @26 :LateralPIDTuning;
indi @27 :LateralINDITuning;
lqr @40 :LateralLQRTuning;
}
steerLimitAlert @28 :Bool;
steerLimitTimer @47 :Float32; # time before steerLimitAlert is issued
vEgoStopping @29 :Float32; # Speed at which the car goes into stopping state
directAccelControl @30 :Bool; # Does the car have direct accel control or just gas/brake
stoppingControl @31 :Bool; # Does the car allow full control even at low speeds when stopping
startAccel @32 :Float32; # Required acceleration to overcome creep braking
steerRateCost @33 :Float32; # Lateral MPC cost on steering rate
steerControlType @34 :SteerControlType;
radarOffCan @35 :Bool; # True when radar objects aren't visible on CAN
steerActuatorDelay @36 :Float32; # Steering wheel actuator delay in seconds
openpilotLongitudinalControl @37 :Bool; # is openpilot doing the longitudinal control?
carVin @38 :Text; # VIN number queried during fingerprinting
isPandaBlack @39: Bool;
dashcamOnly @41: Bool;
transmissionType @43 :TransmissionType;
carFw @44 :List(CarFw);
radarTimeStep @45: Float32 = 0.05; # time delta between radar updates, 20Hz is very standard
communityFeature @46: Bool; # true if a community maintained feature is detected
fingerprintSource @49: FingerprintSource;
networkLocation @50 :NetworkLocation; # Where Panda/C2 is integrated into the car's CAN network
struct LateralParams {
torqueBP @0 :List(Int32);
torqueV @1 :List(Int32);
}
struct LateralPIDTuning {
kpBP @0 :List(Float32);
kpV @1 :List(Float32);
kiBP @2 :List(Float32);
kiV @3 :List(Float32);
kf @4 :Float32;
}
struct LongitudinalPIDTuning {
kpBP @0 :List(Float32);
kpV @1 :List(Float32);
kiBP @2 :List(Float32);
kiV @3 :List(Float32);
deadzoneBP @4 :List(Float32);
deadzoneV @5 :List(Float32);
}
struct LateralINDITuning {
outerLoopGain @0 :Float32;
innerLoopGain @1 :Float32;
timeConstant @2 :Float32;
actuatorEffectiveness @3 :Float32;
}
struct LateralLQRTuning {
scale @0 :Float32;
ki @1 :Float32;
dcGain @2 :Float32;
# State space system
a @3 :List(Float32);
b @4 :List(Float32);
c @5 :List(Float32);
k @6 :List(Float32); # LQR gain
l @7 :List(Float32); # Kalman gain
}
enum SafetyModel {
silent @0;
hondaNidec @1;
toyota @2;
elm327 @3;
gm @4;
hondaBoschGiraffe @5;
ford @6;
cadillac @7;
hyundai @8;
chrysler @9;
tesla @10;
subaru @11;
gmPassive @12;
mazda @13;
nissan @14;
volkswagen @15;
toyotaIpas @16;
allOutput @17;
gmAscm @18;
noOutput @19; # like silent but without silent CAN TXs
hondaBoschHarness @20;
volkswagenPq @21;
subaruLegacy @22; # pre-Global platform
hyundaiLegacy @23;
hyundaiCommunity @24;
}
enum SteerControlType {
torque @0;
angle @1;
}
enum TransmissionType {
unknown @0;
automatic @1; # Traditional auto, including DSG
manual @2; # True "stick shift" only
direct @3; # Electric vehicle or other direct drive
}
struct CarFw {
ecu @0 :Ecu;
fwVersion @1 :Data;
address @2: UInt32;
subAddress @3: UInt8;
}
enum Ecu {
eps @0;
esp @1;
fwdRadar @2;
fwdCamera @3;
engine @4;
unknown @5;
transmission @8; # Transmission Control Module
srs @9; # airbag
gateway @10; # can gateway
hud @11; # heads up display
combinationMeter @12; # instrument cluster
# Toyota only
dsu @6;
apgs @7;
# Honda only
vsa @13; # Vehicle Stability Assist
programmedFuelInjection @14;
electricBrakeBooster @15;
shiftByWire @16;
}
enum FingerprintSource {
can @0;
fw @1;
fixed @2;
}
enum NetworkLocation {
fwdCamera @0; # Standard/default integration at LKAS camera
gateway @1; # Integration at vehicle's CAN gateway
}
}
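As a small illustrative sketch (values are made up), messages defined by this schema can be built and parsed from Python via the `cereal` loader shown earlier:

```python
# Sketch: build and round-trip a CarState message from the schema above.
from cereal import car

cs = car.CarState.new_message()
cs.vEgo = 12.5               # m/s, best estimate of speed
cs.steeringAngle = -3.0      # deg
cs.cruiseState.enabled = True
cs.cruiseState.speed = 13.4  # m/s

raw = cs.to_bytes()                    # serialize
parsed = car.CarState.from_bytes(raw)  # parse it back
assert parsed.cruiseState.enabled
```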
View File
@ -1,26 +0,0 @@
# Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
# Licensed under the MIT License:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
@0xbdf87d7bb8304e81;
$namespace("capnp::annotations");
annotation namespace(file): Text;
annotation name(field, enumerant, struct, enum, interface, method, param, group, union): Text;
View File
@ -1,28 +0,0 @@
# Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
# Licensed under the MIT License:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
@0xc5f1af96651f70ea;
annotation package @0x9ee4c8f803b3b596 (file) : Text;
# Name of the package, such as "org.example.foo", in which the generated code will reside.
annotation outerClassname @0x9b066bb4881f7cd3 (file) : Text;
# Name of the outer class that will wrap the generated code.
File diff suppressed because it is too large
View File
@ -1,10 +0,0 @@
demo
bridge
test_runner
*.o
*.os
*.d
*.a
*.so
messaging_pyx.cpp
build/
View File
@ -1,218 +0,0 @@
# must be built with scons
from .messaging_pyx import Context, Poller, SubSocket, PubSocket # pylint: disable=no-name-in-module, import-error
from .messaging_pyx import MultiplePublishersError, MessagingError # pylint: disable=no-name-in-module, import-error
import capnp
from cereal import log
from cereal.services import service_list
assert MultiplePublishersError
assert MessagingError
# sec_since_boot is faster, but allow running standalone too
try:
from common.realtime import sec_since_boot
except ImportError:
import time
sec_since_boot = time.time
print("Warning, using python time.time() instead of faster sec_since_boot")
context = Context()
def new_message(service=None, size=None):
dat = log.Event.new_message()
dat.logMonoTime = int(sec_since_boot() * 1e9)
dat.valid = True
if service is not None:
if size is None:
dat.init(service)
else:
dat.init(service, size)
return dat
def pub_sock(endpoint):
sock = PubSocket()
sock.connect(context, endpoint)
return sock
def sub_sock(endpoint, poller=None, addr="127.0.0.1", conflate=False, timeout=None):
sock = SubSocket()
addr = addr.encode('utf8')
sock.connect(context, endpoint, addr, conflate)
if timeout is not None:
sock.setTimeout(timeout)
if poller is not None:
poller.registerSocket(sock)
return sock
def drain_sock_raw(sock, wait_for_one=False):
"""Receive all message currently available on the queue"""
ret = []
while 1:
if wait_for_one and len(ret) == 0:
dat = sock.receive()
else:
dat = sock.receive(non_blocking=True)
if dat is None:
break
ret.append(dat)
return ret
def drain_sock(sock, wait_for_one=False):
"""Receive all message currently available on the queue"""
ret = []
while 1:
if wait_for_one and len(ret) == 0:
dat = sock.receive()
else:
dat = sock.receive(non_blocking=True)
if dat is None: # Timeout hit
break
dat = log.Event.from_bytes(dat)
ret.append(dat)
return ret
# TODO: print when we drop packets?
def recv_sock(sock, wait=False):
"""Same as drain sock, but only returns latest message. Consider using conflate instead."""
dat = None
while 1:
if wait and dat is None:
rcv = sock.receive()
else:
rcv = sock.receive(non_blocking=True)
if rcv is None: # Timeout hit
break
dat = rcv
if dat is not None:
dat = log.Event.from_bytes(dat)
return dat
def recv_one(sock):
dat = sock.receive()
if dat is not None:
dat = log.Event.from_bytes(dat)
return dat
def recv_one_or_none(sock):
dat = sock.receive(non_blocking=True)
if dat is not None:
dat = log.Event.from_bytes(dat)
return dat
def recv_one_retry(sock):
"""Keep receiving until we get a message"""
while True:
dat = sock.receive()
if dat is not None:
return log.Event.from_bytes(dat)
class SubMaster():
def __init__(self, services, ignore_alive=None, addr="127.0.0.1"):
self.poller = Poller()
self.frame = -1
self.updated = {s: False for s in services}
self.rcv_time = {s: 0. for s in services}
self.rcv_frame = {s: 0 for s in services}
self.alive = {s: False for s in services}
self.sock = {}
self.freq = {}
self.data = {}
self.logMonoTime = {}
self.valid = {}
if ignore_alive is not None:
self.ignore_alive = ignore_alive
else:
self.ignore_alive = []
for s in services:
if addr is not None:
self.sock[s] = sub_sock(s, poller=self.poller, addr=addr, conflate=True)
self.freq[s] = service_list[s].frequency
try:
data = new_message(s)
except capnp.lib.capnp.KjException: # pylint: disable=c-extension-no-member
# lists
data = new_message(s, 0)
self.data[s] = getattr(data, s)
self.logMonoTime[s] = 0
self.valid[s] = data.valid
def __getitem__(self, s):
return self.data[s]
def update(self, timeout=1000):
msgs = []
for sock in self.poller.poll(timeout):
msgs.append(recv_one_or_none(sock))
self.update_msgs(sec_since_boot(), msgs)
def update_msgs(self, cur_time, msgs):
# TODO: add optional input that specifies the service to wait for
self.frame += 1
self.updated = dict.fromkeys(self.updated, False)
for msg in msgs:
if msg is None:
continue
s = msg.which()
self.updated[s] = True
self.rcv_time[s] = cur_time
self.rcv_frame[s] = self.frame
self.data[s] = getattr(msg, s)
self.logMonoTime[s] = msg.logMonoTime
self.valid[s] = msg.valid
for s in self.data:
# arbitrary small number to avoid float comparison. If freq is 0, we can skip the check
if self.freq[s] > 1e-5:
# alive if delay is within 10x the expected frequency
self.alive[s] = (cur_time - self.rcv_time[s]) < (10. / self.freq[s])
else:
self.alive[s] = True
def all_alive(self, service_list=None):
if service_list is None: # check all
service_list = self.alive.keys()
return all(self.alive[s] for s in service_list if s not in self.ignore_alive)
def all_valid(self, service_list=None):
if service_list is None: # check all
service_list = self.valid.keys()
return all(self.valid[s] for s in service_list)
def all_alive_and_valid(self, service_list=None):
if service_list is None: # check all
service_list = self.alive.keys()
return self.all_alive(service_list=service_list) and self.all_valid(service_list=service_list)
class PubMaster():
def __init__(self, services):
self.sock = {}
for s in services:
self.sock[s] = pub_sock(s)
def send(self, s, dat):
# accept either bytes or capnp builder
if not isinstance(dat, bytes):
dat = dat.to_bytes()
self.sock[s].send(dat)
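A minimal usage sketch of the helpers above (`carState` is a real service; the values and timing are illustrative, and it assumes `messaging_pyx` has been built with scons as noted at the top of the file):

```python
# Sketch: publish one carState message and read it back with SubMaster.
import cereal.messaging as messaging

sm = messaging.SubMaster(['carState'])   # subscribe before publishing
pm = messaging.PubMaster(['carState'])

dat = messaging.new_message('carState')
dat.carState.vEgo = 12.5                 # illustrative value
pm.send('carState', dat)

sm.update(timeout=100)                   # poll for up to 100 ms
if sm.updated['carState']:
    print("vEgo:", sm['carState'].vEgo)
```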
View File
@ -1,64 +0,0 @@
#include <iostream>
#include <string>
#include <cassert>
#include <csignal>
#include <map>
typedef void (*sighandler_t)(int sig);
#include "services.h"
#include "impl_msgq.hpp"
#include "impl_zmq.hpp"
void sigpipe_handler(int sig) {
assert(sig == SIGPIPE);
std::cout << "SIGPIPE received" << std::endl;
}
static std::vector<std::string> get_services() {
std::vector<std::string> name_list;
for (const auto& it : services) {
std::string name = it.name;
if (name == "plusFrame" || name == "uiLayoutState") continue;
name_list.push_back(name);
}
return name_list;
}
int main(void){
signal(SIGPIPE, (sighandler_t)sigpipe_handler);
auto endpoints = get_services();
std::map<SubSocket*, PubSocket*> sub2pub;
Context *zmq_context = new ZMQContext();
Context *msgq_context = new MSGQContext();
Poller *poller = new MSGQPoller();
for (auto endpoint: endpoints){
SubSocket * msgq_sock = new MSGQSubSocket();
msgq_sock->connect(msgq_context, endpoint, "127.0.0.1", false);
poller->registerSocket(msgq_sock);
PubSocket * zmq_sock = new ZMQPubSocket();
zmq_sock->connect(zmq_context, endpoint);
sub2pub[msgq_sock] = zmq_sock;
}
while (true){
for (auto sub_sock : poller->poll(100)){
Message * msg = sub_sock->receive();
if (msg == NULL) continue;
sub2pub[sub_sock]->sendMessage(msg);
delete msg;
}
}
return 0;
}
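The bridge simply republishes every MSGQ service over ZMQ, so a process on another machine can subscribe over TCP. A hedged sketch of the remote consumer side (the device IP and import path are assumptions):

# Hedged example: subscribe from a PC while the bridge runs on the device.
import os
os.environ["ZMQ"] = "1"                  # select the ZMQ backend before any socket is created
import cereal.messaging as messaging     # import path is an assumption

sock = messaging.sub_sock('carState', addr="192.168.5.1")  # placeholder device IP
while True:
    msg = messaging.recv_one(sock)       # blocks until a message arrives
    if msg is not None:
        print(msg.carState.vEgo)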

View File

@ -1,199 +0,0 @@
#include <cassert>
#include <cstring>
#include <iostream>
#include <cstdlib>
#include <csignal>
#include <cerrno>
#include "impl_msgq.hpp"
volatile sig_atomic_t msgq_do_exit = 0;
void sig_handler(int signal) {
assert(signal == SIGINT || signal == SIGTERM);
msgq_do_exit = 1;
}
MSGQContext::MSGQContext() {
}
MSGQContext::~MSGQContext() {
}
void MSGQMessage::init(size_t sz) {
size = sz;
data = new char[size];
}
void MSGQMessage::init(char * d, size_t sz) {
size = sz;
data = new char[size];
memcpy(data, d, size);
}
void MSGQMessage::takeOwnership(char * d, size_t sz) {
size = sz;
data = d;
}
void MSGQMessage::close() {
if (size > 0){
delete[] data;
}
size = 0;
}
MSGQMessage::~MSGQMessage() {
this->close();
}
int MSGQSubSocket::connect(Context *context, std::string endpoint, std::string address, bool conflate){
assert(context);
assert(address == "127.0.0.1");
q = new msgq_queue_t;
int r = msgq_new_queue(q, endpoint.c_str(), DEFAULT_SEGMENT_SIZE);
if (r != 0){
return r;
}
msgq_init_subscriber(q);
if (conflate){
q->read_conflate = true;
}
timeout = -1;
return 0;
}
Message * MSGQSubSocket::receive(bool non_blocking){
msgq_do_exit = 0;
void (*prev_handler_sigint)(int);
void (*prev_handler_sigterm)(int);
if (!non_blocking){
prev_handler_sigint = std::signal(SIGINT, sig_handler);
prev_handler_sigterm = std::signal(SIGTERM, sig_handler);
}
msgq_msg_t msg;
MSGQMessage *r = NULL;
int rc = msgq_msg_recv(&msg, q);
// Hack to implement blocking read with a poller. Don't use this
while (!non_blocking && rc == 0 && msgq_do_exit == 0){
msgq_pollitem_t items[1];
items[0].q = q;
int t = (timeout != -1) ? timeout : 100;
int n = msgq_poll(items, 1, t);
rc = msgq_msg_recv(&msg, q);
// The poll indicated a message was ready, but the receive failed. Try again
if (n == 1 && rc == 0){
continue;
}
if (timeout != -1){
break;
}
}
if (!non_blocking){
std::signal(SIGINT, prev_handler_sigint);
std::signal(SIGTERM, prev_handler_sigterm);
}
errno = msgq_do_exit ? EINTR : 0;
if (rc > 0){
if (msgq_do_exit){
msgq_msg_close(&msg); // Free unused message on exit
} else {
r = new MSGQMessage;
r->takeOwnership(msg.data, msg.size);
}
}
return (Message*)r;
}
void MSGQSubSocket::setTimeout(int t){
timeout = t;
}
MSGQSubSocket::~MSGQSubSocket(){
if (q != NULL){
msgq_close_queue(q);
delete q;
}
}
int MSGQPubSocket::connect(Context *context, std::string endpoint){
assert(context);
q = new msgq_queue_t;
int r = msgq_new_queue(q, endpoint.c_str(), DEFAULT_SEGMENT_SIZE);
if (r != 0){
return r;
}
msgq_init_publisher(q);
return 0;
}
int MSGQPubSocket::sendMessage(Message *message){
msgq_msg_t msg;
msg.data = message->getData();
msg.size = message->getSize();
return msgq_msg_send(&msg, q);
}
int MSGQPubSocket::send(char *data, size_t size){
msgq_msg_t msg;
msg.data = data;
msg.size = size;
return msgq_msg_send(&msg, q);
}
MSGQPubSocket::~MSGQPubSocket(){
if (q != NULL){
msgq_close_queue(q);
delete q;
}
}
void MSGQPoller::registerSocket(SubSocket * socket){
assert(num_polls + 1 < MAX_POLLERS);
polls[num_polls].q = (msgq_queue_t*)socket->getRawSocket();
sockets.push_back(socket);
num_polls++;
}
std::vector<SubSocket*> MSGQPoller::poll(int timeout){
std::vector<SubSocket*> r;
msgq_poll(polls, num_polls, timeout);
for (size_t i = 0; i < num_polls; i++){
if (polls[i].revents){
r.push_back(sockets[i]);
}
}
return r;
}

View File

@ -1,64 +0,0 @@
#pragma once
#include "messaging.hpp"
#include "msgq.hpp"
#include <zmq.h>
#include <string>
#define MAX_POLLERS 128
class MSGQContext : public Context {
private:
void * context = NULL;
public:
MSGQContext();
void * getRawContext() {return context;}
~MSGQContext();
};
class MSGQMessage : public Message {
private:
char * data;
size_t size;
public:
void init(size_t size);
void init(char *data, size_t size);
void takeOwnership(char *data, size_t size);
size_t getSize(){return size;}
char * getData(){return data;}
void close();
~MSGQMessage();
};
class MSGQSubSocket : public SubSocket {
private:
msgq_queue_t * q = NULL;
int timeout;
public:
int connect(Context *context, std::string endpoint, std::string address, bool conflate=false);
void setTimeout(int timeout);
void * getRawSocket() {return (void*)q;}
Message *receive(bool non_blocking=false);
~MSGQSubSocket();
};
class MSGQPubSocket : public PubSocket {
private:
msgq_queue_t * q = NULL;
public:
int connect(Context *context, std::string endpoint);
int sendMessage(Message *message);
int send(char *data, size_t size);
~MSGQPubSocket();
};
class MSGQPoller : public Poller {
private:
std::vector<SubSocket*> sockets;
msgq_pollitem_t polls[MAX_POLLERS];
size_t num_polls = 0;
public:
void registerSocket(SubSocket *socket);
std::vector<SubSocket*> poll(int timeout);
~MSGQPoller(){};
};

View File

@ -1,155 +0,0 @@
#include <cassert>
#include <cstring>
#include <iostream>
#include <cstdlib>
#include <cerrno>
#include <zmq.h>
#include "services.h"
#include "impl_zmq.hpp"
static int get_port(std::string endpoint) {
int port = -1;
for (const auto& it : services) {
std::string name = it.name;
if (name == endpoint) {
port = it.port;
break;
}
}
assert(port >= 0);
return port;
}
ZMQContext::ZMQContext() {
context = zmq_ctx_new();
}
ZMQContext::~ZMQContext() {
zmq_ctx_term(context);
}
void ZMQMessage::init(size_t sz) {
size = sz;
data = new char[size];
}
void ZMQMessage::init(char * d, size_t sz) {
size = sz;
data = new char[size];
memcpy(data, d, size);
}
void ZMQMessage::close() {
if (size > 0){
delete[] data;
}
size = 0;
}
ZMQMessage::~ZMQMessage() {
this->close();
}
int ZMQSubSocket::connect(Context *context, std::string endpoint, std::string address, bool conflate){
sock = zmq_socket(context->getRawContext(), ZMQ_SUB);
if (sock == NULL){
return -1;
}
zmq_setsockopt(sock, ZMQ_SUBSCRIBE, "", 0);
if (conflate){
int arg = 1;
zmq_setsockopt(sock, ZMQ_CONFLATE, &arg, sizeof(int));
}
int reconnect_ivl = 500;
zmq_setsockopt(sock, ZMQ_RECONNECT_IVL_MAX, &reconnect_ivl, sizeof(reconnect_ivl));
full_endpoint = "tcp://" + address + ":";
full_endpoint += std::to_string(get_port(endpoint));
return zmq_connect(sock, full_endpoint.c_str());
}
Message * ZMQSubSocket::receive(bool non_blocking){
zmq_msg_t msg;
assert(zmq_msg_init(&msg) == 0);
int flags = non_blocking ? ZMQ_DONTWAIT : 0;
int rc = zmq_msg_recv(&msg, sock, flags);
Message *r = NULL;
if (rc >= 0){
// Make a copy to ensure the data is aligned
r = new ZMQMessage;
r->init((char*)zmq_msg_data(&msg), zmq_msg_size(&msg));
}
zmq_msg_close(&msg);
return r;
}
void ZMQSubSocket::setTimeout(int timeout){
zmq_setsockopt(sock, ZMQ_RCVTIMEO, &timeout, sizeof(int));
}
ZMQSubSocket::~ZMQSubSocket(){
zmq_close(sock);
}
int ZMQPubSocket::connect(Context *context, std::string endpoint){
sock = zmq_socket(context->getRawContext(), ZMQ_PUB);
if (sock == NULL){
return -1;
}
full_endpoint = "tcp://*:";
full_endpoint += std::to_string(get_port(endpoint));
return zmq_bind(sock, full_endpoint.c_str());
}
int ZMQPubSocket::sendMessage(Message *message){
return zmq_send(sock, message->getData(), message->getSize(), ZMQ_DONTWAIT);
}
int ZMQPubSocket::send(char *data, size_t size){
return zmq_send(sock, data, size, ZMQ_DONTWAIT);
}
ZMQPubSocket::~ZMQPubSocket(){
zmq_close(sock);
}
void ZMQPoller::registerSocket(SubSocket * socket){
assert(num_polls + 1 < MAX_POLLERS);
polls[num_polls].socket = socket->getRawSocket();
polls[num_polls].events = ZMQ_POLLIN;
sockets.push_back(socket);
num_polls++;
}
std::vector<SubSocket*> ZMQPoller::poll(int timeout){
std::vector<SubSocket*> r;
int rc = zmq_poll(polls, num_polls, timeout);
if (rc < 0){
return r;
}
for (size_t i = 0; i < num_polls; i++){
if (polls[i].revents){
r.push_back(sockets[i]);
}
}
return r;
}

View File

@ -1,63 +0,0 @@
#pragma once
#include "messaging.hpp"
#include <zmq.h>
#include <string>
#define MAX_POLLERS 128
class ZMQContext : public Context {
private:
void * context = NULL;
public:
ZMQContext();
void * getRawContext() {return context;}
~ZMQContext();
};
class ZMQMessage : public Message {
private:
char * data;
size_t size;
public:
void init(size_t size);
void init(char *data, size_t size);
size_t getSize(){return size;}
char * getData(){return data;}
void close();
~ZMQMessage();
};
class ZMQSubSocket : public SubSocket {
private:
void * sock;
std::string full_endpoint;
public:
int connect(Context *context, std::string endpoint, std::string address, bool conflate=false);
void setTimeout(int timeout);
void * getRawSocket() {return sock;}
Message *receive(bool non_blocking=false);
~ZMQSubSocket();
};
class ZMQPubSocket : public PubSocket {
private:
void * sock;
std::string full_endpoint;
public:
int connect(Context *context, std::string endpoint);
int sendMessage(Message *message);
int send(char *data, size_t size);
~ZMQPubSocket();
};
class ZMQPoller : public Poller {
private:
std::vector<SubSocket*> sockets;
zmq_pollitem_t polls[MAX_POLLERS];
size_t num_polls = 0;
public:
void registerSocket(SubSocket *socket);
std::vector<SubSocket*> poll(int timeout);
~ZMQPoller(){};
};

View File

@ -1,123 +0,0 @@
#include "messaging.hpp"
#include "impl_zmq.hpp"
#include "impl_msgq.hpp"
#ifdef __APPLE__
const bool MUST_USE_ZMQ = true;
#else
const bool MUST_USE_ZMQ = false;
#endif
Context * Context::create(){
Context * c;
if (std::getenv("ZMQ") || MUST_USE_ZMQ){
c = new ZMQContext();
} else {
c = new MSGQContext();
}
return c;
}
SubSocket * SubSocket::create(){
SubSocket * s;
if (std::getenv("ZMQ") || MUST_USE_ZMQ){
s = new ZMQSubSocket();
} else {
s = new MSGQSubSocket();
}
return s;
}
SubSocket * SubSocket::create(Context * context, std::string endpoint){
SubSocket *s = SubSocket::create();
int r = s->connect(context, endpoint, "127.0.0.1");
if (r == 0) {
return s;
} else {
delete s;
return NULL;
}
}
SubSocket * SubSocket::create(Context * context, std::string endpoint, std::string address){
SubSocket *s = SubSocket::create();
int r = s->connect(context, endpoint, address);
if (r == 0) {
return s;
} else {
delete s;
return NULL;
}
}
SubSocket * SubSocket::create(Context * context, std::string endpoint, std::string address, bool conflate){
SubSocket *s = SubSocket::create();
int r = s->connect(context, endpoint, address, conflate);
if (r == 0) {
return s;
} else {
delete s;
return NULL;
}
}
PubSocket * PubSocket::create(){
PubSocket * s;
if (std::getenv("ZMQ") || MUST_USE_ZMQ){
s = new ZMQPubSocket();
} else {
s = new MSGQPubSocket();
}
return s;
}
PubSocket * PubSocket::create(Context * context, std::string endpoint){
PubSocket *s = PubSocket::create();
int r = s->connect(context, endpoint);
if (r == 0) {
return s;
} else {
delete s;
return NULL;
}
}
Poller * Poller::create(){
Poller * p;
if (std::getenv("ZMQ") || MUST_USE_ZMQ){
p = new ZMQPoller();
} else {
p = new MSGQPoller();
}
return p;
}
Poller * Poller::create(std::vector<SubSocket*> sockets){
Poller * p = Poller::create();
for (auto s : sockets){
p->registerSocket(s);
}
return p;
}
extern "C" Context * messaging_context_create() {
return Context::create();
}
extern "C" SubSocket * messaging_subsocket_create(Context* context, const char* endpoint) {
return SubSocket::create(context, std::string(endpoint));
}
extern "C" PubSocket * messaging_pubsocket_create(Context* context, const char* endpoint) {
return PubSocket::create(context, std::string(endpoint));
}
extern "C" Poller * messaging_poller_create(SubSocket** sockets, int size) {
std::vector<SubSocket*> socketsVec(sockets, sockets + size);
return Poller::create(socketsVec);
}

View File

@ -1,94 +0,0 @@
#pragma once
#include <cstddef>
#include <map>
#include <string>
#include <vector>
#include <capnp/serialize.h>
#include "../gen/cpp/log.capnp.h"
#define MSG_MULTIPLE_PUBLISHERS 100
class Context {
public:
virtual void * getRawContext() = 0;
static Context * create();
virtual ~Context(){};
};
class Message {
public:
virtual void init(size_t size) = 0;
virtual void init(char * data, size_t size) = 0;
virtual void close() = 0;
virtual size_t getSize() = 0;
virtual char * getData() = 0;
virtual ~Message(){};
};
class SubSocket {
public:
virtual int connect(Context *context, std::string endpoint, std::string address, bool conflate=false) = 0;
virtual void setTimeout(int timeout) = 0;
virtual Message *receive(bool non_blocking=false) = 0;
virtual void * getRawSocket() = 0;
static SubSocket * create();
static SubSocket * create(Context * context, std::string endpoint);
static SubSocket * create(Context * context, std::string endpoint, std::string address);
static SubSocket * create(Context * context, std::string endpoint, std::string address, bool conflate);
virtual ~SubSocket(){};
};
class PubSocket {
public:
virtual int connect(Context *context, std::string endpoint) = 0;
virtual int sendMessage(Message *message) = 0;
virtual int send(char *data, size_t size) = 0;
static PubSocket * create();
static PubSocket * create(Context * context, std::string endpoint);
virtual ~PubSocket(){};
};
class Poller {
public:
virtual void registerSocket(SubSocket *socket) = 0;
virtual std::vector<SubSocket*> poll(int timeout) = 0;
static Poller * create();
static Poller * create(std::vector<SubSocket*> sockets);
virtual ~Poller(){};
};
class SubMaster {
public:
SubMaster(const std::initializer_list<const char *> &service_list,
const char *address = nullptr, const std::initializer_list<const char *> &ignore_alive = {});
int update(int timeout = 1000);
inline bool allAlive(const std::initializer_list<const char *> &service_list = {}) { return all_(service_list, false, true); }
inline bool allValid(const std::initializer_list<const char *> &service_list = {}) { return all_(service_list, true, false); }
inline bool allAliveAndValid(const std::initializer_list<const char *> &service_list = {}) { return all_(service_list, true, true); }
void drain();
~SubMaster();
uint64_t frame = 0;
bool updated(const char *name) const;
uint64_t rcv_frame(const char *name) const;
cereal::Event::Reader &operator[](const char *name);
private:
bool all_(const std::initializer_list<const char *> &service_list, bool valid, bool alive);
Poller *poller_ = nullptr;
struct SubMessage;
std::map<SubSocket *, SubMessage *> messages_;
std::map<std::string, SubMessage *> services_;
};
class PubMaster {
public:
PubMaster(const std::initializer_list<const char *> &service_list);
inline int send(const char *name, capnp::byte *data, size_t size) { return sockets_.at(name)->send((char *)data, size); }
int send(const char *name, capnp::MessageBuilder &msg);
~PubMaster();
private:
std::map<std::string, PubSocket *> sockets_;
};

View File

@ -1,39 +0,0 @@
# distutils: language = c++
#cython: language_level=3
from libcpp.string cimport string
from libcpp.vector cimport vector
from libcpp cimport bool
cdef extern from "messaging.hpp":
cdef cppclass Context:
@staticmethod
Context * create()
cdef cppclass Message:
void init(size_t)
void init(char *, size_t)
void close()
size_t getSize()
char *getData()
cdef cppclass SubSocket:
@staticmethod
SubSocket * create()
int connect(Context *, string, string, bool)
Message * receive(bool)
void setTimeout(int)
cdef cppclass PubSocket:
@staticmethod
PubSocket * create()
int connect(Context *, string)
int sendMessage(Message *)
int send(char *, size_t)
cdef cppclass Poller:
@staticmethod
Poller * create()
void registerSocket(SubSocket *)
vector[SubSocket*] poll(int) nogil

View File

@ -1,151 +0,0 @@
# distutils: language = c++
# cython: c_string_encoding=ascii, language_level=3
import sys
from libcpp.string cimport string
from libcpp cimport bool
from libc cimport errno
from messaging cimport Context as cppContext
from messaging cimport SubSocket as cppSubSocket
from messaging cimport PubSocket as cppPubSocket
from messaging cimport Poller as cppPoller
from messaging cimport Message as cppMessage
class MessagingError(Exception):
pass
class MultiplePublishersError(MessagingError):
pass
cdef class Context:
cdef cppContext * context
def __cinit__(self):
self.context = cppContext.create()
def term(self):
del self.context
self.context = NULL
def __dealloc__(self):
pass
# Deleting the context will hang if sockets are still active
# TODO: Figure out a way to make sure the context is closed last
# del self.context
cdef class Poller:
cdef cppPoller * poller
cdef list sub_sockets
def __cinit__(self):
self.sub_sockets = []
self.poller = cppPoller.create()
def __dealloc__(self):
del self.poller
def registerSocket(self, SubSocket socket):
self.sub_sockets.append(socket)
self.poller.registerSocket(socket.socket)
def poll(self, timeout):
sockets = []
cdef int t = timeout
with nogil:
result = self.poller.poll(t)
for s in result:
socket = SubSocket()
socket.setPtr(s)
sockets.append(socket)
return sockets
cdef class SubSocket:
cdef cppSubSocket * socket
cdef bool is_owner
def __cinit__(self):
self.socket = cppSubSocket.create()
self.is_owner = True
if self.socket == NULL:
raise MessagingError
def __dealloc__(self):
if self.is_owner:
del self.socket
cdef setPtr(self, cppSubSocket * ptr):
if self.is_owner:
del self.socket
self.is_owner = False
self.socket = ptr
def connect(self, Context context, string endpoint, string address=b"127.0.0.1", bool conflate=False):
r = self.socket.connect(context.context, endpoint, address, conflate)
if r != 0:
if errno.errno == errno.EADDRINUSE:
raise MultiplePublishersError
else:
raise MessagingError
def setTimeout(self, int timeout):
self.socket.setTimeout(timeout)
def receive(self, bool non_blocking=False):
msg = self.socket.receive(non_blocking)
if msg == NULL:
# If a blocking read returns no message check errno if SIGINT was caught in the C++ code
if errno.errno == errno.EINTR:
print("SIGINT received, exiting")
sys.exit(1)
return None
else:
sz = msg.getSize()
m = msg.getData()[:sz]
del msg
return m
cdef class PubSocket:
cdef cppPubSocket * socket
def __cinit__(self):
self.socket = cppPubSocket.create()
if self.socket == NULL:
raise MessagingError
def __dealloc__(self):
del self.socket
def connect(self, Context context, string endpoint):
r = self.socket.connect(context.context, endpoint)
if r != 0:
if errno.errno == errno.EADDRINUSE:
raise MultiplePublishersError
else:
raise MessagingError
def send(self, string data):
length = len(data)
r = self.socket.send(<char*>data.c_str(), length)
if r != length:
if errno.errno == errno.EADDRINUSE:
raise MultiplePublishersError
else:
raise MessagingError
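Most callers go through the higher-level helpers, but the Cython bindings can also be used directly. A hedged sketch (module path and service name are assumptions; with the ZMQ backend a short sleep may be needed before the first receive):

# Hedged example: direct use of the low-level Cython bindings.
from cereal.messaging.messaging_pyx import Context, PubSocket, SubSocket  # path is an assumption

ctx = Context()
pub = PubSocket()
pub.connect(ctx, b'testJoystick')        # 'testJoystick' is a registered test service

sub = SubSocket()
sub.connect(ctx, b'testJoystick')        # connect the reader before publishing

pub.send(b'hello')
print(sub.receive(non_blocking=True))    # expected: b'hello' (raw bytes, no capnp parsing)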

View File

@ -1,57 +0,0 @@
import os
import subprocess
import sysconfig
from distutils.core import Extension, setup # pylint: disable=import-error,no-name-in-module
from Cython.Build import cythonize
from Cython.Distutils import build_ext
def get_ext_filename_without_platform_suffix(filename):
name, ext = os.path.splitext(filename)
ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
if ext_suffix == ext:
return filename
ext_suffix = ext_suffix.replace(ext, '')
idx = name.find(ext_suffix)
if idx == -1:
return filename
else:
return name[:idx] + ext
class BuildExtWithoutPlatformSuffix(build_ext):
def get_ext_filename(self, ext_name):
filename = super().get_ext_filename(ext_name)
return get_ext_filename_without_platform_suffix(filename)
sourcefiles = ['messaging_pyx.pyx']
extra_compile_args = ["-std=c++14"]
libraries = ['zmq']
ARCH = subprocess.check_output(["uname", "-m"], encoding='utf8').rstrip() # pylint: disable=unexpected-keyword-arg
if ARCH == "aarch64" and os.path.isdir("/system"):
# android
extra_compile_args += ["-Wno-deprecated-register"]
libraries += ['gnustl_shared']
setup(name='messaging',
cmdclass={'build_ext': BuildExtWithoutPlatformSuffix},
ext_modules=cythonize(
Extension(
"messaging_pyx",
language="c++",
sources=sourcefiles,
extra_compile_args=extra_compile_args,
libraries=libraries,
extra_objects=[
os.path.join(os.path.dirname(os.path.realpath(__file__)), '../', 'libmessaging.a'),
]
),
nthreads=4,
),
)

View File

@ -1,457 +0,0 @@
#include <iostream>
#include <cassert>
#include <cerrno>
#include <cmath>
#include <cstring>
#include <cstdint>
#include <chrono>
#include <algorithm>
#include <cstdlib>
#include <csignal>
#include <random>
#include <poll.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/syscall.h>
#include <fcntl.h>
#include <unistd.h>
#include <stdio.h>
#include "msgq.hpp"
void sigusr2_handler(int signal) {
assert(signal == SIGUSR2);
}
uint64_t msgq_get_uid(void){
std::random_device rd("/dev/urandom");
std::uniform_int_distribution<uint64_t> distribution(0,std::numeric_limits<uint32_t>::max());
#ifdef __APPLE__
// TODO: this doesn't work
uint64_t uid = distribution(rd) << 32 | getpid();
#else
uint64_t uid = distribution(rd) << 32 | syscall(SYS_gettid);
#endif
return uid;
}
int msgq_msg_init_size(msgq_msg_t * msg, size_t size){
msg->size = size;
msg->data = new(std::nothrow) char[size];
return (msg->data == NULL) ? -1 : 0;
}
int msgq_msg_init_data(msgq_msg_t * msg, char * data, size_t size) {
int r = msgq_msg_init_size(msg, size);
if (r == 0)
memcpy(msg->data, data, size);
return r;
}
int msgq_msg_close(msgq_msg_t * msg){
if (msg->size > 0)
delete[] msg->data;
msg->size = 0;
return 0;
}
void msgq_reset_reader(msgq_queue_t * q){
int id = q->reader_id;
q->read_valids[id]->store(true);
q->read_pointers[id]->store(*q->write_pointer);
}
void msgq_wait_for_subscriber(msgq_queue_t *q){
while (*q->num_readers == 0){
;
}
return;
}
int msgq_new_queue(msgq_queue_t * q, const char * path, size_t size){
assert(size < 0xFFFFFFFF); // Buffer must be smaller than 2^32 bytes
std::signal(SIGUSR2, sigusr2_handler);
const char * prefix = "/dev/shm/";
char * full_path = new char[strlen(path) + strlen(prefix) + 1];
strcpy(full_path, prefix);
strcat(full_path, path);
auto fd = open(full_path, O_RDWR | O_CREAT, 0777);
if (fd < 0) {
std::cout << "Warning, could not open: " << full_path << std::endl;
delete[] full_path;
return -1;
}
delete[] full_path;
int rc = ftruncate(fd, size + sizeof(msgq_header_t));
if (rc < 0)
return -1;
char * mem = (char*)mmap(NULL, size + sizeof(msgq_header_t), PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
close(fd);
if (mem == NULL)
return -1;
q->mmap_p = mem;
msgq_header_t *header = (msgq_header_t *)mem;
// Setup pointers to header segment
q->num_readers = reinterpret_cast<std::atomic<uint64_t>*>(&header->num_readers);
q->write_pointer = reinterpret_cast<std::atomic<uint64_t>*>(&header->write_pointer);
q->write_uid = reinterpret_cast<std::atomic<uint64_t>*>(&header->write_uid);
for (size_t i = 0; i < NUM_READERS; i++){
q->read_pointers[i] = reinterpret_cast<std::atomic<uint64_t>*>(&header->read_pointers[i]);
q->read_valids[i] = reinterpret_cast<std::atomic<uint64_t>*>(&header->read_valids[i]);
q->read_uids[i] = reinterpret_cast<std::atomic<uint64_t>*>(&header->read_uids[i]);
}
q->data = mem + sizeof(msgq_header_t);
q->size = size;
q->reader_id = -1;
q->endpoint = path;
q->read_conflate = false;
return 0;
}
void msgq_close_queue(msgq_queue_t *q){
if (q->mmap_p != NULL){
munmap(q->mmap_p, q->size + sizeof(msgq_header_t));
}
}
void msgq_init_publisher(msgq_queue_t * q) {
//std::cout << "Starting publisher" << std::endl;
uint64_t uid = msgq_get_uid();
*q->write_uid = uid;
*q->num_readers = 0;
for (size_t i = 0; i < NUM_READERS; i++){
*q->read_valids[i] = false;
*q->read_uids[i] = 0;
}
q->write_uid_local = uid;
}
static void thread_signal(uint32_t tid) {
#ifndef SYS_tkill
// TODO: this won't work for multithreaded programs
kill(tid, SIGUSR2);
#else
syscall(SYS_tkill, tid, SIGUSR2);
#endif
}
void msgq_init_subscriber(msgq_queue_t * q) {
assert(q != NULL);
assert(q->num_readers != NULL);
uint64_t uid = msgq_get_uid();
// Get reader id
while (true){
uint64_t cur_num_readers = *q->num_readers;
uint64_t new_num_readers = cur_num_readers + 1;
// No more slots available. Reset all subscribers to kick out inactive ones
if (new_num_readers > NUM_READERS){
std::cout << "Warning, evicting all subscribers!" << std::endl;
*q->num_readers = 0;
for (size_t i = 0; i < NUM_READERS; i++){
*q->read_valids[i] = false;
uint64_t old_uid = *q->read_uids[i];
*q->read_uids[i] = 0;
// Wake up reader in case they are in a poll
thread_signal(old_uid & 0xFFFFFFFF);
}
continue;
}
// Use atomic compare and swap to handle race condition
// where two subscribers start at the same time
if (std::atomic_compare_exchange_strong(q->num_readers,
&cur_num_readers,
new_num_readers)){
q->reader_id = cur_num_readers;
q->read_uid_local = uid;
// We start with read_valid = false,
// on the first read the read pointer will be synchronized with the write pointer
*q->read_valids[cur_num_readers] = false;
*q->read_pointers[cur_num_readers] = 0;
*q->read_uids[cur_num_readers] = uid;
break;
}
}
//std::cout << "New subscriber id: " << q->reader_id << " uid: " << q->read_uid_local << " " << q->endpoint << std::endl;
msgq_reset_reader(q);
}
int msgq_msg_send(msgq_msg_t * msg, msgq_queue_t *q){
// Die if we are no longer the active publisher
if (q->write_uid_local != *q->write_uid){
std::cout << "Killing old publisher: " << q->endpoint << std::endl;
errno = EADDRINUSE;
return -1;
}
uint64_t total_msg_size = ALIGN(msg->size + sizeof(int64_t));
// We need to fit at least three messages in the queue,
// so that we can always safely access the last message
assert(3 * total_msg_size <= q->size);
uint64_t num_readers = *q->num_readers;
uint32_t write_cycles, write_pointer;
UNPACK64(write_cycles, write_pointer, *q->write_pointer);
char *p = q->data + write_pointer; // add base offset
// Check remaining space
// Always leave space for a wraparound tag for the next message, including alignment
int64_t remaining_space = q->size - write_pointer - total_msg_size - sizeof(int64_t);
if (remaining_space <= 0){
// Write -1 size tag indicating wraparound
*(int64_t*)p = -1;
// Invalidate all readers that are beyond the write pointer
// TODO: should we handle the case where a new reader shows up while this is running?
for (uint64_t i = 0; i < num_readers; i++){
uint64_t read_pointer = *q->read_pointers[i];
uint64_t read_cycles = read_pointer >> 32;
read_pointer &= 0xFFFFFFFF;
if ((read_pointer > write_pointer) && (read_cycles != write_cycles)) {
*q->read_valids[i] = false;
}
}
// Update global and local copies of write pointer and write_cycles
write_pointer = 0;
write_cycles = write_cycles + 1;
PACK64(*q->write_pointer, write_cycles, write_pointer);
// Set actual pointer to the beginning of the data segment
p = q->data;
}
// Invalidate readers that are in the area that will be written
uint64_t start = write_pointer;
uint64_t end = ALIGN(start + sizeof(int64_t) + msg->size);
for (uint64_t i = 0; i < num_readers; i++){
uint32_t read_cycles, read_pointer;
UNPACK64(read_cycles, read_pointer, *q->read_pointers[i]);
if ((read_pointer >= start) && (read_pointer < end) && (read_cycles != write_cycles)) {
*q->read_valids[i] = false;
}
}
// Write size tag
std::atomic<int64_t> *size_p = reinterpret_cast<std::atomic<int64_t>*>(p);
*size_p = msg->size;
// Copy data
memcpy(p + sizeof(int64_t), msg->data, msg->size);
__sync_synchronize();
// Update write pointer
uint32_t new_ptr = ALIGN(write_pointer + msg->size + sizeof(int64_t));
PACK64(*q->write_pointer, write_cycles, new_ptr);
// Notify readers
for (uint64_t i = 0; i < num_readers; i++){
uint64_t reader_uid = *q->read_uids[i];
thread_signal(reader_uid & 0xFFFFFFFF);
}
return msg->size;
}
int msgq_msg_ready(msgq_queue_t * q){
start:
int id = q->reader_id;
assert(id >= 0); // Make sure subscriber is initialized
if (q->read_uid_local != *q->read_uids[id]){
std::cout << q->endpoint << ": Reader was evicted, reconnecting" << std::endl;
msgq_init_subscriber(q);
goto start;
}
// Check valid
if (!*q->read_valids[id]){
msgq_reset_reader(q);
goto start;
}
uint32_t read_cycles, read_pointer;
UNPACK64(read_cycles, read_pointer, *q->read_pointers[id]);
uint32_t write_cycles, write_pointer;
UNPACK64(write_cycles, write_pointer, *q->write_pointer);
// Check if new message is available
return (read_pointer != write_pointer);
}
int msgq_msg_recv(msgq_msg_t * msg, msgq_queue_t * q){
start:
int id = q->reader_id;
assert(id >= 0); // Make sure subscriber is initialized
if (q->read_uid_local != *q->read_uids[id]){
std::cout << q->endpoint << ": Reader was evicted, reconnecting" << std::endl;
msgq_init_subscriber(q);
goto start;
}
// Check valid
if (!*q->read_valids[id]){
msgq_reset_reader(q);
goto start;
}
uint32_t read_cycles, read_pointer;
UNPACK64(read_cycles, read_pointer, *q->read_pointers[id]);
uint32_t write_cycles, write_pointer;
UNPACK64(write_cycles, write_pointer, *q->write_pointer);
char * p = q->data + read_pointer;
// Check if new message is available
if (read_pointer == write_pointer) {
msg->size = 0;
return 0;
}
// Read potential message size
std::atomic<int64_t> *size_p = reinterpret_cast<std::atomic<int64_t>*>(p);
std::int64_t size = *size_p;
// Check if the size that was read is valid
if (!*q->read_valids[id]){
msgq_reset_reader(q);
goto start;
}
// If size is -1 the buffer was full, and we need to wrap around
if (size == -1){
read_cycles++;
PACK64(*q->read_pointers[id], read_cycles, 0);
goto start;
}
// crashing is better than passing garbage data to the consumer
// the size will have a bogus value if it was accidentally overwritten by data
assert((uint64_t)size < q->size);
assert(size > 0);
uint32_t new_read_pointer = ALIGN(read_pointer + sizeof(std::int64_t) + size);
// If conflate is true, check if this is the latest message, else start over
if (q->read_conflate){
if (new_read_pointer != write_pointer){
// Update read pointer
PACK64(*q->read_pointers[id], read_cycles, new_read_pointer);
goto start;
}
}
// Copy message
if (msgq_msg_init_size(msg, size) < 0)
return -1;
__sync_synchronize();
memcpy(msg->data, p + sizeof(int64_t), size);
__sync_synchronize();
// Update read pointer
PACK64(*q->read_pointers[id], read_cycles, new_read_pointer);
// Check if the actual data that was copied is valid
if (!*q->read_valids[id]){
msgq_msg_close(msg);
msgq_reset_reader(q);
goto start;
}
return msg->size;
}
int msgq_poll(msgq_pollitem_t * items, size_t nitems, int timeout){
assert(timeout >= 0);
int num = 0;
// Check if messages ready
for (size_t i = 0; i < nitems; i++) {
items[i].revents = msgq_msg_ready(items[i].q);
if (items[i].revents) num++;
}
int ms = (timeout == -1) ? 100 : timeout;
struct timespec ts;
ts.tv_sec = ms / 1000;
ts.tv_nsec = (ms % 1000) * 1000 * 1000;
while (num == 0) {
int ret;
ret = nanosleep(&ts, &ts);
// Check if messages ready
for (size_t i = 0; i < nitems; i++) {
if (items[i].revents == 0 && msgq_msg_ready(items[i].q)){
num += 1;
items[i].revents = 1;
}
}
// exit if we had a timeout and the sleep finished
if (timeout != -1 && ret == 0){
break;
}
}
return num;
}
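Each entry in the ring buffer is an 8-byte size tag followed by the payload, and a size tag of -1 tells readers to wrap back to offset 0. The toy Python sketch below illustrates only that framing, not the shared-memory, alignment, or multi-reader logic:

# Toy illustration of the msgq framing: [int64 size][payload] ..., -1 means wraparound.
import struct

BUF_SIZE = 64
buf = bytearray(BUF_SIZE)

def write(offset, payload):
    needed = 8 + len(payload)
    if offset + needed + 8 > BUF_SIZE:            # keep room for a wrap tag after us
        struct.pack_into("<q", buf, offset, -1)   # wraparound marker
        offset = 0
    struct.pack_into("<q", buf, offset, len(payload))
    buf[offset + 8:offset + needed] = payload
    return offset + needed

def read(offset):
    (size,) = struct.unpack_from("<q", buf, offset)
    if size == -1:                                # follow the wrap marker
        return read(0)
    return bytes(buf[offset + 8:offset + 8 + size]), offset + 8 + size

off = write(0, b"hello")
msg, off = read(0)
assert msg == b"hello"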

View File

@ -1,66 +0,0 @@
#pragma once
#include <cstdint>
#include <cstring>
#include <string>
#include <atomic>
#define DEFAULT_SEGMENT_SIZE (10 * 1024 * 1024)
#define NUM_READERS 8
#define ALIGN(n) ((n + (8 - 1)) & -8)
#define UNPACK64(higher, lower, input) do {uint64_t tmp = input; higher = tmp >> 32; lower = tmp & 0xFFFFFFFF;} while (0)
#define PACK64(output, higher, lower) output = ((uint64_t)higher << 32 ) | ((uint64_t)lower & 0xFFFFFFFF)
struct msgq_header_t {
uint64_t num_readers;
uint64_t write_pointer;
uint64_t write_uid;
uint64_t read_pointers[NUM_READERS];
uint64_t read_valids[NUM_READERS];
uint64_t read_uids[NUM_READERS];
};
struct msgq_queue_t {
std::atomic<uint64_t> *num_readers;
std::atomic<uint64_t> *write_pointer;
std::atomic<uint64_t> *write_uid;
std::atomic<uint64_t> *read_pointers[NUM_READERS];
std::atomic<uint64_t> *read_valids[NUM_READERS];
std::atomic<uint64_t> *read_uids[NUM_READERS];
char * mmap_p;
char * data;
size_t size;
int reader_id;
uint64_t read_uid_local;
uint64_t write_uid_local;
bool read_conflate;
std::string endpoint;
};
struct msgq_msg_t {
size_t size;
char * data;
};
struct msgq_pollitem_t {
msgq_queue_t *q;
int revents;
};
void msgq_wait_for_subscriber(msgq_queue_t *q);
void msgq_reset_reader(msgq_queue_t *q);
int msgq_msg_init_size(msgq_msg_t *msg, size_t size);
int msgq_msg_init_data(msgq_msg_t *msg, char * data, size_t size);
int msgq_msg_close(msgq_msg_t *msg);
int msgq_new_queue(msgq_queue_t * q, const char * path, size_t size);
void msgq_close_queue(msgq_queue_t *q);
void msgq_init_publisher(msgq_queue_t * q);
void msgq_init_subscriber(msgq_queue_t * q);
int msgq_msg_send(msgq_msg_t *msg, msgq_queue_t *q);
int msgq_msg_recv(msgq_msg_t *msg, msgq_queue_t *q);
int msgq_msg_ready(msgq_queue_t * q);
int msgq_poll(msgq_pollitem_t * items, size_t nitems, int timeout);
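The read and write pointers pack a 32-bit cycle counter and a 32-bit byte offset into a single 64-bit word, and ALIGN rounds message sizes up to a multiple of 8. A small Python sketch of the same arithmetic:

# Python equivalents of the ALIGN / PACK64 / UNPACK64 macros above.
def align(n):
    return (n + 7) & ~7                   # round up to a multiple of 8

def pack64(cycles, offset):
    return (cycles << 32) | (offset & 0xFFFFFFFF)

def unpack64(word):
    return word >> 32, word & 0xFFFFFFFF

assert align(13) == 16
assert unpack64(pack64(3, 1024)) == (3, 1024)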

View File

@ -1,175 +0,0 @@
#include <assert.h>
#include <time.h>
#include "messaging.hpp"
#include "services.h"
#ifdef __APPLE__
#define CLOCK_BOOTTIME CLOCK_MONOTONIC
#endif
static inline uint64_t nanos_since_boot() {
struct timespec t;
clock_gettime(CLOCK_BOOTTIME, &t);
return t.tv_sec * 1000000000ULL + t.tv_nsec;
}
static const service *get_service(const char *name) {
for (const auto &it : services) {
if (strcmp(it.name, name) == 0) return &it;
}
return nullptr;
}
static inline bool inList(const std::initializer_list<const char *> &list, const char *value) {
for (auto &v : list) {
if (strcmp(value, v) == 0) return true;
}
return false;
}
class MessageContext {
public:
MessageContext() { ctx_ = Context::create(); }
~MessageContext() { delete ctx_; }
Context *ctx_;
};
MessageContext ctx;
struct SubMaster::SubMessage {
std::string name;
SubSocket *socket = nullptr;
int freq = 0;
bool updated = false, alive = false, valid = false, ignore_alive;
uint64_t rcv_time = 0, rcv_frame = 0;
void *allocated_msg_reader = nullptr;
capnp::FlatArrayMessageReader *msg_reader = nullptr;
kj::Array<capnp::word> buf;
cereal::Event::Reader event;
};
SubMaster::SubMaster(const std::initializer_list<const char *> &service_list, const char *address,
const std::initializer_list<const char *> &ignore_alive) {
poller_ = Poller::create();
for (auto name : service_list) {
const service *serv = get_service(name);
assert(serv != nullptr);
SubSocket *socket = SubSocket::create(ctx.ctx_, name, address ? address : "127.0.0.1", true);
assert(socket != 0);
poller_->registerSocket(socket);
SubMessage *m = new SubMessage{
.socket = socket,
.freq = serv->frequency,
.ignore_alive = inList(ignore_alive, name),
.allocated_msg_reader = malloc(sizeof(capnp::FlatArrayMessageReader)),
.buf = kj::heapArray<capnp::word>(1024)};
messages_[socket] = m;
services_[name] = m;
}
}
int SubMaster::update(int timeout) {
if (++frame == UINT64_MAX) frame = 1;
for (auto &kv : messages_) kv.second->updated = false;
int updated = 0;
auto sockets = poller_->poll(timeout);
uint64_t current_time = nanos_since_boot();
for (auto s : sockets) {
Message *msg = s->receive(true);
if (msg == nullptr) continue;
SubMessage *m = messages_.at(s);
const size_t size = (msg->getSize() / sizeof(capnp::word)) + 1;
if (m->buf.size() < size) {
m->buf = kj::heapArray<capnp::word>(size);
}
memcpy(m->buf.begin(), msg->getData(), msg->getSize());
delete msg;
if (m->msg_reader) {
m->msg_reader->~FlatArrayMessageReader();
}
m->msg_reader = new (m->allocated_msg_reader) capnp::FlatArrayMessageReader(kj::ArrayPtr<capnp::word>(m->buf.begin(), size));
m->event = m->msg_reader->getRoot<cereal::Event>();
m->updated = true;
m->rcv_time = current_time;
m->rcv_frame = frame;
m->valid = m->event.getValid();
++updated;
}
for (auto &kv : messages_) {
SubMessage *m = kv.second;
m->alive = (m->freq <= (1e-5) || ((current_time - m->rcv_time) * (1e-9)) < (10.0 / m->freq));
}
return updated;
}
bool SubMaster::all_(const std::initializer_list<const char *> &service_list, bool valid, bool alive) {
int found = 0;
for (auto &kv : messages_) {
SubMessage *m = kv.second;
if (service_list.size() == 0 || inList(service_list, m->name.c_str())) {
found += (!valid || m->valid) && (!alive || (m->alive && !m->ignore_alive));
}
}
return service_list.size() == 0 ? found == messages_.size() : found == service_list.size();
}
void SubMaster::drain() {
while (true) {
auto polls = poller_->poll(0);
if (polls.size() == 0)
break;
for (auto sock : polls) {
Message *msg = sock->receive(true);
delete msg;
}
}
}
bool SubMaster::updated(const char *name) const {
return services_.at(name)->updated;
}
uint64_t SubMaster::rcv_frame(const char *name) const {
return services_.at(name)->rcv_frame;
}
cereal::Event::Reader &SubMaster::operator[](const char *name) {
return services_.at(name)->event;
};
SubMaster::~SubMaster() {
delete poller_;
for (auto &kv : messages_) {
SubMessage *m = kv.second;
if (m->msg_reader) {
m->msg_reader->~FlatArrayMessageReader();
}
free(m->allocated_msg_reader);
delete m->socket;
delete m;
}
}
PubMaster::PubMaster(const std::initializer_list<const char *> &service_list) {
for (auto name : service_list) {
assert(get_service(name) != nullptr);
PubSocket *socket = PubSocket::create(ctx.ctx_, name);
assert(socket);
sockets_[name] = socket;
}
}
int PubMaster::send(const char *name, capnp::MessageBuilder &msg) {
auto words = capnp::messageToFlatArray(msg);
auto bytes = words.asBytes();
return send(name, bytes.begin(), bytes.size());
}
PubMaster::~PubMaster() {
for (auto s : sockets_) delete s.second;
}

View File

@ -1,178 +0,0 @@
# TODO: these port numbers are hardcoded in c, fix this
# LogRotate: 8001 is a PUSH PULL socket between loggerd and visiond
# all ZMQ pub sub: port, should_log, frequency, (qlog_decimation)
# frame syncing packet
frame: [8002, true, 20., 1]
# accel, gyro, and compass
sensorEvents: [8003, true, 100., 100]
# GPS data, also global timestamp
gpsNMEA: [8004, true, 9.] # 9 msgs each sec
# CPU+MEM+GPU+BAT temps
thermal: [8005, true, 2., 1]
# List(CanData), list of can messages
can: [8006, true, 100.]
controlsState: [8007, true, 100., 100]
#liveEvent: [8008, true, 0.]
model: [8009, true, 20., 5]
features: [8010, true, 0.]
health: [8011, true, 2., 1]
radarState: [8012, true, 20., 5]
#liveUI: [8014, true, 0.]
encodeIdx: [8015, true, 20.]
liveTracks: [8016, true, 20.]
sendcan: [8017, true, 100.]
logMessage: [8018, true, 0.]
liveCalibration: [8019, true, 4., 4]
androidLog: [8020, true, 0.]
carState: [8021, true, 100., 10]
# 8022 is reserved for sshd
carControl: [8023, true, 100., 10]
plan: [8024, true, 20., 2]
liveLocation: [8025, true, 0., 1]
gpsLocation: [8026, true, 1., 1]
ethernetData: [8027, true, 0.]
navUpdate: [8028, true, 0.]
qcomGnss: [8029, true, 0.]
lidarPts: [8030, true, 0.]
procLog: [8031, true, 0.5]
gpsLocationExternal: [8032, true, 10., 1]
ubloxGnss: [8033, true, 10.]
clocks: [8034, true, 1., 1]
liveMpc: [8035, false, 20.]
liveLongitudinalMpc: [8036, false, 20.]
navStatus: [8038, true, 0.]
gpsLocationTrimble: [8039, true, 0.]
trimbleGnss: [8041, true, 0.]
ubloxRaw: [8042, true, 20.]
gpsPlannerPoints: [8043, true, 0.]
gpsPlannerPlan: [8044, true, 0.]
applanixRaw: [8046, true, 0.]
orbLocation: [8047, true, 0.]
trafficEvents: [8048, true, 0.]
liveLocationTiming: [8049, true, 0.]
orbslamCorrection: [8050, true, 0.]
liveLocationCorrected: [8051, true, 0.]
orbObservation: [8052, true, 0.]
applanixLocation: [8053, true, 0.]
liveLocationKalman: [8054, true, 20., 2]
uiNavigationEvent: [8055, true, 0.]
orbOdometry: [8057, true, 0.]
orbFeatures: [8058, false, 0.]
orbKeyFrame: [8059, true, 0.]
uiLayoutState: [8060, true, 0.]
frontEncodeIdx: [8061, true, 5.] # should be 20fps on tici
orbFeaturesSummary: [8062, true, 0.]
driverState: [8063, true, 5., 1]
liveParameters: [8064, true, 20., 2]
liveMapData: [8065, true, 0.]
cameraOdometry: [8066, true, 20., 5]
pathPlan: [8067, true, 20., 2]
kalmanOdometry: [8068, true, 0.]
thumbnail: [8069, true, 0.2, 1]
carEvents: [8070, true, 1., 1]
carParams: [8071, true, 0.02, 1]
frontFrame: [8072, true, 10.]
dMonitoringState: [8073, true, 5., 1]
offroadLayout: [8074, false, 0.]
wideEncodeIdx: [8075, true, 20.]
testModel: [8040, false, 0.]
testLiveLocation: [8045, false, 0.]
testJoystick: [8056, false, 0.]
# 8080 is reserved for slave testing daemon
# 8762 is reserved for logserver
# manager -- base process to manage starting and stopping of all others
# subscribes: thermal
# **** processes that communicate with the outside world ****
# thermald -- decides when to start and stop onroad
# subscribes: health, location
# publishes: thermal
# boardd -- communicates with the car
# subscribes: sendcan
# publishes: can, health, ubloxRaw
# sensord -- publishes IMU and Magnetometer
# publishes: sensorEvents
# gpsd -- publishes EON's gps
# publishes: gpsNMEA
# camerad -- publishes camera frames
# publishes: frame, frontFrame, thumbnail
# subscribes: driverState
# dmonitoringmodeld -- runs face detection on camera frames
# publishes: driverState
# **** stateful data transformers ****
# modeld -- runs & publishes the model
# publishes: model, cameraOdometry
# subscribes: liveCalibration, pathPlan
# plannerd -- decides where to drive the car
# subscribes: carState, model, radarState, controlsState, liveParameters
# publishes: plan, pathPlan, liveMpc, liveLongitudinalMpc
# controlsd -- drives the car by sending CAN messages to panda
# subscribes: can, thermal, health, plan, pathPlan, dMonitoringState, liveCalibration, model
# publishes: carState, carControl, sendcan, controlsState, carEvents, carParams
# dmonitoringd -- processes driver monitoring data and publishes driver awareness
# subscribes: driverState, liveCalibration, carState, model, gpsLocation
# publishes: dMonitoringState
# radard -- processes the radar and vision data
# subscribes: can, controlsState, model, liveParameters
# publishes: radarState, liveTracks
# params_learner -- learns vehicle params by observing the vehicle dynamics
# subscribes: controlsState, sensorEvents, cameraOdometry
# publishes: liveParameters
# calibrationd -- reads the posenet output and applies a temporal filter to determine the frame region to look at
# subscribes: cameraOdometry
# publishes: liveCalibration
# ubloxd -- reads raw ublox data and converts it into a readable format
# subscribes: ubloxRaw
# publishes: ubloxGnss
# **** LOGGING SERVICE ****
# loggerd
# subscribes: EVERYTHING
# **** NON VITAL SERVICES ****
# ui
# subscribes: thermal, model, controlsState, uiLayout, liveCalibration, radarState, liveMpc, plusFrame, liveMapData
# uploader
# communicates through file system with loggerd
# deleter
# communicates through file system with loggerd and uploader
# logmessaged -- central logging service, can log to cloud
# publishes: logMessage
# logcatd -- fetches logcat info from android
# publishes: androidLog
# proclogd -- fetches process information
# publishes: procLog
# tombstoned -- reports native crashes
# athenad -- on request, opens a sub socket and returns the value
# updated -- waits for network access and tries to update every hour

View File

@ -1,34 +0,0 @@
#!/usr/bin/env python3
import os
import yaml
class Service():
def __init__(self, port, should_log, frequency, decimation=None):
self.port = port
self.should_log = should_log
self.frequency = frequency
self.decimation = decimation
service_list_path = os.path.join(os.path.dirname(__file__), "service_list.yaml")
service_list = {}
with open(service_list_path, "r") as f:
for k, v in yaml.safe_load(f).items():
decimation = None
if len(v) == 4:
decimation = v[3]
service_list[k] = Service(v[0], v[1], v[2], decimation)
if __name__ == "__main__":
print("/* THIS IS AN AUTOGENERATED FILE, PLEASE EDIT service_list.yaml */")
print("#ifndef __SERVICES_H")
print("#define __SERVICES_H")
print("struct service { char name[0x100]; int port; bool should_log; int frequency; int decimation; };")
print("static struct service services[] = {")
for k, v in service_list.items():
print(' { .name = "%s", .port = %d, .should_log = %s, .frequency = %d, .decimation = %d },' % (k, v.port, "true" if v.should_log else "false", v.frequency, -1 if v.decimation is None else v.decimation))
print("};")
print("#endif")

8
codecov.yml 100644
View File

@ -0,0 +1,8 @@
comment: false
coverage:
status:
project:
default:
informational: true
patch: off

View File

@ -1,6 +1,4 @@
Import('env', 'cython_dependencies')
Import('envCython', 'common')
# Build cython clock module
env.Command(['common_pyx.so', 'clock.cpp'],
cython_dependencies + ['common_pyx_setup.py', 'clock.pyx'],
"cd common && python3 common_pyx_setup.py build_ext --inplace")
envCython.Program('clock.so', 'clock.pyx')
envCython.Program('params_pyx.so', 'params_pyx.pyx', LIBS=envCython['LIBS'] + [common, 'zmq'])

View File

@ -1,286 +0,0 @@
import os
import binascii
import itertools
import re
import struct
import subprocess
import random
from cereal import log
NetworkType = log.ThermalData.NetworkType
NetworkStrength = log.ThermalData.NetworkStrength
ANDROID = os.path.isfile('/EON')
def get_sound_card_online():
return (os.path.isfile('/proc/asound/card0/state') and
open('/proc/asound/card0/state').read().strip() == 'ONLINE')
def getprop(key):
if not ANDROID:
return ""
return subprocess.check_output(["getprop", key], encoding='utf8').strip()
def get_imei(slot):
slot = str(slot)
if slot not in ("0", "1"):
raise ValueError("SIM slot must be 0 or 1")
ret = parse_service_call_string(service_call(["iphonesubinfo", "3" , "i32", str(slot)]))
if not ret:
# allow non-android devices to be identified differently
ret = "%015d" % random.randint(0, 1 << 32)
return ret
def get_serial():
ret = getprop("ro.serialno")
if ret == "":
ret = "cccccccc"
return ret
def get_subscriber_info():
ret = parse_service_call_string(service_call(["iphonesubinfo", "7"]))
if ret is None or len(ret) < 8:
return ""
return ret
def reboot(reason=None):
if reason is None:
reason_args = ["null"]
else:
reason_args = ["s16", reason]
subprocess.check_output([
"service", "call", "power", "16", # IPowerManager.reboot
"i32", "0", # no confirmation,
*reason_args,
"i32", "1" # wait
])
def service_call(call):
if not ANDROID:
return None
ret = subprocess.check_output(["service", "call", *call], encoding='utf8').strip()
if 'Parcel' not in ret:
return None
return parse_service_call_bytes(ret)
def parse_service_call_unpack(r, fmt):
try:
return struct.unpack(fmt, r)[0]
except Exception:
return None
def parse_service_call_string(r):
try:
r = r[8:] # Cut off length field
r = r.decode('utf_16_be')
# All pairs of two characters seem to be swapped. Not sure why
result = ""
for a, b, in itertools.zip_longest(r[::2], r[1::2], fillvalue='\x00'):
result += b + a
result = result.replace('\x00', '')
return result
except Exception:
return None
def parse_service_call_bytes(ret):
try:
r = b""
for hex_part in re.findall(r'[ (]([0-9a-f]{8})', ret):
r += binascii.unhexlify(hex_part)
return r
except Exception:
return None
def get_network_type():
if not ANDROID:
return NetworkType.none
wifi_check = parse_service_call_string(service_call(["connectivity", "2"]))
if wifi_check is None:
return NetworkType.none
elif 'WIFI' in wifi_check:
return NetworkType.wifi
else:
cell_check = parse_service_call_unpack(service_call(['phone', '59']), ">q")
# from TelephonyManager.java
cell_networks = {
0: NetworkType.none,
1: NetworkType.cell2G,
2: NetworkType.cell2G,
3: NetworkType.cell3G,
4: NetworkType.cell2G,
5: NetworkType.cell3G,
6: NetworkType.cell3G,
7: NetworkType.cell3G,
8: NetworkType.cell3G,
9: NetworkType.cell3G,
10: NetworkType.cell3G,
11: NetworkType.cell2G,
12: NetworkType.cell3G,
13: NetworkType.cell4G,
14: NetworkType.cell4G,
15: NetworkType.cell3G,
16: NetworkType.cell2G,
17: NetworkType.cell3G,
18: NetworkType.cell4G,
19: NetworkType.cell4G
}
return cell_networks.get(cell_check, NetworkType.none)
def get_network_strength(network_type):
network_strength = NetworkStrength.unknown
# from SignalStrength.java
def get_lte_level(rsrp, rssnr):
INT_MAX = 2147483647
if rsrp == INT_MAX:
lvl_rsrp = NetworkStrength.unknown
elif rsrp >= -95:
lvl_rsrp = NetworkStrength.great
elif rsrp >= -105:
lvl_rsrp = NetworkStrength.good
elif rsrp >= -115:
lvl_rsrp = NetworkStrength.moderate
else:
lvl_rsrp = NetworkStrength.poor
if rssnr == INT_MAX:
lvl_rssnr = NetworkStrength.unknown
elif rssnr >= 45:
lvl_rssnr = NetworkStrength.great
elif rssnr >= 10:
lvl_rssnr = NetworkStrength.good
elif rssnr >= -30:
lvl_rssnr = NetworkStrength.moderate
else:
lvl_rssnr = NetworkStrength.poor
return max(lvl_rsrp, lvl_rssnr)
def get_tdscdma_level(tdscmadbm):
lvl = NetworkStrength.unknown
if tdscmadbm > -25:
lvl = NetworkStrength.unknown
elif tdscmadbm >= -49:
lvl = NetworkStrength.great
elif tdscmadbm >= -73:
lvl = NetworkStrength.good
elif tdscmadbm >= -97:
lvl = NetworkStrength.moderate
elif tdscmadbm >= -110:
lvl = NetworkStrength.poor
return lvl
def get_gsm_level(asu):
if asu <= 2 or asu == 99:
lvl = NetworkStrength.unknown
elif asu >= 12:
lvl = NetworkStrength.great
elif asu >= 8:
lvl = NetworkStrength.good
elif asu >= 5:
lvl = NetworkStrength.moderate
else:
lvl = NetworkStrength.poor
return lvl
def get_evdo_level(evdodbm, evdosnr):
lvl_evdodbm = NetworkStrength.unknown
lvl_evdosnr = NetworkStrength.unknown
if evdodbm >= -65:
lvl_evdodbm = NetworkStrength.great
elif evdodbm >= -75:
lvl_evdodbm = NetworkStrength.good
elif evdodbm >= -90:
lvl_evdodbm = NetworkStrength.moderate
elif evdodbm >= -105:
lvl_evdodbm = NetworkStrength.poor
if evdosnr >= 7:
lvl_evdosnr = NetworkStrength.great
elif evdosnr >= 5:
lvl_evdosnr = NetworkStrength.good
elif evdosnr >= 3:
lvl_evdosnr = NetworkStrength.moderate
elif evdosnr >= 1:
lvl_evdosnr = NetworkStrength.poor
return max(lvl_evdodbm, lvl_evdosnr)
def get_cdma_level(cdmadbm, cdmaecio):
lvl_cdmadbm = NetworkStrength.unknown
lvl_cdmaecio = NetworkStrength.unknown
if cdmadbm >= -75:
lvl_cdmadbm = NetworkStrength.great
elif cdmadbm >= -85:
lvl_cdmadbm = NetworkStrength.good
elif cdmadbm >= -95:
lvl_cdmadbm = NetworkStrength.moderate
elif cdmadbm >= -100:
lvl_cdmadbm = NetworkStrength.poor
if cdmaecio >= -90:
lvl_cdmaecio = NetworkStrength.great
elif cdmaecio >= -110:
lvl_cdmaecio = NetworkStrength.good
elif cdmaecio >= -130:
lvl_cdmaecio = NetworkStrength.moderate
elif cdmaecio >= -150:
lvl_cdmaecio = NetworkStrength.poor
return max(lvl_cdmadbm, lvl_cdmaecio)
if network_type == NetworkType.none:
return network_strength
if network_type == NetworkType.wifi:
out = subprocess.check_output('dumpsys connectivity', shell=True).decode('utf-8')
network_strength = NetworkStrength.unknown
for line in out.split('\n'):
signal_str = "SignalStrength: "
if signal_str in line:
lvl_idx_start = line.find(signal_str) + len(signal_str)
lvl_idx_end = line.find(']', lvl_idx_start)
lvl = int(line[lvl_idx_start : lvl_idx_end])
if lvl >= -50:
network_strength = NetworkStrength.great
elif lvl >= -60:
network_strength = NetworkStrength.good
elif lvl >= -70:
network_strength = NetworkStrength.moderate
else:
network_strength = NetworkStrength.poor
return network_strength
else:
# check cell strength
out = subprocess.check_output('dumpsys telephony.registry', shell=True).decode('utf-8')
for line in out.split('\n'):
if "mSignalStrength" in line:
arr = line.split(' ')
ns = 0
if ("gsm" in arr[14]):
rsrp = int(arr[9])
rssnr = int(arr[11])
ns = get_lte_level(rsrp, rssnr)
if ns == NetworkStrength.unknown:
tdscmadbm = int(arr[13])
ns = get_tdscdma_level(tdscmadbm)
if ns == NetworkStrength.unknown:
asu = int(arr[1])
ns = get_gsm_level(asu)
else:
cdmadbm = int(arr[3])
cdmaecio = int(arr[4])
evdodbm = int(arr[5])
evdosnr = int(arr[7])
lvl_cdma = get_cdma_level(cdmadbm, cdmaecio)
lvl_edmo = get_evdo_level(evdodbm, evdosnr)
if lvl_edmo == NetworkStrength.unknown:
ns = lvl_cdma
elif lvl_cdma == NetworkStrength.unknown:
ns = lvl_edmo
else:
ns = min(lvl_cdma, lvl_edmo)
network_strength = max(network_strength, ns)
return network_strength
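The two functions are meant to be chained; on a non-android host they fall back to NetworkType.none and NetworkStrength.unknown. A minimal sketch:

# Hedged example: query connectivity info (no-ops outside android).
from common.android import get_network_type, get_network_strength

nt = get_network_type()
print(nt, get_network_strength(nt))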

View File

@ -27,7 +27,11 @@ class Api():
'iat': now,
'exp': now + timedelta(hours=1)
}
return jwt.encode(payload, self.private_key, algorithm='RS256').decode('utf8')
token = jwt.encode(payload, self.private_key, algorithm='RS256')
if isinstance(token, bytes):
token = token.decode('utf8')
return token
def api_get(endpoint, method='GET', timeout=None, access_token=None, **params):
backend = "https://api.commadotai.com/"

View File

@ -1,97 +0,0 @@
import os
import subprocess
import glob
import hashlib
import shutil
from common.basedir import BASEDIR
from selfdrive.swaglog import cloudlog
android_packages = ("ai.comma.plus.offroad",)
def get_installed_apks():
dat = subprocess.check_output(["pm", "list", "packages", "-f"], encoding='utf8').strip().split("\n")
ret = {}
for x in dat:
if x.startswith("package:"):
v, k = x.split("package:")[1].split("=")
ret[k] = v
return ret
def install_apk(path):
# can only install from a world-readable path
install_path = "/sdcard/%s" % os.path.basename(path)
shutil.copyfile(path, install_path)
ret = subprocess.call(["pm", "install", "-r", install_path])
os.remove(install_path)
return ret == 0
def start_offroad():
set_package_permissions()
system("am start -n ai.comma.plus.offroad/.MainActivity")
def set_package_permissions():
pm_grant("ai.comma.plus.offroad", "android.permission.ACCESS_FINE_LOCATION")
pm_grant("ai.comma.plus.offroad", "android.permission.READ_PHONE_STATE")
pm_grant("ai.comma.plus.offroad", "android.permission.READ_EXTERNAL_STORAGE")
appops_set("ai.comma.plus.offroad", "SU", "allow")
appops_set("ai.comma.plus.offroad", "WIFI_SCAN", "allow")
def appops_set(package, op, mode):
system(f"LD_LIBRARY_PATH= appops set {package} {op} {mode}")
def pm_grant(package, permission):
system(f"pm grant {package} {permission}")
def system(cmd):
try:
cloudlog.info("running %s" % cmd)
subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
cloudlog.event("running failed",
cmd=e.cmd,
output=e.output[-1024:],
returncode=e.returncode)
# *** external functions ***
def update_apks():
# install apks
installed = get_installed_apks()
install_apks = glob.glob(os.path.join(BASEDIR, "apk/*.apk"))
for apk in install_apks:
app = os.path.basename(apk)[:-4]
if app not in installed:
installed[app] = None
cloudlog.info("installed apks %s" % (str(installed), ))
for app in installed.keys():
apk_path = os.path.join(BASEDIR, "apk/"+app+".apk")
if not os.path.exists(apk_path):
continue
h1 = hashlib.sha1(open(apk_path, 'rb').read()).hexdigest()
h2 = None
if installed[app] is not None:
h2 = hashlib.sha1(open(installed[app], 'rb').read()).hexdigest()
cloudlog.info("comparing version of %s %s vs %s" % (app, h1, h2))
if h2 is None or h1 != h2:
cloudlog.info("installing %s" % app)
success = install_apk(apk_path)
if not success:
cloudlog.info("needing to uninstall %s" % app)
system("pm uninstall %s" % app)
success = install_apk(apk_path)
assert success
def pm_apply_packages(cmd):
for p in android_packages:
system("pm %s %s" % (cmd, p))
if __name__ == "__main__":
update_apks()

View File

@ -1,10 +1,11 @@
import os
from pathlib import Path
from selfdrive.hardware import PC
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../"))
from common.android import ANDROID
if ANDROID:
PERSIST = "/persist"
PARAMS = "/data/params"
if PC:
PERSIST = os.path.join(str(Path.home()), ".comma", "persist")
else:
PERSIST = os.path.join(BASEDIR, "persist")
PARAMS = os.path.join(BASEDIR, "persist", "params")
PERSIST = "/persist"

View File

@ -1,3 +1,5 @@
# distutils: language = c++
# cython: language_level = 3
from posix.time cimport clock_gettime, timespec, CLOCK_MONOTONIC_RAW, clockid_t
IF UNAME_SYSNAME == "Darwin":

View File

@ -1,20 +0,0 @@
from distutils.core import Extension, setup # pylint: disable=import-error,no-name-in-module
from Cython.Build import cythonize
from common.cython_hacks import BuildExtWithoutPlatformSuffix
sourcefiles = ['clock.pyx']
extra_compile_args = ["-std=c++11"]
setup(name='Common',
cmdclass={'build_ext': BuildExtWithoutPlatformSuffix},
ext_modules=cythonize(
Extension(
"common_pyx",
language="c++",
sources=sourcefiles,
extra_compile_args=extra_compile_args,
)
),
nthreads=4,
)

View File

@ -0,0 +1,9 @@
# remove all keys that end in DEPRECATED
def strip_deprecated_keys(d):
for k in list(d.keys()):
if isinstance(k, str):
if k.endswith('DEPRECATED'):
d.pop(k)
elif isinstance(d[k], dict):
strip_deprecated_keys(d[k])
return d
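For example, matching keys are dropped recursively and the same dict object is returned:

# strip_deprecated_keys mutates the dict in place and returns it.
d = {'vEgo': 1.0, 'steeringAngleDEPRECATED': 0.0, 'nested': {'oldDEPRECATED': 1}}
assert strip_deprecated_keys(d) == {'vEgo': 1.0, 'nested': {}}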

View File

@ -44,7 +44,7 @@ def compile_code(name, c_code, c_header, directory, cflags="", libraries=None):
ffibuilder = FFI()
ffibuilder.set_source(name, c_code, source_extension='.cpp', libraries=libraries)
ffibuilder.cdef(c_header)
os.environ['OPT'] = "-fwrapv -O2 -DNDEBUG -std=c++11"
os.environ['OPT'] = "-fwrapv -O2 -DNDEBUG -std=c++1z"
os.environ['CFLAGS'] = cflags
ffibuilder.compile(verbose=True, debug=False, tmpdir=directory)

14
common/gpio.py 100644
View File

@ -0,0 +1,14 @@
def gpio_init(pin, output):
try:
with open(f"/sys/class/gpio/gpio{pin}/direction", 'wb') as f:
f.write(b"out" if output else b"in")
except Exception as e:
print(f"Failed to set gpio {pin} direction: {e}")
def gpio_set(pin, high):
try:
with open(f"/sys/class/gpio/gpio{pin}/value", 'wb') as f:
f.write(b"1" if high else b"0")
except Exception as e:
print(f"Failed to set gpio {pin} value: {e}")

View File

@ -1,6 +1,3 @@
Import('env', 'cython_dependencies')
env.Command(['simple_kalman_impl.so'],
cython_dependencies + ['simple_kalman_impl.pyx', 'simple_kalman_impl.pxd', 'simple_kalman_setup.py'],
"cd common/kalman && python3 simple_kalman_setup.py build_ext --inplace")
Import('envCython')
envCython.Program('simple_kalman_impl.so', 'simple_kalman_impl.pyx')

View File

@ -1,3 +1,5 @@
# cython: language_level = 3
cdef class KF1D:
cdef public:
double x0_0
@ -13,4 +15,4 @@ cdef class KF1D:
double A_K_0
double A_K_1
double A_K_2
double A_K_3
double A_K_3

View File

@ -1,3 +1,4 @@
# distutils: language = c++
# cython: language_level=3
cdef class KF1D:

View File

@ -1,9 +0,0 @@
from distutils.core import Extension, setup
from Cython.Build import cythonize
from common.cython_hacks import BuildExtWithoutPlatformSuffix
setup(name='Simple Kalman Implementation',
cmdclass={'build_ext': BuildExtWithoutPlatformSuffix},
ext_modules=cythonize(Extension("simple_kalman_impl", ["simple_kalman_impl.pyx"])))

View File

@ -82,3 +82,6 @@ kf = KF1D(x0=[[x0_0], [x1_0]],
kf_speed = timeit.timeit("kf.update(1234)", setup=setup, number=10000)
kf_old_speed = timeit.timeit("kf_old.update(1234)", setup=setup, number=10000)
self.assertTrue(kf_speed < kf_old_speed / 4)
if __name__ == "__main__":
unittest.main()

View File

@ -0,0 +1,12 @@
class lazy_property():
"""Defines a property whose value will be computed only once and as needed.
This can only be used on instance methods.
"""
def __init__(self, func):
self._func = func
def __get__(self, obj_self, cls):
value = self._func(obj_self)
setattr(obj_self, self._func.__name__, value)
return value
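
A short usage sketch: the first access runs the wrapped method, then shadows the descriptor with the computed value on the instance, so later accesses are plain attribute lookups (the class below is illustrative):

```
class Report:
  @lazy_property
  def summary(self):
    print("computing...")   # runs only once
    return "done"

r = Report()
r.summary   # prints "computing...", returns "done"
r.summary   # cached on the instance: no print, returns "done"
```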

View File

@ -3,6 +3,7 @@ import os
import sys
import copy
import json
import uuid
import socket
import logging
import traceback
@ -62,8 +63,48 @@ class SwagFormatter(logging.Formatter):
return record_dict
def format(self, record):
if self.swaglogger is None:
raise Exception("must set swaglogger before calling format()")
return json_robust_dumps(self.format_dict(record))
class SwagLogFileFormatter(SwagFormatter):
def fix_kv(self, k, v):
# append type to names to preserve legacy naming in logs
# avoids overlapping key namespaces with different types
# e.g. log.info() creates 'msg' -> 'msg$s'
# log.event() creates 'msg.health.logMonoTime' -> 'msg.health.logMonoTime$i'
# because overlapping namespace 'msg' caused problems
if isinstance(v, (str, bytes)):
k += "$s"
elif isinstance(v, float):
k += "$f"
elif isinstance(v, bool):
k += "$b"
elif isinstance(v, int):
k += "$i"
elif isinstance(v, dict):
nv = {}
for ik, iv in v.items():
ik, iv = self.fix_kv(ik, iv)
nv[ik] = iv
v = nv
elif isinstance(v, list):
k += "$a"
return k, v
def format(self, record):
if isinstance(record, str):
v = json.loads(record)
else:
v = self.format_dict(record)
mk, mv = self.fix_kv('msg', v['msg'])
del v['msg']
v[mk] = mv
v['id'] = uuid.uuid4().hex
return json_robust_dumps(v)
class SwagErrorFilter(logging.Filter):
def filter(self, record):
return record.levelno < logging.ERROR
@ -117,6 +158,8 @@ class SwagLogger(logging.Logger):
evt.update(kwargs)
if 'error' in kwargs:
self.error(evt)
if 'debug' in kwargs:
self.debug(evt)
else:
self.info(evt)
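
To illustrate the key-suffixing scheme in SwagLogFileFormatter.fix_kv above: scalar values get a type tag appended to their key, while nested dicts are processed recursively (a minimal illustration):

```
# given a SwagLogFileFormatter instance `fmt` (construction not shown in this hunk):
fmt.fix_kv('msg', 'hello')                           # -> ('msg$s', 'hello')
fmt.fix_kv('msg', {'health': {'logMonoTime': 123}})  # -> ('msg', {'health': {'logMonoTime$i': 123}})
```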

View File

@ -0,0 +1,22 @@
import numpy as np
def deep_interp_np(x, xp, fp, axis=None):
if axis is not None:
fp = fp.swapaxes(0,axis)
x = np.atleast_1d(x)
xp = np.array(xp)
if len(xp) < 2:
return np.repeat(fp, len(x), axis=0)
if min(np.diff(xp)) < 0:
raise RuntimeError('Bad x array for interpolation')
j = np.searchsorted(xp, x) - 1
j = np.clip(j, 0, len(xp)-2)
d = np.divide(x - xp[j], xp[j + 1] - xp[j], out=np.ones_like(x, dtype=np.float64), where=xp[j + 1] - xp[j] != 0)
vals_interp = (fp[j].T*(1 - d)).T + (fp[j + 1].T*d).T
if axis is not None:
vals_interp = vals_interp.swapaxes(0,axis)
if len(vals_interp) == 1:
return vals_interp[0]
else:
return vals_interp
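
For example, interpolating a (time, channel) array at new time points (a minimal illustration):

```
import numpy as np

xp = [0.0, 1.0, 2.0]
fp = np.array([[0.0, 10.0],
               [1.0, 20.0],
               [2.0, 30.0]])

deep_interp_np(0.5, xp, fp)         # -> array([ 0.5, 15. ])
deep_interp_np([0.5, 1.5], xp, fp)  # -> array([[ 0.5, 15. ], [ 1.5, 25. ]])
```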

437
common/params.py 100755 → 100644
View File

@ -1,413 +1,24 @@
#!/usr/bin/env python3
"""ROS has a parameter server, we have files.
The parameter store is a persistent key value store, implemented as a directory with a writer lock.
On Android, we store params under params_dir = /data/params. The writer lock is a file
"<params_dir>/.lock" taken using flock(), and data is stored in a directory symlinked to by
"<params_dir>/d".
Each key, value pair is stored as a file with named <key> with contents <value>, located in
<params_dir>/d/<key>
Readers of a single key can just open("<params_dir>/d/<key>") and read the file contents.
Readers who want a consistent snapshot of multiple keys should take the lock.
Writers should take the lock before modifying anything. Writers should also leave the DB in a
consistent state after a crash. The implementation below does this by copying all params to a temp
directory <params_dir>/<tmp>, then atomically symlinking <params_dir>/<d> to <params_dir>/<tmp>
before deleting the old <params_dir>/<d> directory.
Writers that only modify a single key can simply take the lock, then swap the corresponding value
file in place without messing with <params_dir>/d.
"""
import time
import os
import errno
import shutil
import fcntl
import tempfile
import threading
from enum import Enum
from common.basedir import PARAMS
def mkdirs_exists_ok(path):
try:
os.makedirs(path)
except OSError:
if not os.path.isdir(path):
raise
class TxType(Enum):
PERSISTENT = 1
CLEAR_ON_MANAGER_START = 2
CLEAR_ON_PANDA_DISCONNECT = 3
class UnknownKeyName(Exception):
pass
keys = {
"AccessToken": [TxType.CLEAR_ON_MANAGER_START],
"AthenadPid": [TxType.PERSISTENT],
"CalibrationParams": [TxType.PERSISTENT],
"CarParams": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"CarParamsCache": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"CarVin": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"CommunityFeaturesToggle": [TxType.PERSISTENT],
"CompletedTrainingVersion": [TxType.PERSISTENT],
"ControlsParams": [TxType.PERSISTENT],
"DisablePowerDown": [TxType.PERSISTENT],
"DisableUpdates": [TxType.PERSISTENT],
"DoUninstall": [TxType.CLEAR_ON_MANAGER_START],
"DongleId": [TxType.PERSISTENT],
"GitBranch": [TxType.PERSISTENT],
"GitCommit": [TxType.PERSISTENT],
"GitRemote": [TxType.PERSISTENT],
"GithubSshKeys": [TxType.PERSISTENT],
"HasAcceptedTerms": [TxType.PERSISTENT],
"HasCompletedSetup": [TxType.PERSISTENT],
"IsDriverViewEnabled": [TxType.CLEAR_ON_MANAGER_START],
"IsLdwEnabled": [TxType.PERSISTENT],
"IsGeofenceEnabled": [TxType.PERSISTENT],
"IsMetric": [TxType.PERSISTENT],
"IsOffroad": [TxType.CLEAR_ON_MANAGER_START],
"IsRHD": [TxType.PERSISTENT],
"IsTakingSnapshot": [TxType.CLEAR_ON_MANAGER_START],
"IsUpdateAvailable": [TxType.CLEAR_ON_MANAGER_START],
"IsUploadRawEnabled": [TxType.PERSISTENT],
"LastAthenaPingTime": [TxType.PERSISTENT],
"LastUpdateTime": [TxType.PERSISTENT],
"LastUpdateException": [TxType.PERSISTENT],
"LimitSetSpeed": [TxType.PERSISTENT],
"LimitSetSpeedNeural": [TxType.PERSISTENT],
"LiveParameters": [TxType.PERSISTENT],
"LongitudinalControl": [TxType.PERSISTENT],
"OpenpilotEnabledToggle": [TxType.PERSISTENT],
"LaneChangeEnabled": [TxType.PERSISTENT],
"PandaFirmware": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"PandaFirmwareHex": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"PandaDongleId": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"Passive": [TxType.PERSISTENT],
"RecordFront": [TxType.PERSISTENT],
"ReleaseNotes": [TxType.PERSISTENT],
"ShouldDoUpdate": [TxType.CLEAR_ON_MANAGER_START],
"SpeedLimitOffset": [TxType.PERSISTENT],
"SubscriberInfo": [TxType.PERSISTENT],
"TermsVersion": [TxType.PERSISTENT],
"TrainingVersion": [TxType.PERSISTENT],
"UpdateAvailable": [TxType.CLEAR_ON_MANAGER_START],
"UpdateFailedCount": [TxType.CLEAR_ON_MANAGER_START],
"Version": [TxType.PERSISTENT],
"Offroad_ChargeDisabled": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"Offroad_ConnectivityNeeded": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_ConnectivityNeededPrompt": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_TemperatureTooHigh": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_PandaFirmwareMismatch": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"Offroad_InvalidTime": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_IsTakingSnapshot": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_NeosUpdate": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_UpdateFailed": [TxType.CLEAR_ON_MANAGER_START],
}
def fsync_dir(path):
fd = os.open(path, os.O_RDONLY)
try:
os.fsync(fd)
finally:
os.close(fd)
class FileLock():
def __init__(self, path, create):
self._path = path
self._create = create
self._fd = None
def acquire(self):
self._fd = os.open(self._path, os.O_CREAT if self._create else 0)
fcntl.flock(self._fd, fcntl.LOCK_EX)
def release(self):
if self._fd is not None:
os.close(self._fd)
self._fd = None
class DBAccessor():
def __init__(self, path):
self._path = path
self._vals = None
def keys(self):
self._check_entered()
return self._vals.keys()
def get(self, key):
self._check_entered()
if self._vals is None:
return None
try:
return self._vals[key]
except KeyError:
return None
def _get_lock(self, create):
lock = FileLock(os.path.join(self._path, ".lock"), create)
lock.acquire()
return lock
def _read_values_locked(self):
"""Callers should hold a lock while calling this method."""
vals = {}
try:
data_path = self._data_path()
keys = os.listdir(data_path)
for key in keys:
with open(os.path.join(data_path, key), "rb") as f:
vals[key] = f.read()
except (OSError, IOError) as e:
# Either the DB hasn't been created yet, or somebody wrote a bug and left the DB in an
# inconsistent state. Either way, return empty.
if e.errno == errno.ENOENT:
return {}
return vals
def _data_path(self):
return os.path.join(self._path, "d")
def _check_entered(self):
if self._vals is None:
raise Exception("Must call __enter__ before using DB")
class DBReader(DBAccessor):
def __enter__(self):
try:
lock = self._get_lock(False)
except OSError as e:
# Do not create lock if it does not exist.
if e.errno == errno.ENOENT:
self._vals = {}
return self
try:
# Read everything.
self._vals = self._read_values_locked()
return self
finally:
lock.release()
def __exit__(self, exc_type, exc_value, traceback):
pass
class DBWriter(DBAccessor):
def __init__(self, path):
super(DBWriter, self).__init__(path)
self._lock = None
self._prev_umask = None
def put(self, key, value):
self._vals[key] = value
def delete(self, key):
self._vals.pop(key, None)
def __enter__(self):
mkdirs_exists_ok(self._path)
# Make sure we can write and that permissions are correct.
self._prev_umask = os.umask(0)
try:
os.chmod(self._path, 0o777)
self._lock = self._get_lock(True)
self._vals = self._read_values_locked()
except Exception:
os.umask(self._prev_umask)
self._prev_umask = None
raise
return self
def __exit__(self, exc_type, exc_value, traceback):
self._check_entered()
try:
# data_path refers to the externally used path to the params. It is a symlink.
# old_data_path is the path currently pointed to by data_path.
# tempdir_path is a path where the new params will go, which the new data path will point to.
# new_data_path is a temporary symlink that will atomically overwrite data_path.
#
# The current situation is:
# data_path -> old_data_path
# We're going to write params data to tempdir_path
# tempdir_path -> params data
# Then point new_data_path to tempdir_path
# new_data_path -> tempdir_path
# Then atomically overwrite data_path with new_data_path
# data_path -> tempdir_path
old_data_path = None
new_data_path = None
tempdir_path = tempfile.mkdtemp(prefix=".tmp", dir=self._path)
try:
# Write back all keys.
os.chmod(tempdir_path, 0o777)
for k, v in self._vals.items():
with open(os.path.join(tempdir_path, k), "wb") as f:
f.write(v)
f.flush()
os.fsync(f.fileno())
fsync_dir(tempdir_path)
data_path = self._data_path()
try:
old_data_path = os.path.join(self._path, os.readlink(data_path))
except (OSError, IOError):
# NOTE(mgraczyk): If other DB implementations have bugs, this could cause
# copies to be left behind, but we still want to overwrite.
pass
new_data_path = "{}.link".format(tempdir_path)
os.symlink(os.path.basename(tempdir_path), new_data_path)
os.rename(new_data_path, data_path)
fsync_dir(self._path)
finally:
# If the rename worked, we can delete the old data. Otherwise delete the new one.
success = new_data_path is not None and os.path.exists(data_path) and (
os.readlink(data_path) == os.path.basename(tempdir_path))
if success:
if old_data_path is not None:
shutil.rmtree(old_data_path)
else:
shutil.rmtree(tempdir_path)
# Regardless of what happened above, there should be no link at new_data_path.
if new_data_path is not None and os.path.islink(new_data_path):
os.remove(new_data_path)
finally:
os.umask(self._prev_umask)
self._prev_umask = None
# Always release the lock.
self._lock.release()
self._lock = None
def read_db(params_path, key):
path = "%s/d/%s" % (params_path, key)
try:
with open(path, "rb") as f:
return f.read()
except IOError:
return None
def write_db(params_path, key, value):
if isinstance(value, str):
value = value.encode('utf8')
prev_umask = os.umask(0)
lock = FileLock(params_path + "/.lock", True)
lock.acquire()
try:
tmp_path = tempfile.NamedTemporaryFile(mode="wb", prefix=".tmp", dir=params_path, delete=False)
with tmp_path as f:
f.write(value)
f.flush()
os.fsync(f.fileno())
os.chmod(tmp_path.name, 0o666)
path = "%s/d/%s" % (params_path, key)
os.rename(tmp_path.name, path)
fsync_dir(os.path.dirname(path))
finally:
os.umask(prev_umask)
lock.release()
class Params():
def __init__(self, db=PARAMS):
self.db = db
# create the database if it doesn't exist...
if not os.path.exists(self.db + "/d"):
with self.transaction(write=True):
pass
def clear_all(self):
shutil.rmtree(self.db, ignore_errors=True)
with self.transaction(write=True):
pass
def transaction(self, write=False):
if write:
return DBWriter(self.db)
else:
return DBReader(self.db)
def _clear_keys_with_type(self, tx_type):
with self.transaction(write=True) as txn:
for key in keys:
if tx_type in keys[key]:
txn.delete(key)
def manager_start(self):
self._clear_keys_with_type(TxType.CLEAR_ON_MANAGER_START)
def panda_disconnect(self):
self._clear_keys_with_type(TxType.CLEAR_ON_PANDA_DISCONNECT)
def delete(self, key):
with self.transaction(write=True) as txn:
txn.delete(key)
def get(self, key, block=False, encoding=None):
if key not in keys:
raise UnknownKeyName(key)
while 1:
ret = read_db(self.db, key)
if not block or ret is not None:
break
# is polling really the best we can do?
time.sleep(0.05)
if ret is not None and encoding is not None:
ret = ret.decode(encoding)
return ret
def put(self, key, dat):
"""
Warning: This function blocks until the param is written to disk!
In very rare cases this can take over a second, and your code will hang.
Use the put_nonblocking helper function in time sensitive code, but
in general try to avoid writing params as much as possible.
"""
if key not in keys:
raise UnknownKeyName(key)
write_db(self.db, key, dat)
def put_nonblocking(key, val):
def f(key, val):
params = Params()
params.put(key, val)
t = threading.Thread(target=f, args=(key, val))
t.start()
return t
from common.params_pyx import Params, ParamKeyType, UnknownKeyName, put_nonblocking # pylint: disable=no-name-in-module, import-error
assert Params
assert ParamKeyType
assert UnknownKeyName
assert put_nonblocking
if __name__ == "__main__":
import sys
from common.params_pyx import keys # pylint: disable=no-name-in-module, import-error
params = Params()
if len(sys.argv) == 3:
name = sys.argv[1]
val = sys.argv[2]
assert name.encode("utf-8") in keys.keys(), f"unknown param: {name}"
print(f"SET: {name} = {val}")
params.put(name, val)
elif len(sys.argv) == 2:
name = sys.argv[1]
assert name.encode("utf-8") in keys.keys(), f"unknown param: {name}"
print(f"GET: {name} = {params.get(name)}")
else:
for k in keys.keys():
print(f"GET: {k} = {params.get(k)}")

View File

@ -0,0 +1,27 @@
from libcpp.string cimport string
from libcpp cimport bool
cdef extern from "selfdrive/common/params.cc":
pass
cdef extern from "selfdrive/common/util.cc":
pass
cdef extern from "selfdrive/common/params.h":
cpdef enum ParamKeyType:
PERSISTENT
CLEAR_ON_MANAGER_START
CLEAR_ON_PANDA_DISCONNECT
CLEAR_ON_IGNITION
ALL
cdef cppclass Params:
Params(bool)
Params(string)
string get(string, bool) nogil
bool getBool(string)
int remove(string)
int put(string, string)
int putBool(string, bool)
bool checkKey(string)
void clearAll(ParamKeyType)

View File

@ -0,0 +1,108 @@
# distutils: language = c++
# cython: language_level = 3
from libcpp cimport bool
from libcpp.string cimport string
from common.params_pxd cimport Params as c_Params, ParamKeyType as c_ParamKeyType
import os
import threading
from common.basedir import BASEDIR
cdef class ParamKeyType:
PERSISTENT = c_ParamKeyType.PERSISTENT
CLEAR_ON_MANAGER_START = c_ParamKeyType.CLEAR_ON_MANAGER_START
CLEAR_ON_PANDA_DISCONNECT = c_ParamKeyType.CLEAR_ON_PANDA_DISCONNECT
CLEAR_ON_IGNITION = c_ParamKeyType.CLEAR_ON_IGNITION
ALL = c_ParamKeyType.ALL
def ensure_bytes(v):
if isinstance(v, str):
return v.encode()
else:
return v
class UnknownKeyName(Exception):
pass
cdef class Params:
cdef c_Params* p
def __cinit__(self, d=None, bool persistent_params=False):
if d is None:
self.p = new c_Params(persistent_params)
else:
self.p = new c_Params(<string>d.encode())
def __dealloc__(self):
del self.p
def clear_all(self, tx_type=None):
if tx_type is None:
tx_type = ParamKeyType.ALL
self.p.clearAll(tx_type)
def check_key(self, key):
key = ensure_bytes(key)
if not self.p.checkKey(key):
raise UnknownKeyName(key)
return key
def get(self, key, block=False, encoding=None):
cdef string k = self.check_key(key)
cdef bool b = block
cdef string val
with nogil:
val = self.p.get(k, b)
if val == b"":
if block:
# If we got no value while running in blocked mode
# it means we got an interrupt while waiting
raise KeyboardInterrupt
else:
return None
if encoding is not None:
return val.decode(encoding)
else:
return val
def get_bool(self, key):
cdef string k = self.check_key(key)
return self.p.getBool(k)
def put(self, key, dat):
"""
Warning: This function blocks until the param is written to disk!
In very rare cases this can take over a second, and your code will hang.
Use the put_nonblocking helper function in time sensitive code, but
in general try to avoid writing params as much as possible.
"""
cdef string k = self.check_key(key)
dat = ensure_bytes(dat)
self.p.put(k, dat)
def put_bool(self, key, val):
cdef string k = self.check_key(key)
self.p.putBool(k, val)
def delete(self, key):
cdef string k = self.check_key(key)
self.p.remove(k)
def put_nonblocking(key, val, d=None):
def f(key, val):
params = Params(d)
cdef string k = ensure_bytes(key)
params.put(k, val)
t = threading.Thread(target=f, args=(key, val))
t.start()
return t
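
A minimal usage sketch of the Cython-backed Params API defined above (key names come from the project's key list; the default path is the on-device params directory):

```
from common.params import Params, put_nonblocking

params = Params()
params.put("DongleId", "cb38263377b873ee")
dongle_id = params.get("DongleId", encoding='utf8')
metric = params.get_bool("IsMetric")
put_nonblocking("IsMetric", "1")   # writes from a background thread
```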

View File

@ -36,10 +36,10 @@ class Profiler():
if not self.enabled:
return
self.iter += 1
print("******* Profiling *******")
print("******* Profiling %d *******" % self.iter)
for n, ms in sorted(self.cp.items(), key=lambda x: -x[1]):
if n in self.cp_ignored:
print("%30s: %9.2f percent: %3.0f IGNORED" % (n, ms*1000.0, ms/self.tot*100))
print("%30s: %9.2f avg: %7.2f percent: %3.0f IGNORED" % (n, ms*1000.0, ms*1000.0/self.iter, ms/self.tot*100))
else:
print("%30s: %9.2f percent: %3.0f" % (n, ms*1000.0, ms/self.tot*100))
print("%30s: %9.2f avg: %7.2f percent: %3.0f" % (n, ms*1000.0, ms*1000.0/self.iter, ms/self.tot*100))
print("Iter clock: %2.6f TOTAL: %2.2f" % (self.tot/self.iter, self.tot))

View File

@ -1,53 +1,50 @@
"""Utilities for reading real time clocks and keeping soft real time constraints."""
import gc
import os
import time
import platform
import subprocess
import multiprocessing
from cffi import FFI
from common.android import ANDROID
from common.common_pyx import sec_since_boot # pylint: disable=no-name-in-module, import-error
from common.clock import sec_since_boot # pylint: disable=no-name-in-module, import-error
from selfdrive.hardware import PC, TICI
# time step for each process
DT_CTRL = 0.01 # controlsd
DT_MDL = 0.05 # model
DT_DMON = 0.1 # driver monitoring
DT_TRML = 0.5 # thermald and manager
# driver monitoring
if TICI:
DT_DMON = 0.05
else:
DT_DMON = 0.1
ffi = FFI()
ffi.cdef("long syscall(long number, ...);")
libc = ffi.dlopen(None)
def _get_tid():
if platform.machine() == "x86_64":
NR_gettid = 186
elif platform.machine() == "aarch64":
NR_gettid = 178
else:
raise NotImplementedError
class Priority:
# CORE 2
# - modeld = 55
# - camerad = 54
CTRL_LOW = 51 # plannerd & radard
return libc.syscall(NR_gettid)
# CORE 3
# - boardd = 55
CTRL_HIGH = 53
def set_realtime_priority(level):
if os.getuid() != 0:
print("not setting priority, not root")
return
if not PC:
os.sched_setscheduler(0, os.SCHED_FIFO, os.sched_param(level))
return subprocess.call(['chrt', '-f', '-p', str(level), str(_get_tid())])
def set_core_affinity(core):
if os.getuid() != 0:
print("not setting affinity, not root")
return
if not PC:
os.sched_setaffinity(0, [core,])
if ANDROID:
return subprocess.call(['taskset', '-p', str(core), str(_get_tid())])
else:
return -1
def config_realtime_process(core, priority):
gc.disable()
set_realtime_priority(priority)
set_core_affinity(core)
class Ratekeeper():
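
A usage sketch for the realtime helpers above, pinning the current process to a core with a FIFO priority (the core and priority values here are illustrative):

```
from common.realtime import config_realtime_process, Priority

# e.g. a control loop pinned to core 3 at the high control priority
config_realtime_process(3, Priority.CTRL_HIGH)
```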

View File

@ -4,22 +4,19 @@ from common.basedir import BASEDIR
class Spinner():
def __init__(self, noop=False):
# spinner is currently only implemented for android
self.spinner_proc = None
if not noop:
try:
self.spinner_proc = subprocess.Popen(["./spinner"],
stdin=subprocess.PIPE,
cwd=os.path.join(BASEDIR, "selfdrive", "ui", "spinner"),
close_fds=True)
except OSError:
self.spinner_proc = None
def __init__(self):
try:
self.spinner_proc = subprocess.Popen(["./spinner"],
stdin=subprocess.PIPE,
cwd=os.path.join(BASEDIR, "selfdrive", "ui"),
close_fds=True)
except OSError:
self.spinner_proc = None
def __enter__(self):
return self
def update(self, spinner_text):
def update(self, spinner_text: str):
if self.spinner_proc is not None:
self.spinner_proc.stdin.write(spinner_text.encode('utf8') + b"\n")
try:
@ -27,6 +24,9 @@ class Spinner():
except BrokenPipeError:
pass
def update_progress(self, cur: int, total: int):
self.update(str(round(100 * cur / total)))
def close(self):
if self.spinner_proc is not None:
try:
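
A minimal usage sketch of the Spinner API shown above, using the new update_progress helper and closing explicitly:

```
spinner = Spinner()
spinner.update("loading model")
for i in range(10):
    spinner.update_progress(i + 1, 10)   # writes the percentage to the spinner process
spinner.close()
```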

View File

@ -0,0 +1,6 @@
def replace_right(s, old, new, occurrence):
# replace_right('1232425', '2', ' ', 1) -> '12324 5'
# replace_right('1232425', '2', ' ', 2) -> '123 4 5'
split = s.rsplit(old, occurrence)
return new.join(split)

View File

@ -0,0 +1,28 @@
import os
import unittest
from uuid import uuid4
from common.file_helpers import atomic_write_on_fs_tmp
from common.file_helpers import atomic_write_in_dir
class TestFileHelpers(unittest.TestCase):
def run_atomic_write_func(self, atomic_write_func):
path = "/tmp/tmp{}".format(uuid4())
with atomic_write_func(path) as f:
f.write("test")
with open(path) as f:
self.assertEqual(f.read(), "test")
self.assertEqual(os.stat(path).st_mode & 0o777, 0o644)
os.remove(path)
def test_atomic_write_on_fs_tmp(self):
self.run_atomic_write_func(atomic_write_on_fs_tmp)
def test_atomic_write_in_dir(self):
self.run_atomic_write_func(atomic_write_in_dir)
if __name__ == "__main__":
unittest.main()
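
A hypothetical sketch of what atomic_write_in_dir could look like (the real helper lives in common/file_helpers.py and is not shown in this diff): write to a temporary file in the destination directory, then atomically rename it into place.

```
import os
import tempfile
from contextlib import contextmanager

@contextmanager
def atomic_write_in_dir_sketch(path, mode="w"):
  fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
  try:
    with os.fdopen(fd, mode) as f:
      yield f
    os.chmod(tmp_path, 0o644)   # matches the permission asserted in the test above
    os.replace(tmp_path, path)  # atomic rename on POSIX filesystems
  except Exception:
    os.remove(tmp_path)
    raise
```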

View File

@ -0,0 +1,26 @@
import numpy as np
import unittest
from common.numpy_fast import interp
class InterpTest(unittest.TestCase):
def test_correctness_controls(self):
_A_CRUISE_MIN_BP = np.asarray([0., 5., 10., 20., 40.])
_A_CRUISE_MIN_V = np.asarray([-1.0, -.8, -.67, -.5, -.30])
v_ego_arr = [-1, -1e-12, 0, 4, 5, 6, 7, 10, 11, 15.2, 20, 21, 39,
39.999999, 40, 41]
expected = np.interp(v_ego_arr, _A_CRUISE_MIN_BP, _A_CRUISE_MIN_V)
actual = interp(v_ego_arr, _A_CRUISE_MIN_BP, _A_CRUISE_MIN_V)
np.testing.assert_equal(actual, expected)
for v_ego in v_ego_arr:
expected = np.interp(v_ego, _A_CRUISE_MIN_BP, _A_CRUISE_MIN_V)
actual = interp(v_ego, _A_CRUISE_MIN_BP, _A_CRUISE_MIN_V)
np.testing.assert_equal(actual, expected)
if __name__ == "__main__":
unittest.main()

View File

@ -0,0 +1,116 @@
import os
import threading
import time
import tempfile
import shutil
import stat
import unittest
from common.params import Params, ParamKeyType, UnknownKeyName, put_nonblocking
class TestParams(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
print("using", self.tmpdir)
self.params = Params(self.tmpdir)
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_params_put_and_get(self):
self.params.put("DongleId", "cb38263377b873ee")
assert self.params.get("DongleId") == b"cb38263377b873ee"
def test_persist_params_put_and_get(self):
p = Params(persistent_params=True)
p.put("DongleId", "cb38263377b873ee")
assert p.get("DongleId") == b"cb38263377b873ee"
def test_params_non_ascii(self):
st = b"\xe1\x90\xff"
self.params.put("CarParams", st)
assert self.params.get("CarParams") == st
def test_params_get_cleared_panda_disconnect(self):
self.params.put("CarParams", "test")
self.params.put("DongleId", "cb38263377b873ee")
assert self.params.get("CarParams") == b"test"
self.params.clear_all(ParamKeyType.CLEAR_ON_PANDA_DISCONNECT)
assert self.params.get("CarParams") is None
assert self.params.get("DongleId") is not None
def test_params_get_cleared_manager_start(self):
self.params.put("CarParams", "test")
self.params.put("DongleId", "cb38263377b873ee")
assert self.params.get("CarParams") == b"test"
self.params.clear_all(ParamKeyType.CLEAR_ON_MANAGER_START)
assert self.params.get("CarParams") is None
assert self.params.get("DongleId") is not None
def test_params_two_things(self):
self.params.put("DongleId", "bob")
self.params.put("AthenadPid", "123")
assert self.params.get("DongleId") == b"bob"
assert self.params.get("AthenadPid") == b"123"
def test_params_get_block(self):
def _delayed_writer():
time.sleep(0.1)
self.params.put("CarParams", "test")
threading.Thread(target=_delayed_writer).start()
assert self.params.get("CarParams") is None
assert self.params.get("CarParams", True) == b"test"
def test_params_unknown_key_fails(self):
with self.assertRaises(UnknownKeyName):
self.params.get("swag")
with self.assertRaises(UnknownKeyName):
self.params.get_bool("swag")
with self.assertRaises(UnknownKeyName):
self.params.put("swag", "abc")
with self.assertRaises(UnknownKeyName):
self.params.put_bool("swag", True)
def test_params_permissions(self):
permissions = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH | stat.S_IWOTH
self.params.put("DongleId", "cb38263377b873ee")
st_mode = os.stat(f"{self.tmpdir}/d/DongleId").st_mode
assert (st_mode & permissions) == permissions
def test_delete_not_there(self):
assert self.params.get("CarParams") is None
self.params.delete("CarParams")
assert self.params.get("CarParams") is None
def test_get_bool(self):
self.params.delete("IsMetric")
self.assertFalse(self.params.get_bool("IsMetric"))
self.params.put_bool("IsMetric", True)
self.assertTrue(self.params.get_bool("IsMetric"))
self.params.put_bool("IsMetric", False)
self.assertFalse(self.params.get_bool("IsMetric"))
self.params.put("IsMetric", "1")
self.assertTrue(self.params.get_bool("IsMetric"))
self.params.put("IsMetric", "0")
self.assertFalse(self.params.get_bool("IsMetric"))
def test_put_non_blocking_with_get_block(self):
q = Params(self.tmpdir)
def _delayed_writer():
time.sleep(0.1)
put_nonblocking("CarParams", "test", self.tmpdir)
threading.Thread(target=_delayed_writer).start()
assert q.get("CarParams") is None
assert q.get("CarParams", True) == b"test"
if __name__ == "__main__":
unittest.main()

View File

@ -0,0 +1,47 @@
import os
import tempfile
import shutil
import unittest
from common.xattr import getxattr, setxattr, listxattr, removexattr
class TestParams(unittest.TestCase):
USER_TEST='user.test'
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
self.tmpfn = os.path.join(self.tmpdir, 'test.txt')
open(self.tmpfn, 'w').close()
#print("using", self.tmpfn)
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_getxattr_none(self):
a = getxattr(self.tmpfn, TestParams.USER_TEST)
assert a is None
def test_listxattr_none(self):
l = listxattr(self.tmpfn)
assert l == []
def test_setxattr(self):
setxattr(self.tmpfn, TestParams.USER_TEST, b'123')
a = getxattr(self.tmpfn, TestParams.USER_TEST)
assert a == b'123'
def test_listxattr(self):
setxattr(self.tmpfn, 'user.test1', b'123')
setxattr(self.tmpfn, 'user.test2', b'123')
l = listxattr(self.tmpfn)
assert l == ['user.test1', 'user.test2']
def test_removexattr(self):
setxattr(self.tmpfn, TestParams.USER_TEST, b'123')
a = getxattr(self.tmpfn, TestParams.USER_TEST)
assert a == b'123'
removexattr(self.tmpfn, TestParams.USER_TEST)
a = getxattr(self.tmpfn, TestParams.USER_TEST)
assert a is None
if __name__ == "__main__":
unittest.main()

View File

@ -5,18 +5,15 @@ import subprocess
from common.basedir import BASEDIR
class TextWindow():
def __init__(self, s, noop=False):
# text window is only implemented for android currently
self.text_proc = None
if not noop:
try:
self.text_proc = subprocess.Popen(["./text", s],
stdin=subprocess.PIPE,
cwd=os.path.join(BASEDIR, "selfdrive", "ui", "text"),
close_fds=True)
except OSError:
self.text_proc = None
class TextWindow:
def __init__(self, text):
try:
self.text_proc = subprocess.Popen(["./text", text],
stdin=subprocess.PIPE,
cwd=os.path.join(BASEDIR, "selfdrive", "ui"),
close_fds=True)
except OSError:
self.text_proc = None
def get_status(self):
if self.text_proc is not None:

View File

@ -0,0 +1,2 @@
transformations
transformations.cpp

View File

@ -0,0 +1,70 @@
Reference Frames
------
Many reference frames are used throughout. This
folder contains all helper functions needed to
transform between them. Generally this is done
by generating a rotation matrix and multiplying.
| Name | [x, y, z] | Units | Notes |
| :-------------: |:-------------:| :-----:| :----: |
| Geodetic | [Latitude, Longitude, Altitude] | geodetic coordinates | Sometimes used as [lon, lat, alt], avoid this frame. |
| ECEF | [x, y, z] | meters | We use **ITRF14 (IGS14)**, NOT NAD83. <br> This is the global Mesh3D frame. |
| NED | [North, East, Down] | meters | Relative to Earth's surface, useful for visualizing. |
| Device | [Forward, Right, Down] | meters | This is the Mesh3D local frame. <br> Relative to camera, **not imu.** <br> ![img](http://upload.wikimedia.org/wikipedia/commons/thumb/2/2f/RPY_angles_of_airplanes.png/440px-RPY_angles_of_airplanes.png)|
| Calibrated | [Forward, Right, Down] | meters | This is the frame the model outputs are in. <br> More details below. <br>|
| Car | [Forward, Right, Down] | meters | This is useful for estimating position of points on the road. <br> More details below. <br>|
| View | [Right, Down, Forward] | meters | Like device frame, but according to camera conventions. |
| Camera | [u, v, focal] | pixels | Like view frame, but 2d on the camera image.|
| Normalized Camera | [u / focal, v / focal, 1] | / | |
| Model | [u, v, focal] | pixels | The sampled rectangle of the full camera frame the model uses. |
| Normalized Model | [u / focal, v / focal, 1] | / | |
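
For example, converting between device frame [Forward, Right, Down] and view frame [Right, Down, Forward] is just an axis permutation. A minimal sketch derived from the axis definitions in the table above (the matrix name is illustrative):

```
import numpy as np

# view x = device y (Right), view y = device z (Down), view z = device x (Forward)
view_frame_from_device_frame = np.array([
  [0., 1., 0.],
  [0., 0., 1.],
  [1., 0., 0.]])

view_frame_from_device_frame.dot(np.array([1., 0., 0.]))  # forward in device frame -> [0., 0., 1.] in view frame
```
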
Orientation Conventions
------
Quaternions, rotation matrices and Euler angles are three
equivalent representations of orientation, and all three are
used throughout the code base.
For Euler angles the preferred convention is [roll, pitch, yaw],
which corresponds to rotations around the [x, y, z] axes. All
Euler angles should always be in radians or radians/s, except
for plotting or display purposes. For quaternions the Hamilton
notation is preferred, which is [q<sub>w</sub>, q<sub>x</sub>, q<sub>y</sub>, q<sub>z</sub>]. All quaternions
should always be normalized with a strictly positive q<sub>w</sub>. **These
quaternions are a unique representation of orientation, whereas Euler angles
and rotation matrices are not.**
To rotate from one frame into another with Euler angles, the
convention is to rotate around roll, then pitch and then yaw,
while rotating around the rotated axes, not the original axes.
Car frame
------
Device frame is aligned with the road-facing camera used by openpilot. However, when controlling the vehicle it is helpful to think in a reference frame aligned with the vehicle. These two reference frames can be different.
The orientation of car frame is defined to be aligned with the car's direction of travel and the road plane when the vehicle is driving on a flat road and not turning. The origin of car frame is defined to be directly below device frame (in car frame), such that it is on the road plane. The position and orientation of this frame are not necessarily always aligned with the direction of travel or the road plane due to suspension movements and other effects.
Calibrated frame
------
It is helpful for openpilot's driving model to take in images that look similar even when the device is mounted differently in different cars. To achieve this we "calibrate" the images by transforming them into calibrated frame. Calibrated frame is defined to be aligned with car frame in pitch and yaw, and aligned with device frame in roll. It also has the same origin as device frame.
Example
------
To transform global Mesh3D positions and orientations (positions_ecef, quats_ecef) into the local frame described by the
first position and orientation from Mesh3D one would do:
```
ecef_from_local = rot_from_quat(quats_ecef[0])
local_from_ecef = ecef_from_local.T
positions_local = np.einsum('ij,kj->ki', local_from_ecef, positions_ecef - positions_ecef[0])
rotations_global = rot_from_quat(quats_ecef)
rotations_local = np.einsum('ij,kjl->kil', local_from_ecef, rotations_global)
eulers_local = euler_from_rot(rotations_local)
```

View File

@ -1,8 +1,6 @@
Import('env', 'cython_dependencies')
Import('env', 'envCython')
d = Dir('.')
transformations = env.Library('transformations', ['orientation.cc', 'coordinates.cc'])
Export('transformations')
env.Command(['transformations.so'],
cython_dependencies + ['transformations.pxd', 'transformations.pyx',
'coordinates.cc', 'orientation.cc', 'coordinates.hpp', 'orientation.hpp'],
'cd ' + d.path + ' && python3 setup.py build_ext --inplace')
envCython.Program('transformations.so', 'transformations.pyx')

View File

@ -1,29 +1,67 @@
import numpy as np
import common.transformations.orientation as orient
FULL_FRAME_SIZE = (1164, 874)
W, H = FULL_FRAME_SIZE[0], FULL_FRAME_SIZE[1]
eon_focal_length = FOCAL = 910.0
import common.transformations.orientation as orient
from selfdrive.hardware import TICI
## -- hardcoded hardware params --
eon_f_focal_length = 910.0
eon_d_focal_length = 860.0
leon_d_focal_length = 650.0
tici_f_focal_length = 2648.0
tici_e_focal_length = tici_d_focal_length = 567.0 # probably wrong? magnification is not consistent across frame
eon_f_frame_size = (1164, 874)
eon_d_frame_size = (1152, 864)
leon_d_frame_size = (816, 612)
tici_f_frame_size = tici_e_frame_size = tici_d_frame_size = (1928, 1208)
# aka 'K' aka camera_frame_from_view_frame
eon_intrinsics = np.array([
[FOCAL, 0., W/2.],
[ 0., FOCAL, H/2.],
[ 0., 0., 1.]])
eon_fcam_intrinsics = np.array([
[eon_f_focal_length, 0.0, float(eon_f_frame_size[0])/2],
[0.0, eon_f_focal_length, float(eon_f_frame_size[1])/2],
[0.0, 0.0, 1.0]])
eon_intrinsics = eon_fcam_intrinsics # xx
leon_dcam_intrinsics = np.array([
[650, 0, 816//2],
[ 0, 650, 612//2],
[ 0, 0, 1]])
[leon_d_focal_length, 0.0, float(leon_d_frame_size[0])/2],
[0.0, leon_d_focal_length, float(leon_d_frame_size[1])/2],
[0.0, 0.0, 1.0]])
eon_dcam_intrinsics = np.array([
[860, 0, 1152//2],
[ 0, 860, 864//2],
[ 0, 0, 1]])
[eon_d_focal_length, 0.0, float(eon_d_frame_size[0])/2],
[0.0, eon_d_focal_length, float(eon_d_frame_size[1])/2],
[0.0, 0.0, 1.0]])
tici_fcam_intrinsics = np.array([
[tici_f_focal_length, 0.0, float(tici_f_frame_size[0])/2],
[0.0, tici_f_focal_length, float(tici_f_frame_size[1])/2],
[0.0, 0.0, 1.0]])
tici_dcam_intrinsics = np.array([
[tici_d_focal_length, 0.0, float(tici_d_frame_size[0])/2],
[0.0, tici_d_focal_length, float(tici_d_frame_size[1])/2],
[0.0, 0.0, 1.0]])
tici_ecam_intrinsics = tici_dcam_intrinsics
# aka 'K_inv' aka view_frame_from_camera_frame
eon_intrinsics_inv = np.linalg.inv(eon_intrinsics)
eon_fcam_intrinsics_inv = np.linalg.inv(eon_fcam_intrinsics)
eon_intrinsics_inv = eon_fcam_intrinsics_inv # xx
tici_fcam_intrinsics_inv = np.linalg.inv(tici_fcam_intrinsics)
tici_ecam_intrinsics_inv = np.linalg.inv(tici_ecam_intrinsics)
if not TICI:
FULL_FRAME_SIZE = eon_f_frame_size
FOCAL = eon_f_focal_length
fcam_intrinsics = eon_fcam_intrinsics
else:
FULL_FRAME_SIZE = tici_f_frame_size
FOCAL = tici_f_focal_length
fcam_intrinsics = tici_fcam_intrinsics
W, H = FULL_FRAME_SIZE[0], FULL_FRAME_SIZE[1]
# device/mesh : x->forward, y-> right, z->down
@ -69,9 +107,9 @@ def vp_from_ke(m):
return (m[0, 0]/m[2, 0], m[1, 0]/m[2, 0])
def vp_from_rpy(rpy):
def vp_from_rpy(rpy, intrinsics=fcam_intrinsics):
e = get_view_frame_from_road_frame(rpy[0], rpy[1], rpy[2], 1.22)
ke = np.dot(eon_intrinsics, e)
ke = np.dot(intrinsics, e)
return vp_from_ke(ke)
@ -81,7 +119,7 @@ def roll_from_ke(m):
-(m[0, 0] - m[0, 1] * m[2, 0] / m[2, 1]))
def normalize(img_pts, intrinsics=eon_intrinsics):
def normalize(img_pts, intrinsics=fcam_intrinsics):
# normalizes image coordinates
# accepts single pt or array of pts
intrinsics_inv = np.linalg.inv(intrinsics)
@ -94,7 +132,7 @@ def normalize(img_pts, intrinsics=eon_intrinsics):
return img_pts_normalized[:, :2].reshape(input_shape)
def denormalize(img_pts, intrinsics=eon_intrinsics):
def denormalize(img_pts, intrinsics=fcam_intrinsics, width=W, height=H):
# denormalizes image coordinates
# accepts single pt or array of pts
img_pts = np.array(img_pts)
@ -102,9 +140,9 @@ def denormalize(img_pts, intrinsics=eon_intrinsics):
img_pts = np.atleast_2d(img_pts)
img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0], 1))))
img_pts_denormalized = img_pts.dot(intrinsics.T)
img_pts_denormalized[img_pts_denormalized[:, 0] > W] = np.nan
img_pts_denormalized[img_pts_denormalized[:, 0] > width] = np.nan
img_pts_denormalized[img_pts_denormalized[:, 0] < 0] = np.nan
img_pts_denormalized[img_pts_denormalized[:, 1] > H] = np.nan
img_pts_denormalized[img_pts_denormalized[:, 1] > height] = np.nan
img_pts_denormalized[img_pts_denormalized[:, 1] < 0] = np.nan
return img_pts_denormalized[:, :2].reshape(input_shape)
@ -137,18 +175,10 @@ def img_from_device(pt_device):
return pt_img.reshape(input_shape)[:, :2]
def get_camera_frame_from_calib_frame(camera_frame_from_road_frame):
def get_camera_frame_from_calib_frame(camera_frame_from_road_frame, intrinsics=fcam_intrinsics):
camera_frame_from_ground = camera_frame_from_road_frame[:, (0, 1, 3)]
calib_frame_from_ground = np.dot(eon_intrinsics,
calib_frame_from_ground = np.dot(intrinsics,
get_view_frame_from_road_frame(0, 0, 0, 1.22))[:, (0, 1, 3)]
ground_from_calib_frame = np.linalg.inv(calib_frame_from_ground)
camera_frame_from_calib_frame = np.dot(camera_frame_from_ground, ground_from_calib_frame)
return camera_frame_from_calib_frame
def pretransform_from_calib(calib):
roll, pitch, yaw, height = calib
view_frame_from_road_frame = get_view_frame_from_road_frame(roll, pitch, yaw, height)
camera_frame_from_road_frame = np.dot(eon_intrinsics, view_frame_from_road_frame)
camera_frame_from_calib_frame = get_camera_frame_from_calib_frame(camera_frame_from_road_frame)
return np.linalg.inv(camera_frame_from_calib_frame)
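
A quick usage sketch of the normalize/denormalize helpers changed above; the values assume the EON road-camera intrinsics defined earlier in this file, and normalize maps pixel coordinates to unit-focal "normalized camera" coordinates while denormalize maps them back:

```
pt_img = [582., 437.]                                            # principal point of the 1164x874 EON frame
pt_norm = normalize(pt_img, intrinsics=eon_fcam_intrinsics)      # -> approximately [0., 0.]
pt_back = denormalize(pt_norm, intrinsics=eon_fcam_intrinsics)   # -> approximately [582., 437.]
```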

View File

@ -6,8 +6,6 @@
#include "coordinates.hpp"
#define DEG2RAD(x) ((x) * M_PI / 180.0)
#define RAD2DEG(x) ((x) * 180.0 / M_PI)
double a = 6378137; // lgtm [cpp/short-global-name]

View File

@ -1,5 +1,8 @@
#pragma once
#define DEG2RAD(x) ((x) * M_PI / 180.0)
#define RAD2DEG(x) ((x) * 180.0 / M_PI)
struct ECEF {
double x, y, z;
Eigen::Vector3d to_vector(){
@ -9,6 +12,9 @@ struct ECEF {
struct NED {
double n, e, d;
Eigen::Vector3d to_vector(){
return Eigen::Vector3d(n, e, d);
}
};
struct Geodetic {

View File

@ -1,34 +1,33 @@
import numpy as np
from common.transformations.camera import (FULL_FRAME_SIZE, eon_focal_length,
from common.transformations.camera import (FULL_FRAME_SIZE,
FOCAL,
get_view_frame_from_road_frame,
get_view_frame_from_calib_frame,
vp_from_ke)
# segnet
SEGNET_SIZE = (512, 384)
segnet_frame_from_camera_frame = np.array([
[float(SEGNET_SIZE[0])/FULL_FRAME_SIZE[0], 0., ],
[ 0., float(SEGNET_SIZE[1])/FULL_FRAME_SIZE[1]]])
def get_segnet_frame_from_camera_frame(segnet_size=SEGNET_SIZE, full_frame_size=FULL_FRAME_SIZE):
return np.array([[float(segnet_size[0]) / full_frame_size[0], 0.0],
[0.0, float(segnet_size[1]) / full_frame_size[1]]])
segnet_frame_from_camera_frame = get_segnet_frame_from_camera_frame() # xx
# model
MODEL_INPUT_SIZE = (320, 160)
MODEL_YUV_SIZE = (MODEL_INPUT_SIZE[0], MODEL_INPUT_SIZE[1] * 3 // 2)
MODEL_CX = MODEL_INPUT_SIZE[0]/2.
MODEL_CX = MODEL_INPUT_SIZE[0] / 2.
MODEL_CY = 21.
model_zoom = 1.25
model_fl = 728.0
model_height = 1.22
# canonical model transform
model_intrinsics = np.array(
[[ eon_focal_length / model_zoom, 0. , MODEL_CX],
[ 0. , eon_focal_length / model_zoom, MODEL_CY],
[ 0. , 0. , 1.]])
model_intrinsics = np.array([
[model_fl, 0.0, MODEL_CX],
[0.0, model_fl, MODEL_CY],
[0.0, 0.0, 1.0]])
# MED model
@ -36,34 +35,45 @@ MEDMODEL_INPUT_SIZE = (512, 256)
MEDMODEL_YUV_SIZE = (MEDMODEL_INPUT_SIZE[0], MEDMODEL_INPUT_SIZE[1] * 3 // 2)
MEDMODEL_CY = 47.6
medmodel_zoom = 1.
medmodel_intrinsics = np.array(
[[ eon_focal_length / medmodel_zoom, 0. , 0.5 * MEDMODEL_INPUT_SIZE[0]],
[ 0. , eon_focal_length / medmodel_zoom, MEDMODEL_CY],
[ 0. , 0. , 1.]])
medmodel_fl = 910.0
medmodel_intrinsics = np.array([
[medmodel_fl, 0.0, 0.5 * MEDMODEL_INPUT_SIZE[0]],
[0.0, medmodel_fl, MEDMODEL_CY],
[0.0, 0.0, 1.0]])
# CAL model
CALMODEL_INPUT_SIZE = (512, 256)
CALMODEL_YUV_SIZE = (CALMODEL_INPUT_SIZE[0], CALMODEL_INPUT_SIZE[1] * 3 // 2)
CALMODEL_CY = 47.6
calmodel_zoom = 1.5
calmodel_intrinsics = np.array(
[[ eon_focal_length / calmodel_zoom, 0. , 0.5 * CALMODEL_INPUT_SIZE[0]],
[ 0. , eon_focal_length / calmodel_zoom, CALMODEL_CY],
[ 0. , 0. , 1.]])
calmodel_fl = 606.7
calmodel_intrinsics = np.array([
[calmodel_fl, 0.0, 0.5 * CALMODEL_INPUT_SIZE[0]],
[0.0, calmodel_fl, CALMODEL_CY],
[0.0, 0.0, 1.0]])
# BIG model
BIGMODEL_INPUT_SIZE = (1024, 512)
BIGMODEL_YUV_SIZE = (BIGMODEL_INPUT_SIZE[0], BIGMODEL_INPUT_SIZE[1] * 3 // 2)
bigmodel_zoom = 1.
bigmodel_intrinsics = np.array(
[[ eon_focal_length / bigmodel_zoom, 0. , 0.5 * BIGMODEL_INPUT_SIZE[0]],
[ 0. , eon_focal_length / bigmodel_zoom, 256+MEDMODEL_CY],
[ 0. , 0. , 1.]])
bigmodel_fl = 910.0
bigmodel_intrinsics = np.array([
[bigmodel_fl, 0.0, 0.5 * BIGMODEL_INPUT_SIZE[0]],
[0.0, bigmodel_fl, 256 + MEDMODEL_CY],
[0.0, 0.0, 1.0]])
# SBIG model (big model with the size of small model)
SBIGMODEL_INPUT_SIZE = (512, 256)
SBIGMODEL_YUV_SIZE = (SBIGMODEL_INPUT_SIZE[0], SBIGMODEL_INPUT_SIZE[1] * 3 // 2)
sbigmodel_fl = 455.0
sbigmodel_intrinsics = np.array([
[sbigmodel_fl, 0.0, 0.5 * SBIGMODEL_INPUT_SIZE[0]],
[0.0, sbigmodel_fl, 0.5 * (256 + MEDMODEL_CY)],
[0.0, 0.0, 1.0]])
model_frame_from_road_frame = np.dot(model_intrinsics,
get_view_frame_from_road_frame(0, 0, 0, model_height))
@ -80,20 +90,21 @@ medmodel_frame_from_calib_frame = np.dot(medmodel_intrinsics,
model_frame_from_bigmodel_frame = np.dot(model_intrinsics, np.linalg.inv(bigmodel_intrinsics))
medmodel_frame_from_bigmodel_frame = np.dot(medmodel_intrinsics, np.linalg.inv(bigmodel_intrinsics))
# 'camera from model camera'
def get_model_height_transform(camera_frame_from_road_frame, height):
camera_frame_from_road_ground = np.dot(camera_frame_from_road_frame, np.array([
[1, 0, 0],
[0, 1, 0],
[0, 0, 0],
[0, 0, 1],
[1, 0, 0],
[0, 1, 0],
[0, 0, 0],
[0, 0, 1],
]))
camera_frame_from_road_high = np.dot(camera_frame_from_road_frame, np.array([
[1, 0, 0],
[0, 1, 0],
[0, 0, height - model_height],
[0, 0, 1],
[1, 0, 0],
[0, 1, 0],
[0, 0, height - model_height],
[0, 0, 1],
]))
road_high_from_camera_frame = np.linalg.inv(camera_frame_from_road_high)
@ -104,13 +115,14 @@ def get_model_height_transform(camera_frame_from_road_frame, height):
# camera_frame_from_model_frame aka 'warp matrix'
# was: calibration.h/CalibrationTransform
def get_camera_frame_from_model_frame(camera_frame_from_road_frame, height=model_height):
def get_camera_frame_from_model_frame(camera_frame_from_road_frame, height=model_height, camera_fl=FOCAL):
vp = vp_from_ke(camera_frame_from_road_frame)
model_zoom = camera_fl / model_fl
model_camera_from_model_frame = np.array([
[model_zoom, 0., vp[0] - MODEL_CX * model_zoom],
[ 0., model_zoom, vp[1] - MODEL_CY * model_zoom],
[ 0., 0., 1.],
[model_zoom, 0.0, vp[0] - MODEL_CX * model_zoom],
[0.0, model_zoom, vp[1] - MODEL_CY * model_zoom],
[0.0, 0.0, 1.0],
])
# This function is super slow, so skip it if height is very close to canonical
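
For reference, the explicit focal lengths introduced above are consistent with the old `eon_focal_length / zoom` definitions; a quick check, assuming eon_focal_length = 910.0:

```
eon_focal_length = 910.0
assert eon_focal_length / 1.25 == 728.0            # model_fl (old model_zoom = 1.25)
assert eon_focal_length / 1.0 == 910.0             # medmodel_fl and bigmodel_fl (old zoom = 1.)
assert round(eon_focal_length / 1.5, 1) == 606.7   # calmodel_fl (old calmodel_zoom = 1.5)
```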

View File

@ -30,7 +30,8 @@ Eigen::Vector3d quat2euler(Eigen::Quaterniond quat){
// Eigen::Vector3d euler = quat.toRotationMatrix().eulerAngles(2, 1, 0);
// return {euler(2), euler(1), euler(0)};
double gamma = atan2(2 * (quat.w() * quat.x() + quat.y() * quat.z()), 1 - 2 * (quat.x()*quat.x() + quat.y()*quat.y()));
double theta = asin(2 * (quat.w() * quat.y() - quat.z() * quat.x()));
double asin_arg_clipped = std::clamp(2 * (quat.w() * quat.y() - quat.z() * quat.x()), -1.0, 1.0);
double theta = asin(asin_arg_clipped);
double psi = atan2(2 * (quat.w() * quat.z() + quat.x() * quat.y()), 1 - 2 * (quat.y()*quat.y() + quat.z()*quat.z()));
return {gamma, theta, psi};
}
@ -39,7 +40,7 @@ Eigen::Matrix3d quat2rot(Eigen::Quaterniond quat){
return quat.toRotationMatrix();
}
Eigen::Quaterniond rot2quat(Eigen::Matrix3d rot){
Eigen::Quaterniond rot2quat(const Eigen::Matrix3d &rot){
return ensure_unique(Eigen::Quaterniond(rot));
}
@ -47,7 +48,7 @@ Eigen::Matrix3d euler2rot(Eigen::Vector3d euler){
return quat2rot(euler2quat(euler));
}
Eigen::Vector3d rot2euler(Eigen::Matrix3d rot){
Eigen::Vector3d rot2euler(const Eigen::Matrix3d &rot){
return quat2euler(rot2quat(rot));
}
@ -141,7 +142,3 @@ Eigen::Vector3d ned_euler_from_ecef(ECEF ecef_init, Eigen::Vector3d ecef_pose){
return {phi, theta, psi};
}
int main(void){
}

View File

@ -8,9 +8,9 @@ Eigen::Quaterniond ensure_unique(Eigen::Quaterniond quat);
Eigen::Quaterniond euler2quat(Eigen::Vector3d euler);
Eigen::Vector3d quat2euler(Eigen::Quaterniond quat);
Eigen::Matrix3d quat2rot(Eigen::Quaterniond quat);
Eigen::Quaterniond rot2quat(Eigen::Matrix3d rot);
Eigen::Quaterniond rot2quat(const Eigen::Matrix3d &rot);
Eigen::Matrix3d euler2rot(Eigen::Vector3d euler);
Eigen::Vector3d rot2euler(Eigen::Matrix3d rot);
Eigen::Vector3d rot2euler(const Eigen::Matrix3d &rot);
Eigen::Matrix3d rot_matrix(double roll, double pitch, double yaw);
Eigen::Matrix3d rot(Eigen::Vector3d axis, double angle);
Eigen::Vector3d ecef_euler_from_ned(ECEF ecef_init, Eigen::Vector3d ned_pose);

Some files were not shown because too many files have changed in this diff.