diff --git a/.dockerignore b/.dockerignore
index e0503085d..0c1ba2f12 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -12,6 +12,7 @@
!requirements.txt
!requirements_test.txt
!requirements-3.9.txt
+!docker/azure-agent-start.sh
!docker/opencv-python-headless-setup.py
!docker/jetson-nano/opencv-tuple.patch
!rootfs/
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 22ec850fd..39d1a43be 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -54,7 +54,7 @@ jobs:
uses: ./.github/templates/run_in_venv
with:
command: |
- pip3 install --extra-index-url https://download.pytorch.org/whl/cpu -r requirements.txt -r requirements_test.txt -r requirements_ci.txt && \
+ pip3 install --extra-index-url https://download.pytorch.org/whl/cpu --extra-index-url https://pkgs.dev.azure.com/viseron/Viseron%20Pipelines/_packaging/viseron-wheels/pypi/simple -r requirements.txt -r requirements_test.txt -r requirements_ci.txt && \
pre-commit install
prepare-pre-commit:
@@ -488,6 +488,21 @@ jobs:
sudo add-apt-repository "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main"
sudo apt-get update
sudo apt-get install -y --no-install-recommends libedgetpu1-std python3-gi python3-gst-1.0
+ - name: Install libhailort for hailo docs
+ env:
+ AZURE_DEVOPS_EXT_PAT: ${{ secrets.AZURE_DEVOPS_EXT_PAT }}
+ run: |
+ set -e
+ HAILO_VERSION=$(grep '^HAILO_VERSION=' azure-pipelines/.env | cut -d'"' -f2)
+ echo "Logging in to Azure DevOps"
+ echo "$AZURE_DEVOPS_EXT_PAT" | az devops login --organization https://dev.azure.com/viseron || true
+ echo "Downloading libhailort-amd64 version $HAILO_VERSION"
+ az artifacts universal download --organization https://dev.azure.com/viseron/ --project="Viseron Pipelines" --scope project --feed viseron-binaries --name libhailort-amd64 --version "$HAILO_VERSION" --path libhailort_pkg
+ echo "Installing libhailort to /usr/local/lib"
+ sudo cp libhailort_pkg/libhailort.so.* /usr/local/lib/
+ sudo ldconfig
+ ls -l /usr/local/lib/libhailort.so.*
+ az devops logout
- name: Run script to check generated docs
uses: ./.github/templates/run_in_venv
with:
diff --git a/.gitignore b/.gitignore
index 9d5ebf2d3..4822a345c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -37,9 +37,12 @@
!/docker/jetson-nano/tflite_runtime-2.5.0.post1-cp38-cp38-linux_aarch64.whl
!/docker/rpi3
!/docker/rpi3/Dockerfile*
+!/docker/rpi5
+!/docker/rpi5/Dockerfile*
!/docker/ffprobe_wrapper
!/docker/ffmpeg_wrapper
!/docker/yolov4*.cfg
+!/docker/azure-agent-start.sh
!/docker/opencv-python-headless-setup.py
!/docker/jetson-nano/opencv-tuple.patch
!/rootfs/
diff --git a/.mypy.ini b/.mypy.ini
index 18052b636..aa6b6be73 100644
--- a/.mypy.ini
+++ b/.mypy.ini
@@ -46,9 +46,15 @@ ignore_missing_imports = true
[mypy-gi.repository.*]
ignore_missing_imports = true
+[mypy-hailo_platform.*]
+ignore_missing_imports = true
+
[mypy-imutils.*]
ignore_missing_imports = true
+[mypy-jinja2.*]
+ignore_missing_imports = true
+
[mypy-path.*]
ignore_missing_imports = true
diff --git a/azure-pipelines/.env b/azure-pipelines/.env
index 7fe7ecfd9..1663a9252 100644
--- a/azure-pipelines/.env
+++ b/azure-pipelines/.env
@@ -1,12 +1,13 @@
UBUNTU_VERSION="jammy"
UBUNTU_VERSION_NUMBER="22.04"
-BASE_VERSION="1.8.0"
+BASE_VERSION="1.9.0"
OPENCV_VERSION="4.10.0"
OPENVINO_VERSION="2024.0.0"
FFMPEG_VERSION="5.1.2"
DLIB_VERSION="19.24.4"
+HAILO_VERSION="4.22.0"
SKLEARN_VERSION="1.2.2"
-WHEELS_VERSION="1.7"
+WHEELS_VERSION="1.8"
S6_OVERLAY_VERSION="2.1.0.2"
CMAKE_VERSION=3.20.0
MAKEFLAGS="-j 5"
@@ -23,7 +24,7 @@ JETPACK_VERSION_MAJOR=32
JETPACK_VERSION_MINOR=6
JETPACK_VERSION_PATCH=1
JETSON_NANO_FFMPEG_VERSION="4.2.2"
-JETSON_NANO_FFMPEG_APT_VERSION="7:4.2.2-nvidia"
+JETSON_NANO_FFMPEG_APT_VERSION="9:3.4.11-2ubuntu0.1l4t"
CUDA_VERSION=12.3.2
DARKNET_COMMIT=27b37bf
GPAC_VERSION=2.4.0
diff --git a/azure-pipelines/azure-pipelines-tools.yml b/azure-pipelines/azure-pipelines-tools.yml
index 0f7270528..e54e086f4 100644
--- a/azure-pipelines/azure-pipelines-tools.yml
+++ b/azure-pipelines/azure-pipelines-tools.yml
@@ -30,3 +30,86 @@ jobs:
architectures:
- amd64-cuda
- jetson-nano
+ - template: templates/build.yaml
+ parameters:
+ image: hailo
+ noCache: ${{ parameters.noCache }}
+ architectures:
+ - amd64
+ - aarch64
+
+ # Publish hailo wheels and libhailort packages
+ - job: publish_hailo_artifacts
+ displayName: Publish Hailo artifacts
+ dependsOn:
+ - build_hailo_amd64
+ - build_hailo_aarch64
+ condition: succeeded()
+ strategy:
+ maxParallel: 1
+ matrix:
+ amd64:
+ ARCH: amd64
+ aarch64:
+ ARCH: aarch64
+ pool:
+ vmImage: ubuntu-latest
+ steps:
+ - task: Docker@2
+ displayName: Login to Docker Hub
+ inputs:
+ command: login
+ containerRegistry: "Docker Hub"
+ - script: |
+ set -e
+ ARCH=$(ARCH)
+ HAILO_VERSION=$(grep '^HAILO_VERSION=' azure-pipelines/.env | cut -d'"' -f2)
+ if [ -z "$HAILO_VERSION" ]; then
+ echo "Failed to determine HAILO_VERSION" >&2
+ exit 1
+ fi
+ IMAGE_NAME="roflcoopter/${ARCH}-hailo:${HAILO_VERSION}"
+ echo "Pulling $IMAGE_NAME"
+ docker pull $IMAGE_NAME
+ CID=$(docker create "$IMAGE_NAME" bash)
+
+ mkdir -p hailo-wheels/${ARCH}
+ docker cp $CID:/wheels/. hailo-wheels/${ARCH}/
+
+ # Prepare libhailort universal package directory per architecture
+ mkdir -p libhailort-dist-${ARCH}
+ docker cp $CID:/usr/local/lib/libhailort.so.${HAILO_VERSION} libhailort-dist-${ARCH}/
+
+ docker rm $CID
+ echo "Extracted files:"; ls -1 hailo-wheels/${ARCH}
+ echo "libhailort files:"; ls -1 libhailort-dist-${ARCH}
+ echo "##vso[task.setvariable variable=HAILO_VERSION]$HAILO_VERSION"
+ displayName: Extract wheels
+ - task: TwineAuthenticate@1
+ displayName: Authenticate to Azure Artifacts (Python feed)
+ inputs:
+ artifactFeed: "Viseron Pipelines/viseron-wheels"
+ - script: |
+ set -e
+ ARCH=$(ARCH)
+ python3 -m pip install --upgrade pip
+ python3 -m pip install --no-cache-dir twine==6.1.0
+ echo "Uploading wheels to Azure Artifacts feed 'viseron-wheels'"
+ python3 -m twine upload --skip-existing --config-file $(PYPIRC_PATH) -r viseron-wheels hailo-wheels/${ARCH}/*.whl
+ displayName: Upload wheels
+ - task: UniversalPackages@0
+ displayName: Publish libhailort universal package
+ inputs:
+ command: publish
+ publishDirectory: libhailort-dist-$(ARCH)
+ vstsFeedPublish: "Viseron Pipelines/viseron-binaries"
+ vstsFeedPackagePublish: libhailort-$(ARCH)
+ versionOption: custom
+ versionPublish: $(HAILO_VERSION)
+ packagePublishDescription: "libhailort shared library ($(ARCH)) for Hailo version $(HAILO_VERSION)"
+ - task: Docker@2
+ displayName: Logoff Docker Hub
+ inputs:
+ command: logout
+ containerRegistry: "Docker Hub"
+ condition: always()
diff --git a/azure-pipelines/docker-compose-build.yaml b/azure-pipelines/docker-compose-build.yaml
index c3c1da518..987a18b0b 100644
--- a/azure-pipelines/docker-compose-build.yaml
+++ b/azure-pipelines/docker-compose-build.yaml
@@ -78,6 +78,18 @@ services:
- roflcoopter/amd64-dlib:$DLIB_VERSION
image: roflcoopter/amd64-dlib:$DLIB_VERSION
+ amd64-hailo:
+ build:
+ context: ..
+ dockerfile: ./docker/Dockerfile.hailo
+ args:
+ BUILD_FROM: ubuntu:$UBUNTU_VERSION
+ MAKEFLAGS: "$MAKEFLAGS"
+ HAILO_VERSION: "$HAILO_VERSION"
+ cache_from:
+ - roflcoopter/amd64-hailo:$HAILO_VERSION
+ image: roflcoopter/amd64-hailo:$HAILO_VERSION
+
amd64-wheels:
build:
context: ..
@@ -86,6 +98,7 @@ services:
ARCH: amd64
BUILD_FROM: ubuntu:$UBUNTU_VERSION
DLIB_VERSION: "$DLIB_VERSION"
+ EXTRA_PIP_ARGS: --extra-index-url https://download.pytorch.org/whl/cpu
OPENCV_VERSION: "$OPENCV_VERSION"
cache_from:
- roflcoopter/amd64-wheels:$WHEELS_VERSION
@@ -102,6 +115,7 @@ services:
IGC_VERSION: "$IGC_VERSION"
LEVEL_ZERO_GPU: "$LEVEL_ZERO_GPU"
OPENCL_SHA256_FILENAME: "$OPENCL_SHA256_FILENAME"
+ HAILO_VERSION: "$HAILO_VERSION"
context: ..
dockerfile: ./docker/amd64/Dockerfile.base
cache_from:
@@ -245,6 +259,7 @@ services:
IGC_VERSION: "$IGC_VERSION"
LEVEL_ZERO_GPU: "$LEVEL_ZERO_GPU"
OPENCL_SHA256_FILENAME: "$OPENCL_SHA256_FILENAME"
+ HAILO_VERSION: "$HAILO_VERSION"
cache_from:
- roflcoopter/amd64-cuda-base:$BASE_VERSION
image: roflcoopter/amd64-cuda-base:$BASE_VERSION
@@ -416,6 +431,17 @@ services:
- roflcoopter/aarch64-dlib:$DLIB_VERSION
image: roflcoopter/aarch64-dlib:$DLIB_VERSION
+ aarch64-hailo:
+ build:
+ context: ..
+ dockerfile: ./docker/Dockerfile.hailo
+ args:
+ BUILD_FROM: ubuntu:$UBUNTU_VERSION
+ MAKEFLAGS: "$MAKEFLAGS"
+ HAILO_VERSION: "$HAILO_VERSION"
+ cache_from:
+ - roflcoopter/aarch64-hailo:$HAILO_VERSION
+ image: roflcoopter/aarch64-hailo:$HAILO_VERSION
aarch64-wheels:
build:
context: ..
@@ -436,6 +462,7 @@ services:
args:
UBUNTU_VERSION: "$UBUNTU_VERSION"
FFMPEG_VERSION: "$FFMPEG_VERSION"
+ HAILO_VERSION: "$HAILO_VERSION"
cache_from:
- roflcoopter/aarch64-base:$BASE_VERSION
image: roflcoopter/aarch64-base:$BASE_VERSION
@@ -615,6 +642,19 @@ services:
- roflcoopter/jetson-nano-viseron:dev
image: roflcoopter/jetson-nano-viseron:dev
+ ################ RaspberryPi 5 #####################################
+ rpi5-azure-agent:
+ build:
+ context: ..
+ dockerfile: ./docker/rpi5/Dockerfile.azure
+ args:
+ TARGETARCH: "linux-arm64"
+ UBUNTU_VERSION: "$UBUNTU_VERSION"
+ cache_from:
+ - roflcoopter/rpi5-azure-agent:latest
+ image: roflcoopter/rpi5-azure-agent:latest
+
+ ################### Other ##########################################
models:
build:
context: ..
diff --git a/azure-pipelines/templates/build.yaml b/azure-pipelines/templates/build.yaml
index 6245ba9fe..f0cbd4639 100644
--- a/azure-pipelines/templates/build.yaml
+++ b/azure-pipelines/templates/build.yaml
@@ -30,160 +30,159 @@ parameters:
default: false
jobs:
- - job: "build_${{ parameters.image }}"
- variables:
- ${{ if eq(parameters.noCache, true) }}:
- noCacheOption: "--no-cache"
- ${{ if ne(parameters.noCache, true) }}:
- noCacheOption: ""
- timeoutInMinutes: ${{ parameters.timeoutJob }}
- strategy:
- matrix:
- ${{ each architecture in parameters.architectures }}:
- ${{ architecture }}:
- arch: ${{ architecture }}
- pool:
- vmImage: "ubuntu-latest"
- steps:
- - template: release_version.yaml
- parameters:
- release: ${{ parameters.release }}
- - task: Docker@2
- displayName: Login to Docker Hub
- inputs:
- command: login
- containerRegistry: "Docker Hub"
-
- - script: |
- df -h
- displayName: List free space before cleaning
- - script: |
- docker rmi -f $(docker images -aq) || true
- displayName: Clean up Docker images
- - script: |
- docker system prune --force --all --volumes
- displayName: Docker prune
- - script: |
- sudo rm -rf /usr/local/lib/android
- sudo rm -rf /usr/local/.ghcup
- sudo rm -rf /opt/hostedtoolcache/CodeQL
- displayName: Remove unused files
- - script: |
- df -h
- displayName: List free space after cleaning
-
- - script: docker run --rm --privileged tonistiigi/binfmt --install all
- displayName: Register QEMU for cross-builds
- condition: and(succeeded(), eq('${{ parameters.crossBuild }}', true))
-
- # - script: |
- # cd $(Agent.BuildDirectory)/s/docker
- # docker compose --file ../azure-pipelines/docker-compose-build.yaml --env-file ../azure-pipelines/.env pull $(arch)-${{ parameters.image }}
- # displayName: Pull image for Docker layer caching
- # continueOnError: true
- # condition: and(succeeded(), eq('${{ parameters.imageNameOnly }}', false))
- # - script: |
- # cd $(Agent.BuildDirectory)/s/docker
- # docker compose --file ../azure-pipelines/docker-compose-build.yaml --env-file ../azure-pipelines/.env pull ${{ parameters.image }}
- # displayName: Pull image for Docker layer caching
- # continueOnError: true
- # condition: and(succeeded(), eq('${{ parameters.imageNameOnly }}', true))
-
- - script: >
- cd $(Agent.BuildDirectory)/s/docker &&
- docker compose --file ../azure-pipelines/docker-compose-build.yaml
- --env-file ../azure-pipelines/.env
- build $(noCacheOption)
- --build-arg BUILDKIT_INLINE_CACHE=1
- --build-arg VISERON_VERSION=$(viseronVersion)
- --build-arg VISERON_GIT_COMMIT=$(Build.SourceVersion)
- $(arch)-${{ parameters.image }}
- displayName: Build $(arch)-${{ parameters.image }}
- condition: and(succeeded(), eq('${{ parameters.imageNameOnly }}', false))
- env:
- DOCKER_BUILDKIT: 1
- COMPOSE_DOCKER_CLI_BUILD: 1
- BUILDKIT_PROGRESS: plain
- - script: >
- cd $(Agent.BuildDirectory)/s/docker &&
- docker compose --file ../azure-pipelines/docker-compose-build.yaml
- --env-file ../azure-pipelines/.env
- build $(noCacheOption)
- --build-arg BUILDKIT_INLINE_CACHE=1
- --build-arg VISERON_VERSION=$(viseronVersion)
- --build-arg VISERON_GIT_COMMIT=$(Build.SourceVersion)
- ${{ parameters.image }}
- displayName: Build ${{ parameters.image }}
- condition: and(succeeded(), eq('${{ parameters.imageNameOnly }}', true))
- env:
- DOCKER_BUILDKIT: 1
- COMPOSE_DOCKER_CLI_BUILD: 1
- BUILDKIT_PROGRESS: plain
-
- # Runs only for non-release triggers
- - script: |
- cd $(Agent.BuildDirectory)/s/docker
- docker compose --file ../azure-pipelines/docker-compose-build.yaml --env-file ../azure-pipelines/.env push $(arch)-${{ parameters.image }}
-
- # Extract the tag and store it in an environment variable
- IMAGE_TAG=$(docker compose --file ../azure-pipelines/docker-compose-build.yaml --env-file ../azure-pipelines/.env config | grep "$(arch)-${{ parameters.image }}" -A2 | grep "image:" | sed 's/.*://g' | tr -d ' ' | cut -d':' -f2)
- echo "Extracted IMAGE_TAG: $IMAGE_TAG"
-
- # Push tag with commit hash
- docker image tag roflcoopter/$(arch)-${{ parameters.image }}:${IMAGE_TAG} roflcoopter/$(arch)-${{ parameters.image }}:$(Build.SourceVersion)
- docker image push roflcoopter/$(arch)-${{ parameters.image }}:$(Build.SourceVersion)
-
- # Push tag with timestamp
- now=$(date -u +"%Y%m%d%H%M%S")
- docker image tag roflcoopter/$(arch)-${{ parameters.image }}:${IMAGE_TAG} roflcoopter/$(arch)-${{ parameters.image }}:$now
- docker image push roflcoopter/$(arch)-${{ parameters.image }}:$now
- displayName: Push $(arch)-${{ parameters.image }} (non-release)
- condition: and(succeeded(), eq('${{ parameters.imageNameOnly }}', false), eq('${{ parameters.release }}', false))
- - script: |
- cd $(Agent.BuildDirectory)/s/docker
- docker compose --file ../azure-pipelines/docker-compose-build.yaml --env-file ../azure-pipelines/.env push ${{ parameters.image }}
-
- # Extract the tag and store it in an environment variable
- IMAGE_TAG=$(docker compose --file ../azure-pipelines/docker-compose-build.yaml --env-file ../azure-pipelines/.env config | grep "$(arch)-${{ parameters.image }}" -A2 | grep "image:" | sed 's/.*://g' | tr -d ' ' | cut -d':' -f2)
- echo "Extracted IMAGE_TAG: $IMAGE_TAG"
-
- # Push tag with commit hash
- docker image tag roflcoopter/${{ parameters.image }}:${IMAGE_TAG} roflcoopter/${{ parameters.image }}:$(Build.SourceVersion)
- docker image push roflcoopter/${{ parameters.image }}:$(Build.SourceVersion)
-
- # Push tag with timestamp
- now=$(date -u +"%Y%m%d%H%M%S")
- docker image tag roflcoopter/${{ parameters.image }}:${IMAGE_TAG} roflcoopter/${{ parameters.image }}:$now
- docker image push roflcoopter/${{ parameters.image }}:$now
- displayName: Push ${{ parameters.image }} (non-release)
- condition: and(succeeded(), eq('${{ parameters.imageNameOnly }}', true), eq('${{ parameters.release }}', false))
-
- # Runs only for release triggers
- - script: |
- echo tagging roflcoopter/$(arch)-${{ parameters.image }}:$(viseronVersion)
- docker image tag roflcoopter/$(arch)-${{ parameters.image }}:dev roflcoopter/$(arch)-${{ parameters.image }}:$(viseronVersion)
- docker image push roflcoopter/$(arch)-${{ parameters.image }}:$(viseronVersion)
-
- # Push tag with commit hash
- docker image tag roflcoopter/$(arch)-${{ parameters.image }}:dev roflcoopter/$(arch)-${{ parameters.image }}:$(viseronVersion)-$(Build.SourceVersion)
- docker image push roflcoopter/$(arch)-${{ parameters.image }}:$(viseronVersion)-$(Build.SourceVersion)
-
- # Push tag with timestamp
- now=$(date -u +"%Y%m%d%H%M%S")
- docker image tag roflcoopter/$(arch)-${{ parameters.image }}:dev roflcoopter/$(arch)-${{ parameters.image }}:$(viseronVersion)-$now
- docker image push roflcoopter/$(arch)-${{ parameters.image }}:$(viseronVersion)-$now
-
- if [ $(latestRelease) = true ] ; then
- echo tagging roflcoopter/$(arch)-${{ parameters.image }}:latest
- docker image tag roflcoopter/$(arch)-${{ parameters.image }}:dev roflcoopter/$(arch)-${{ parameters.image }}:latest
- docker image push roflcoopter/$(arch)-${{ parameters.image }}:latest
- fi
- displayName: Push $(arch)-${{ parameters.image }} (release)
- condition: and(succeeded(), eq('${{ parameters.release }}', true))
-
- - task: Docker@2
- displayName: Logoff Docker Hub
- inputs:
- command: logout
- containerRegistry: "Docker Hub"
- condition: always()
+ - ${{ each architecture in parameters.architectures }}:
+ - job: build_${{ replace(parameters.image, '-', '_') }}_${{ replace(architecture, '-', '_') }}
+ timeoutInMinutes: ${{ parameters.timeoutJob }}
+ variables:
+ ${{ if eq(parameters.noCache, true) }}:
+ noCacheOption: "--no-cache"
+ ${{ if ne(parameters.noCache, true) }}:
+ noCacheOption: ""
+ pool:
+ ${{ if or(eq(architecture, 'aarch64'), eq(architecture, 'jetson-nano'), eq(architecture, 'rpi5')) }}:
+ name: rpi5
+ ${{ else }}:
+ vmImage: ubuntu-latest
+ steps:
+ - template: release_version.yaml
+ parameters:
+ release: ${{ parameters.release }}
+ - task: Docker@2
+ displayName: Login to Docker Hub
+ inputs:
+ command: login
+ containerRegistry: "Docker Hub"
+
+ - script: |
+ df -h
+ displayName: List free space before cleaning
+ - script: |
+ docker rmi -f $(docker images -aq) || true
+ displayName: Clean up Docker images
+ - script: |
+ docker system prune --force --all --volumes
+ displayName: Docker prune
+ - script: |
+ sudo rm -rf /usr/local/lib/android || true
+ sudo rm -rf /usr/local/.ghcup || true
+ sudo rm -rf /opt/hostedtoolcache/CodeQL || true
+ displayName: Remove unused files
+ - script: |
+ df -h
+ displayName: List free space after cleaning
+
+ - script: docker run --rm --privileged tonistiigi/binfmt --install all
+ displayName: Register QEMU for cross-builds
+ condition: and(succeeded(), eq('${{ parameters.crossBuild }}', true))
+
+ # - script: |
+ # cd $(Agent.BuildDirectory)/s/docker
+ # docker compose --file ../azure-pipelines/docker-compose-build.yaml --env-file ../azure-pipelines/.env pull ${{ architecture }}-${{ parameters.image }}
+ # displayName: Pull image for Docker layer caching
+ # continueOnError: true
+ # condition: and(succeeded(), eq('${{ parameters.imageNameOnly }}', false))
+ # - script: |
+ # cd $(Agent.BuildDirectory)/s/docker
+ # docker compose --file ../azure-pipelines/docker-compose-build.yaml --env-file ../azure-pipelines/.env pull ${{ parameters.image }}
+ # displayName: Pull image for Docker layer caching
+ # continueOnError: true
+ # condition: and(succeeded(), eq('${{ parameters.imageNameOnly }}', true))
+
+ - script: >
+ cd $(Agent.BuildDirectory)/s/docker &&
+ docker compose --file ../azure-pipelines/docker-compose-build.yaml
+ --env-file ../azure-pipelines/.env
+ build $(noCacheOption)
+ --build-arg BUILDKIT_INLINE_CACHE=1
+ --build-arg VISERON_VERSION=$(viseronVersion)
+ --build-arg VISERON_GIT_COMMIT=$(Build.SourceVersion)
+ ${{ architecture }}-${{ parameters.image }}
+ displayName: Build ${{ architecture }}-${{ parameters.image }}
+ condition: and(succeeded(), eq('${{ parameters.imageNameOnly }}', false))
+ env:
+ DOCKER_BUILDKIT: 1
+ COMPOSE_DOCKER_CLI_BUILD: 1
+ BUILDKIT_PROGRESS: plain
+ - script: >
+ cd $(Agent.BuildDirectory)/s/docker &&
+ docker compose --file ../azure-pipelines/docker-compose-build.yaml
+ --env-file ../azure-pipelines/.env
+ build $(noCacheOption)
+ --build-arg BUILDKIT_INLINE_CACHE=1
+ --build-arg VISERON_VERSION=$(viseronVersion)
+ --build-arg VISERON_GIT_COMMIT=$(Build.SourceVersion)
+ ${{ parameters.image }}
+ displayName: Build ${{ parameters.image }}
+ condition: and(succeeded(), eq('${{ parameters.imageNameOnly }}', true))
+ env:
+ DOCKER_BUILDKIT: 1
+ COMPOSE_DOCKER_CLI_BUILD: 1
+ BUILDKIT_PROGRESS: plain
+
+ # Runs only for non-release triggers
+ - script: |
+ cd $(Agent.BuildDirectory)/s/docker
+ docker compose --file ../azure-pipelines/docker-compose-build.yaml --env-file ../azure-pipelines/.env push ${{ architecture }}-${{ parameters.image }}
+
+ # Extract the tag and store it in an environment variable
+ IMAGE_TAG=$(docker compose --file ../azure-pipelines/docker-compose-build.yaml --env-file ../azure-pipelines/.env config | grep "${{ architecture }}-${{ parameters.image }}" -A2 | grep "image:" | sed 's/.*://g' | tr -d ' ' | cut -d':' -f2)
+ echo "Extracted IMAGE_TAG: $IMAGE_TAG"
+
+ # Push tag with commit hash
+ docker image tag roflcoopter/${{ architecture }}-${{ parameters.image }}:${IMAGE_TAG} roflcoopter/${{ architecture }}-${{ parameters.image }}:$(Build.SourceVersion)
+ docker image push roflcoopter/${{ architecture }}-${{ parameters.image }}:$(Build.SourceVersion)
+
+ # Push tag with timestamp
+ now=$(date -u +"%Y%m%d%H%M%S")
+ docker image tag roflcoopter/${{ architecture }}-${{ parameters.image }}:${IMAGE_TAG} roflcoopter/${{ architecture }}-${{ parameters.image }}:$now
+ docker image push roflcoopter/${{ architecture }}-${{ parameters.image }}:$now
+ displayName: Push ${{ architecture }}-${{ parameters.image }} (non-release)
+ condition: and(succeeded(), eq('${{ parameters.imageNameOnly }}', false), eq('${{ parameters.release }}', false))
+ - script: |
+ cd $(Agent.BuildDirectory)/s/docker
+ docker compose --file ../azure-pipelines/docker-compose-build.yaml --env-file ../azure-pipelines/.env push ${{ parameters.image }}
+
+ # Extract the tag and store it in an environment variable
+ IMAGE_TAG=$(docker compose --file ../azure-pipelines/docker-compose-build.yaml --env-file ../azure-pipelines/.env config | grep "${{ architecture }}-${{ parameters.image }}" -A2 | grep "image:" | sed 's/.*://g' | tr -d ' ' | cut -d':' -f2)
+ echo "Extracted IMAGE_TAG: $IMAGE_TAG"
+
+ # Push tag with commit hash
+ docker image tag roflcoopter/${{ parameters.image }}:${IMAGE_TAG} roflcoopter/${{ parameters.image }}:$(Build.SourceVersion)
+ docker image push roflcoopter/${{ parameters.image }}:$(Build.SourceVersion)
+
+ # Push tag with timestamp
+ now=$(date -u +"%Y%m%d%H%M%S")
+ docker image tag roflcoopter/${{ parameters.image }}:${IMAGE_TAG} roflcoopter/${{ parameters.image }}:$now
+ docker image push roflcoopter/${{ parameters.image }}:$now
+ displayName: Push ${{ parameters.image }} (non-release)
+ condition: and(succeeded(), eq('${{ parameters.imageNameOnly }}', true), eq('${{ parameters.release }}', false))
+
+ # Runs only for release triggers
+ - script: |
+ echo tagging roflcoopter/${{ architecture }}-${{ parameters.image }}:$(viseronVersion)
+ docker image tag roflcoopter/${{ architecture }}-${{ parameters.image }}:dev roflcoopter/${{ architecture }}-${{ parameters.image }}:$(viseronVersion)
+ docker image push roflcoopter/${{ architecture }}-${{ parameters.image }}:$(viseronVersion)
+
+ # Push tag with commit hash
+ docker image tag roflcoopter/${{ architecture }}-${{ parameters.image }}:dev roflcoopter/${{ architecture }}-${{ parameters.image }}:$(viseronVersion)-$(Build.SourceVersion)
+ docker image push roflcoopter/${{ architecture }}-${{ parameters.image }}:$(viseronVersion)-$(Build.SourceVersion)
+
+ # Push tag with timestamp
+ now=$(date -u +"%Y%m%d%H%M%S")
+ docker image tag roflcoopter/${{ architecture }}-${{ parameters.image }}:dev roflcoopter/${{ architecture }}-${{ parameters.image }}:$(viseronVersion)-$now
+ docker image push roflcoopter/${{ architecture }}-${{ parameters.image }}:$(viseronVersion)-$now
+
+ if [ $(latestRelease) = true ] ; then
+ echo tagging roflcoopter/${{ architecture }}-${{ parameters.image }}:latest
+ docker image tag roflcoopter/${{ architecture }}-${{ parameters.image }}:dev roflcoopter/${{ architecture }}-${{ parameters.image }}:latest
+ docker image push roflcoopter/${{ architecture }}-${{ parameters.image }}:latest
+ fi
+ displayName: Push ${{ architecture }}-${{ parameters.image }} (release)
+ condition: and(succeeded(), eq('${{ parameters.release }}', true))
+
+ - task: Docker@2
+ displayName: Logoff Docker Hub
+ inputs:
+ command: logout
+ containerRegistry: "Docker Hub"
+ condition: always()
diff --git a/docker/Dockerfile b/docker/Dockerfile
index ba7287036..975976220 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -16,11 +16,15 @@ ARG GPAC_VERSION
ENV \
DEBIAN_FRONTEND=noninteractive
+ARG BUILDPLATFORM
RUN \
- if [ "$ARCH" = "armhf" ] || \
- [ "$ARCH" = "rpi3" ] || \
- [ "$ARCH" = "aarch64" ] || \
- [ "$ARCH" = "jetson-nano" ]; then echo "Crossbuilding!" && cross-build-start; fi
+ echo "Build platform: $BUILDPLATFORM" && \
+ echo "Target architecture: $ARCH" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ case "$ARCH" in \
+ aarch64|jetson-nano|rpi3) echo "Crossbuilding!" && cross-build-start;; \
+    esac; \
+ fi
RUN \
apt-get update && apt-get install -y --no-install-recommends \
@@ -102,7 +106,7 @@ ENV \
S6_KILL_GRACETIME=30000 \
S6_KILL_FINISH_MAXTIME=30000 \
PATH=$PATH:/home/abc/bin \
- LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib \
+ LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib:/ffmpeg/lib \
PYTHONPATH=$PYTHONPATH:/usr/local/lib/python3.10/site-packages \
OPENCV_OPENCL_CACHE_ENABLE=false \
PG_COLOR="always"
diff --git a/docker/Dockerfile.hailo b/docker/Dockerfile.hailo
new file mode 100644
index 000000000..b75e15ac0
--- /dev/null
+++ b/docker/Dockerfile.hailo
@@ -0,0 +1,59 @@
+ARG BUILD_FROM
+FROM ${BUILD_FROM} as build
+
+ARG HAILO_VERSION
+ARG MAKEFLAGS="-j2"
+
+ENV \
+ DEBIAN_FRONTEND=noninteractive \
+ PIP_IGNORE_INSTALLED=0 \
+ HAILORT_LOGGER_PATH=NONE
+
+RUN \
+ buildDeps="autoconf \
+ automake \
+ ca-certificates \
+ cmake \
+ g++ \
+ gcc \
+ git \
+ make \
+ zip \
+ unzip \
+ python3-dev \
+ python3-pip \
+ python3-setuptools \
+ python3-wheel" && \
+ apt-get -yqq update && \
+ apt-get install -yq --no-install-recommends ${buildDeps}
+
+# Compile hailort
+RUN \
+ DIR=/tmp && mkdir -p ${DIR} && cd ${DIR} && \
+ git clone --branch v${HAILO_VERSION} --depth 1 https://github.com/hailo-ai/hailort.git && \
+ cd hailort && \
+ cmake -S. -Bbuild -DCMAKE_BUILD_TYPE=Release && cmake --build build --config release --target install
+
+# Build and create wheel
+RUN \
+ cd /tmp/hailort/hailort/libhailort/bindings/python/platform && \
+ python3 setup.py bdist_wheel --dist-dir=/wheels && \
+ pip3 wheel . -w /wheels && \
+ # Sanitize produced wheels to remove any lingering 'license-file' metadata lines
+ for whl in /wheels/*.whl; do \
+ tmpdir=$(mktemp -d); \
+ unzip -q "$whl" -d "$tmpdir"; \
+ find "$tmpdir" -maxdepth 2 -type f -path '*/METADATA' -exec sed -i '/^license-file:/Id' {} +; \
+ (cd "$tmpdir" && zip -qr "$whl.fixed" .); \
+ mv "$whl.fixed" "$whl"; \
+ rm -rf "$tmpdir"; \
+ done && \
+ ls -al /wheels/
+
+FROM scratch as scratch
+
+ARG HAILO_VERSION
+
+COPY --from=build /usr/local/bin/hailortcli /usr/local/bin/hailortcli
+COPY --from=build /usr/local/lib/libhailort.so.${HAILO_VERSION} /usr/local/lib/libhailort.so.${HAILO_VERSION}
+COPY --from=build /wheels /wheels/
diff --git a/docker/Dockerfile.wheels b/docker/Dockerfile.wheels
index a2f80fac1..7e0f53c24 100644
--- a/docker/Dockerfile.wheels
+++ b/docker/Dockerfile.wheels
@@ -14,10 +14,15 @@ ENV \
DEBIAN_FRONTEND=noninteractive \
PIP_IGNORE_INSTALLED=0
+ARG BUILDPLATFORM
RUN \
- if [ "$ARCH" = "armhf" ] || \
- [ "$ARCH" = "rpi3" ] || \
- [ "$ARCH" = "aarch64" ] ; then echo "Crossbuilding!" && cross-build-start; fi
+ echo "Build platform: $BUILDPLATFORM" && \
+ echo "Target architecture: $ARCH" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ case "$ARCH" in \
+ aarch64|rpi3) echo "Crossbuilding!" && cross-build-start;; \
+    esac; \
+ fi
RUN apt-get -yqq update && apt-get install -yq --no-install-recommends \
software-properties-common \
diff --git a/docker/aarch64/Dockerfile.base b/docker/aarch64/Dockerfile.base
index fd9c35b6f..b1316fe1c 100644
--- a/docker/aarch64/Dockerfile.base
+++ b/docker/aarch64/Dockerfile.base
@@ -1,6 +1,8 @@
ARG FFMPEG_VERSION
ARG UBUNTU_VERSION
+ARG HAILO_VERSION
FROM roflcoopter/viseron-models:latest as models
+FROM roflcoopter/aarch64-hailo:${HAILO_VERSION} as hailo
FROM balenalib/aarch64-ubuntu:${UBUNTU_VERSION}-run
COPY --from=models /detectors/models/darknet /detectors/models/darknet
@@ -25,3 +27,8 @@ RUN \
libatomic1 && \
ln -s /detectors/models/darknet/yolov7-tiny.weights /detectors/models/darknet/default.weights && \
ln -s /detectors/models/darknet/yolov7-tiny.cfg /detectors/models/darknet/default.cfg
+
+ARG HAILO_VERSION
+COPY --from=hailo /usr/local/bin/hailortcli /usr/local/bin/hailortcli
+COPY --from=hailo /usr/local/lib/libhailort.so.${HAILO_VERSION} /usr/local/lib/libhailort.so.${HAILO_VERSION}
+COPY --from=hailo /wheels /wheels/
diff --git a/docker/aarch64/Dockerfile.dlib b/docker/aarch64/Dockerfile.dlib
index be9c35fa2..9bc8ba592 100644
--- a/docker/aarch64/Dockerfile.dlib
+++ b/docker/aarch64/Dockerfile.dlib
@@ -1,6 +1,5 @@
ARG UBUNTU_VERSION
FROM balenalib/aarch64-ubuntu:${UBUNTU_VERSION}-build as build
-RUN [ "cross-build-start" ]
ARG DLIB_VERSION
ARG MAKEFLAGS="-j2"
@@ -9,6 +8,13 @@ ENV \
DEBIAN_FRONTEND=noninteractive \
PIP_IGNORE_INSTALLED=0
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
+
RUN buildDeps="autoconf \
automake \
ca-certificates \
@@ -34,7 +40,5 @@ RUN \
\
&& ls -al /wheels/
-RUN [ "cross-build-end" ]
-
FROM scratch as scratch
COPY --from=build /wheels /wheels/
diff --git a/docker/aarch64/Dockerfile.ffmpeg b/docker/aarch64/Dockerfile.ffmpeg
index 6f49600e4..72c49e1eb 100644
--- a/docker/aarch64/Dockerfile.ffmpeg
+++ b/docker/aarch64/Dockerfile.ffmpeg
@@ -1,7 +1,6 @@
# Shameless copy of https://github.com/jrottenberg/ffmpeg/
ARG UBUNTU_VERSION
FROM balenalib/aarch64-ubuntu:${UBUNTU_VERSION}-build as build
-RUN [ "cross-build-start" ]
WORKDIR /tmp/workdir
@@ -15,6 +14,13 @@ ENV \
PREFIX=/opt/ffmpeg \
LD_LIBRARY_PATH="/opt/ffmpeg/lib:/opt/ffmpeg/lib64:/usr/lib64:/usr/lib:/lib64:/lib"
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
+
RUN buildDeps="autoconf \
automake \
ca-certificates \
@@ -36,7 +42,9 @@ RUN buildDeps="autoconf \
yasm \
libgomp1 \
zlib1g-dev \
- libomxil-bellagio-dev" && \
+ libomxil-bellagio-dev \
+ libdrm-dev \
+ libv4l-dev" && \
apt-get -yqq update && \
apt-get install -yq --no-install-recommends ${buildDeps}
@@ -105,12 +113,22 @@ RUN \
curl -sLO https://archive.mozilla.org/pub/opus/opus-${OPUS_VERSION}.tar.gz && \
echo ${OPUS_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f opus-${OPUS_VERSION}.tar.gz
-RUN ["cross-build-end"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Stopping Crossbuilding!" && cross-build-end; \
+ fi
RUN \
DIR=/tmp/opus && \
cd ${DIR} && \
autoreconf -fiv
-RUN ["cross-build-start"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
RUN \
DIR=/tmp/opus && \
cd ${DIR} && \
@@ -215,12 +233,22 @@ RUN \
cd ${DIR} && \
curl -sL https://github.com/mstorsjo/fdk-aac/archive/v${FDKAAC_VERSION}.tar.gz | \
tar -zx --strip-components=1
-RUN ["cross-build-end"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Stopping Crossbuilding!" && cross-build-end; \
+ fi
RUN \
DIR=/tmp/fdk-aac && \
cd ${DIR} && \
autoreconf -fiv
-RUN ["cross-build-start"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
RUN \
DIR=/tmp/fdk-aac && \
cd ${DIR} && \
@@ -279,13 +307,23 @@ RUN \
curl -sLO https://github.com/fribidi/fribidi/archive/${FRIBIDI_VERSION}.tar.gz && \
echo ${FRIBIDI_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f ${FRIBIDI_VERSION}.tar.gz
-RUN ["cross-build-end"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Stopping Crossbuilding!" && cross-build-end; \
+ fi
RUN \
export NOCONFIGURE=1 && \
DIR=/tmp/fribidi && \
cd ${DIR} && \
./autogen.sh
-RUN ["cross-build-start"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
RUN \
DIR=/tmp/fribidi && \
cd ${DIR} && \
@@ -316,12 +354,22 @@ RUN \
curl -sLO https://github.com/libass/libass/archive/${LIBASS_VERSION}.tar.gz && \
echo ${LIBASS_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f ${LIBASS_VERSION}.tar.gz
-RUN ["cross-build-end"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Stopping Crossbuilding!" && cross-build-end; \
+ fi
RUN \
DIR=/tmp/libass && \
cd ${DIR} && \
./autogen.sh
-RUN ["cross-build-start"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
RUN \
DIR=/tmp/libass && \
cd ${DIR} && \
@@ -337,12 +385,22 @@ RUN \
cd ${DIR} && \
curl -sLO https://github.com/ultravideo/kvazaar/archive/v${KVAZAAR_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f v${KVAZAAR_VERSION}.tar.gz
-RUN ["cross-build-end"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Stopping Crossbuilding!" && cross-build-end; \
+ fi
RUN \
DIR=/tmp/kvazaar && \
cd ${DIR} && \
./autogen.sh
-RUN ["cross-build-start"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
RUN \
DIR=/tmp/kvazaar && \
cd ${DIR} && \
@@ -426,12 +484,22 @@ RUN \
curl -sLO https://gitlab.gnome.org/GNOME/libxml2/-/archive/v${LIBXML2_VERSION}/libxml2-v${LIBXML2_VERSION}.tar.gz && \
echo ${LIBXML2_SHA256SUM} | sha256sum --check && \
tar -xz --strip-components=1 -f libxml2-v${LIBXML2_VERSION}.tar.gz
-RUN ["cross-build-end"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Stopping Crossbuilding!" && cross-build-end; \
+ fi
RUN \
DIR=/tmp/libxml2 && \
cd ${DIR} && \
./autogen.sh --prefix="${PREFIX}" --with-ftp=no --with-http=no --with-python=no
-RUN ["cross-build-start"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
RUN \
DIR=/tmp/libxml2 && \
cd ${DIR} && \
@@ -464,12 +532,22 @@ RUN \
curl -sLO https://github.com/zeromq/libzmq/archive/v${LIBZMQ_VERSION}.tar.gz && \
echo ${LIBZMQ_SHA256SUM} | sha256sum --check && \
tar -xz --strip-components=1 -f v${LIBZMQ_VERSION}.tar.gz
-RUN ["cross-build-end"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Stopping Crossbuilding!" && cross-build-end; \
+ fi
RUN \
DIR=/tmp/libzmq && \
cd ${DIR} && \
./autogen.sh
-RUN ["cross-build-start"]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
RUN \
DIR=/tmp/libzmq && \
cd ${DIR} && \
@@ -529,6 +607,8 @@ RUN \
--extra-libs=-lpthread \
--enable-neon \
--enable-v4l2_m2m \
+ --enable-libv4l2 \
+ --enable-libdrm \
--extra-cflags="-I${PREFIX}/include" \
--extra-ldflags="-L${PREFIX}/lib" && \
make && \
@@ -552,7 +632,5 @@ RUN \
sed "s:${PREFIX}:/usr/local:g" <"$pc" >/usr/local/lib/pkgconfig/"${pc##*/}"; \
done
-RUN [ "cross-build-end" ]
-
FROM scratch
COPY --from=ffmpeg /usr/local /usr/local/
diff --git a/docker/aarch64/Dockerfile.opencv b/docker/aarch64/Dockerfile.opencv
index 41a7cd916..100101a58 100644
--- a/docker/aarch64/Dockerfile.opencv
+++ b/docker/aarch64/Dockerfile.opencv
@@ -2,7 +2,6 @@ ARG FFMPEG_VERSION
ARG UBUNTU_VERSION
FROM roflcoopter/aarch64-ffmpeg:${FFMPEG_VERSION} as ffmpeg
FROM balenalib/aarch64-ubuntu:${UBUNTU_VERSION}-build as build
-RUN [ "cross-build-start" ]
COPY --from=ffmpeg /usr/local /usr/local/
@@ -18,6 +17,13 @@ ENV \
PREFIX=/opt/opencv \
LD_LIBRARY_PATH="/opt/opencv/lib:/opt/opencv/lib64:/usr/lib64:/usr/lib:/lib64:/lib:/usr/local/lib"
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
+
RUN buildDeps="autoconf \
automake \
ca-certificates \
@@ -91,8 +97,6 @@ RUN \
make && \
make install
-RUN [ "cross-build-end" ]
-
FROM scratch
COPY --from=build /opt/opencv /opt/opencv/
diff --git a/docker/amd64-cuda/Dockerfile.base b/docker/amd64-cuda/Dockerfile.base
index 28cf5eae1..54ca5bf82 100644
--- a/docker/amd64-cuda/Dockerfile.base
+++ b/docker/amd64-cuda/Dockerfile.base
@@ -1,8 +1,10 @@
ARG DARKNET_COMMIT
ARG CUDA_VERSION
ARG UBUNTU_VERSION_NUMBER
+ARG HAILO_VERSION
FROM roflcoopter/viseron-models:latest as models
FROM roflcoopter/amd64-cuda-darknet:${DARKNET_COMMIT} as darknet
+FROM roflcoopter/amd64-hailo:${HAILO_VERSION} as hailo
FROM nvidia/cuda:${CUDA_VERSION}-cudnn9-runtime-ubuntu${UBUNTU_VERSION_NUMBER}
COPY --from=models /detectors/models/darknet /detectors/models/darknet
@@ -79,3 +81,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
ln -s /detectors/models/darknet/yolov7.cfg /detectors/models/darknet/default.cfg
COPY --from=darknet /darknet/lib /usr/local/lib
+
+ARG HAILO_VERSION
+COPY --from=hailo /usr/local/bin/hailortcli /usr/local/bin/hailortcli
+COPY --from=hailo /usr/local/lib/libhailort.so.${HAILO_VERSION} /usr/local/lib/libhailort.so.${HAILO_VERSION}
+COPY --from=hailo /wheels /wheels/
diff --git a/docker/amd64/Dockerfile.base b/docker/amd64/Dockerfile.base
index bbf1b2278..640767ade 100644
--- a/docker/amd64/Dockerfile.base
+++ b/docker/amd64/Dockerfile.base
@@ -1,5 +1,7 @@
ARG UBUNTU_VERSION
+ARG HAILO_VERSION
FROM roflcoopter/viseron-models:latest as models
+FROM roflcoopter/amd64-hailo:${HAILO_VERSION} as hailo
FROM ubuntu:${UBUNTU_VERSION} as base
COPY --from=models /detectors/models/darknet /detectors/models/darknet
@@ -68,3 +70,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
rm -R /opencl && \
ln -s /detectors/models/darknet/yolov7.weights /detectors/models/darknet/default.weights && \
ln -s /detectors/models/darknet/yolov7.cfg /detectors/models/darknet/default.cfg
+
+ARG HAILO_VERSION
+COPY --from=hailo /usr/local/bin/hailortcli /usr/local/bin/hailortcli
+COPY --from=hailo /usr/local/lib/libhailort.so.${HAILO_VERSION} /usr/local/lib/libhailort.so.${HAILO_VERSION}
+COPY --from=hailo /wheels /wheels/
diff --git a/docker/azure-agent-start.sh b/docker/azure-agent-start.sh
new file mode 100644
index 000000000..19a8a6ae8
--- /dev/null
+++ b/docker/azure-agent-start.sh
@@ -0,0 +1,114 @@
+#!/bin/bash
+set -e
+
+if [ -z "${AZP_URL}" ]; then
+ echo 1>&2 "error: missing AZP_URL environment variable"
+ exit 1
+fi
+
+# If an agent installation already exists (persisted via a volume) and no force update is requested,
+# skip download + configuration and just run the existing agent to speed up container startup.
+if [ -f ./run.sh ] && [ -f .agent ] && [ -z "${AZP_FORCE_UPDATE}" ]; then
+ echo
+ echo "Existing Azure Pipelines agent detected. Skipping download and configuration (set AZP_FORCE_UPDATE=1 to override)."
+ chmod +x ./run.sh
+ ./run.sh "$@" & wait $!
+ exit 0
+fi
+
+if [ -n "$AZP_CLIENTID" ]; then
+ echo "Using service principal credentials to get token"
+ az login --allow-no-subscriptions --service-principal --username "$AZP_CLIENTID" --password "$AZP_CLIENTSECRET" --tenant "$AZP_TENANTID"
+ # adapted from https://learn.microsoft.com/en-us/azure/databricks/dev-tools/user-aad-token
+ AZP_TOKEN=$(az account get-access-token --query accessToken --output tsv)
+ echo "Token retrieved"
+fi
+
+if [ -z "${AZP_TOKEN_FILE}" ]; then
+ if [ -z "${AZP_TOKEN}" ]; then
+ echo 1>&2 "error: missing AZP_TOKEN environment variable"
+ exit 1
+ fi
+
+ AZP_TOKEN_FILE="/azp/.token"
+ echo -n "${AZP_TOKEN}" > "${AZP_TOKEN_FILE}"
+fi
+
+unset AZP_CLIENTSECRET
+unset AZP_TOKEN
+
+if [ -n "${AZP_WORK}" ]; then
+ mkdir -p "${AZP_WORK}"
+fi
+
+cleanup() {
+ trap "" EXIT
+
+ if [ -e ./config.sh ]; then
+ print_header "Cleanup. Removing Azure Pipelines agent..."
+
+ # If the agent has some running jobs, the configuration removal process will fail.
+ # So, give it some time to finish the job.
+ while true; do
+ ./config.sh remove --unattended --auth "PAT" --token "$(cat "${AZP_TOKEN_FILE}")" && break
+
+ echo "Retrying in 30 seconds..."
+ sleep 30
+ done
+ fi
+}
+
+print_header() {
+ lightcyan="\033[1;36m"
+ nocolor="\033[0m"
+ echo -e "\n${lightcyan}$1${nocolor}\n"
+}
+
+# Let the agent ignore the token env variables
+export VSO_AGENT_IGNORE="AZP_TOKEN,AZP_TOKEN_FILE"
+
+print_header "1. Determining matching Azure Pipelines agent..."
+
+AZP_AGENT_PACKAGES=$(curl -LsS \
+ -u "user:$(cat "${AZP_TOKEN_FILE}")" \
+ -H "Accept:application/json" \
+ "${AZP_URL}/_apis/distributedtask/packages/agent?platform=${TARGETARCH}&top=1")
+
+AZP_AGENT_PACKAGE_LATEST_URL=$(echo "${AZP_AGENT_PACKAGES}" | jq -r ".value[0].downloadUrl")
+
+if [ -z "${AZP_AGENT_PACKAGE_LATEST_URL}" ] || [ "${AZP_AGENT_PACKAGE_LATEST_URL}" = "null" ]; then
+ echo 1>&2 "error: could not determine a matching Azure Pipelines agent"
+ echo 1>&2 "check that account '${AZP_URL}' is correct and the token is valid for that account"
+ exit 1
+fi
+
+print_header "2. Downloading and extracting Azure Pipelines agent..."
+
+curl -LsS "${AZP_AGENT_PACKAGE_LATEST_URL}" | tar -xz & wait $!
+
+source ./env.sh
+
+trap "cleanup; exit 0" EXIT
+trap "cleanup; exit 130" INT
+trap "cleanup; exit 143" TERM
+
+print_header "3. Configuring Azure Pipelines agent..."
+
+# Despite it saying "PAT", it can be the token through the service principal
+./config.sh --unattended \
+ --agent "${AZP_AGENT_NAME:-$(hostname)}" \
+ --url "${AZP_URL}" \
+ --auth "PAT" \
+ --token "$(cat "${AZP_TOKEN_FILE}")" \
+ --pool "${AZP_POOL:-Default}" \
+ --work "${AZP_WORK:-_work}" \
+ --replace \
+ --acceptTeeEula & wait $!
+
+print_header "4. Running Azure Pipelines agent..."
+
+chmod +x ./run.sh
+
+# To be aware of TERM and INT signals call ./run.sh
+# Running it with the --once flag at the end will shut down the agent after the build is executed
+./run.sh "$@" & wait $!
\ No newline at end of file
diff --git a/docker/jetson-nano/Dockerfile.base b/docker/jetson-nano/Dockerfile.base
index 133de5823..58914b193 100644
--- a/docker/jetson-nano/Dockerfile.base
+++ b/docker/jetson-nano/Dockerfile.base
@@ -1,11 +1,11 @@
ARG PYTHON_VERSION
ARG DARKNET_COMMIT
ARG JETPACK_VERSION
-FROM roflcoopter/viseron-models:latest as models
-FROM roflcoopter/jetson-nano-python:${PYTHON_VERSION} as python
-FROM roflcoopter/jetson-nano-python:3.9.19 as python-3.9.19
-FROM roflcoopter/jetson-nano-darknet:${DARKNET_COMMIT} as darknet
-FROM nvcr.io/nvidia/l4t-base:r${JETPACK_VERSION} as build
+FROM roflcoopter/viseron-models:latest AS models
+FROM roflcoopter/jetson-nano-python:${PYTHON_VERSION} AS python
+FROM roflcoopter/jetson-nano-python:3.9.19 AS python-3.9.19
+FROM roflcoopter/jetson-nano-darknet:${DARKNET_COMMIT} AS darknet
+FROM nvcr.io/nvidia/l4t-base:r${JETPACK_VERSION} AS build
COPY --from=models /detectors/models/darknet /detectors/models/darknet
COPY --from=models /detectors/models/edgetpu /detectors/models/edgetpu
@@ -30,10 +30,11 @@ RUN \
apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \
gnupg && \
+ echo "deb https://newrepo.switchroot.org/ switchroot unstable" >> /etc/apt/sources.list.d/switchroot.list && \
+ apt-key del 92813F6A23DB6DFC && \
+ wget -O - https://newrepo.switchroot.org/pubkey | apt-key add - && \
echo "deb https://repo.download.nvidia.com/jetson/common r${JETPACK_VERSION_MAJOR}.${JETPACK_VERSION_MINOR} main" >> /etc/apt/sources.list.d/nvidia.list && \
echo "deb https://repo.download.nvidia.com/jetson/${SOC} r${JETPACK_VERSION_MAJOR}.${JETPACK_VERSION_MINOR} main" >> /etc/apt/sources.list.d/nvidia.list && \
- echo "deb https://repo.download.nvidia.com/jetson/ffmpeg main main" >> /etc/apt/sources.list.d/nvidia.list && \
- echo "deb-src https://repo.download.nvidia.com/jetson/ffmpeg main main" >> /etc/apt/sources.list.d/nvidia.list && \
apt-key adv --fetch-key http://repo.download.nvidia.com/jetson/jetson-ota-public.asc && \
mkdir -p /opt/nvidia/l4t-packages/ && touch /opt/nvidia/l4t-packages/.nv-l4t-disable-boot-fw-update-in-preinstall && \
rm -r /etc/ld.so.conf.d/nvidia-tegra.conf && \
diff --git a/docker/jetson-nano/Dockerfile.darknet b/docker/jetson-nano/Dockerfile.darknet
index e0df00057..9075236d0 100644
--- a/docker/jetson-nano/Dockerfile.darknet
+++ b/docker/jetson-nano/Dockerfile.darknet
@@ -21,8 +21,6 @@ RUN \
COPY --from=opencv /opt/opencv /opt/opencv/
-RUN [ "cross-build-start" ]
-
ARG MAKEFLAGS="-j2"
ARG SOC
ARG JETPACK_VERSION_MAJOR
@@ -36,6 +34,13 @@ ENV \
PKG_CONFIG_PATH=$PKG_CONFIG_PATH:/opt/opencv/lib/pkgconfig \
LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/opencv/lib:/usr/lib/aarch64-linux-gnu/tegra:/usr/lib/aarch64-linux-gnu/tegra-egl
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
+
RUN \
apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \
@@ -65,7 +70,7 @@ RUN buildDeps="autoconf \
nvidia-l4t-cuda \
cuda-libraries-dev-10-2" && \
apt-get -yqq update && \
- apt-get install -yq -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' --no-install-recommends ${buildDeps}
+ apt-get install -yq -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' --no-install-recommends ${buildDeps}
# Minimal cuda install does not create symlink so we do it manually
RUN ln -s /usr/local/cuda-10.2 /usr/local/cuda
diff --git a/docker/jetson-nano/Dockerfile.dlib b/docker/jetson-nano/Dockerfile.dlib
index e4d3650de..601dfe25e 100644
--- a/docker/jetson-nano/Dockerfile.dlib
+++ b/docker/jetson-nano/Dockerfile.dlib
@@ -17,8 +17,6 @@ RUN \
ln -s pydoc3 pydoc && \
ln -s python3 python
-RUN [ "cross-build-start" ]
-
ARG DLIB_VERSION
ARG MAKEFLAGS="-j2"
ARG SOC
@@ -29,6 +27,13 @@ ENV \
DEBIAN_FRONTEND=noninteractive \
PIP_IGNORE_INSTALLED=0
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
+
# NVIDIA repos use HTTPS so we need some additional libraries first
RUN \
apt-get update && apt-get install -y --no-install-recommends \
@@ -73,7 +78,5 @@ RUN \
\
&& ls -al /wheels/
-RUN [ "cross-build-end" ]
-
FROM scratch as scratch
COPY --from=build /wheels /wheels/
diff --git a/docker/jetson-nano/Dockerfile.ffmpeg b/docker/jetson-nano/Dockerfile.ffmpeg
index 15f170142..e046b547f 100644
--- a/docker/jetson-nano/Dockerfile.ffmpeg
+++ b/docker/jetson-nano/Dockerfile.ffmpeg
@@ -131,7 +131,12 @@ COPY --from=qemu /usr/bin/cross-build-start /bin/cross-build-start
COPY --from=qemu /usr/bin/cross-build-end /bin/cross-build-end
COPY --from=qemu /usr/bin/resin-xbuild /usr/bin/resin-xbuild
-RUN [ "cross-build-start" ]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
WORKDIR /tmp/workdir
@@ -600,8 +605,6 @@ RUN \
sed "s:${PREFIX}:/usr/local:g" <"$pc" >/usr/local/lib/pkgconfig/"${pc##*/}"; \
done
-RUN [ "cross-build-end" ]
-
FROM scratch
COPY --from=build /usr/local /usr/local/
diff --git a/docker/jetson-nano/Dockerfile.opencv b/docker/jetson-nano/Dockerfile.opencv
index 1327b8cba..a9f56e67c 100644
--- a/docker/jetson-nano/Dockerfile.opencv
+++ b/docker/jetson-nano/Dockerfile.opencv
@@ -9,7 +9,12 @@ COPY --from=qemu /usr/bin/cross-build-start /bin/cross-build-start
COPY --from=qemu /usr/bin/cross-build-end /bin/cross-build-end
COPY --from=qemu /usr/bin/resin-xbuild /usr/bin/resin-xbuild
-RUN [ "cross-build-start" ]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
WORKDIR /tmp/workdir
diff --git a/docker/jetson-nano/Dockerfile.opencv.part1 b/docker/jetson-nano/Dockerfile.opencv.part1
index c229ac6a1..a78fe6d45 100644
--- a/docker/jetson-nano/Dockerfile.opencv.part1
+++ b/docker/jetson-nano/Dockerfile.opencv.part1
@@ -17,8 +17,6 @@ RUN \
ln -s pydoc3 pydoc && \
ln -s python3 python
-RUN [ "cross-build-start" ]
-
WORKDIR /tmp/workdir
ARG OPENCV_VERSION
@@ -34,6 +32,13 @@ ENV \
CUDA_ARCH_BIN="53" \
CUDA_ARCH_PTX="70"
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
+
RUN \
apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \
diff --git a/docker/jetson-nano/Dockerfile.python b/docker/jetson-nano/Dockerfile.python
index 522b6f884..f939aef9f 100644
--- a/docker/jetson-nano/Dockerfile.python
+++ b/docker/jetson-nano/Dockerfile.python
@@ -9,8 +9,6 @@ COPY --from=qemu /usr/bin/cross-build-start /bin/cross-build-start
COPY --from=qemu /usr/bin/cross-build-end /bin/cross-build-end
COPY --from=qemu /usr/bin/resin-xbuild /usr/bin/resin-xbuild
-RUN [ "cross-build-start" ]
-
ARG PYTHON_VERSION
ARG PYTHON_PIP_VERSION
@@ -18,6 +16,13 @@ ENV LANG=C.UTF-8
ENV DEBIAN_FRONTEND=noninteractive
ENV PIP_URL=https://bootstrap.pypa.io/get-pip.py
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
+
RUN \
apt-get -yqq update && apt-get install -yq --no-install-recommends \
ca-certificates \
@@ -91,8 +96,6 @@ RUN \
-name pydoc_data -o \
-name tkinter \) -exec rm -rf {} +
-RUN [ "cross-build-end" ]
-
FROM scratch
COPY --from=build /usr/local /usr/local
diff --git a/docker/jetson-nano/Dockerfile.wheels b/docker/jetson-nano/Dockerfile.wheels
index 5d8f0affa..7d8fc54f9 100644
--- a/docker/jetson-nano/Dockerfile.wheels
+++ b/docker/jetson-nano/Dockerfile.wheels
@@ -27,7 +27,12 @@ ENV \
DEBIAN_FRONTEND=noninteractive \
PIP_IGNORE_INSTALLED=0
-RUN [ "cross-build-start" ]
+ARG BUILDPLATFORM
+RUN \
+ echo "Build platform: $BUILDPLATFORM" && \
+ if [ "$BUILDPLATFORM" = "linux/amd64" ]; then \
+ echo "Crossbuilding!" && cross-build-start; \
+ fi
RUN apt-get -yqq update && apt-get install -yq --no-install-recommends \
wget \
diff --git a/docker/rpi5/Dockerfile.azure b/docker/rpi5/Dockerfile.azure
new file mode 100644
index 000000000..39beb04fb
--- /dev/null
+++ b/docker/rpi5/Dockerfile.azure
@@ -0,0 +1,42 @@
+ARG UBUNTU_VERSION
+FROM ubuntu:${UBUNTU_VERSION}
+ENV TARGETARCH="linux-arm64" \
+ DOCKER_BUILDKIT=1
+
+ARG DOCKER_GID
+
+RUN apt-get update && \
+ apt-get upgrade -y && \
+ apt-get install -y curl git jq libicu70 ca-certificates
+
+# Install Docker
+RUN \
+ install -m 0755 -d /etc/apt/keyrings && \
+ curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc && \
+ chmod a+r /etc/apt/keyrings/docker.asc && \
+ echo \
+ "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
+ $(. /etc/os-release && echo "${UBUNTU_CODENAME:-$VERSION_CODENAME}") stable" | \
+ tee /etc/apt/sources.list.d/docker.list > /dev/null && \
+ apt-get update && \
+ apt-get install -y \
+ docker-ce-cli \
+ docker-buildx-plugin \
+ docker-compose-plugin
+
+# Install Azure CLI
+RUN curl -sL https://aka.ms/InstallAzureCLIDeb | bash
+
+WORKDIR /azp/
+
+COPY ./docker/azure-agent-start.sh ./
+RUN chmod +x ./azure-agent-start.sh
+
+# Create agent user and set up home directory
+RUN useradd -m -d /home/agent agent
+RUN chown -R agent:agent /azp /home/agent
+RUN groupadd -g ${DOCKER_GID} docker && usermod -aG docker agent
+
+USER agent
+
+ENTRYPOINT [ "./azure-agent-start.sh" ]
\ No newline at end of file
diff --git a/docs/docs/documentation/configuration/system_events.md b/docs/docs/documentation/configuration/system_events.md
new file mode 100644
index 000000000..e99f39bdb
--- /dev/null
+++ b/docs/docs/documentation/configuration/system_events.md
@@ -0,0 +1,23 @@
+# System Events
+
+System events are events that are dispatched by the backend for communication between the many components of Viseron, such as when a camera detects motion or an object is detected.
+
+These events can be used to trigger actions in other components, such as the [webhook component](/components-explorer/components/webhook).
+
+## System event viewer
+
+The system event viewer allows you to listen to and view system events in real-time, along with the event data. Seeing the event data is useful when you want to use it in a [template](/docs/documentation/configuration/templating).
+
+The event viewer can be accessed by admins from the Settings > System Events page in the Viseron web interface.
+
+
+
+:::info
+
+The event data is normally in JSON format, but the event viewer will format it to YAML for easier readability.
+
+:::
diff --git a/docs/docs/documentation/configuration/templating.md b/docs/docs/documentation/configuration/templating.md
new file mode 100644
index 000000000..0cc96fec9
--- /dev/null
+++ b/docs/docs/documentation/configuration/templating.md
@@ -0,0 +1,97 @@
+# Templating
+
+Templating in Viseron is backed by [Jinja2](https://jinja.palletsprojects.com/), a powerful templating engine for Python.
+It allows you to create dynamic templates that can be used in the config, currently only the [webhook component](/components-explorer/components/webhook) leverages this functionality.
+
+## Templating in Viseron
+
+To know if a config option supports templating, check for the `Jinja2 template` tag in the component documentation.
+
+
+ Jinja2 template tag screenshot
+
+
+
+The syntax for Jinja2 is described in [their documentation](https://jinja.palletsprojects.com/en/latest/templates/) and is not covered here.
+
+Viseron provides some additional context variables that can be used in templates:
+
+- `states`: A dictionary of all the current states of all Entities in Viseron.
+- `event`: The event data that triggered the component. This is only available for components that are triggered by events, such as the [webhook component](/components-explorer/components/webhook).
+
+## Template editor
+
+A template editor (heavily inspired by Home Assistant) is available that allows you to test your templates before using them in your configuration.
+You can access it from the Settings > Template Editor page in the web interface.
+
+
+ Template editor screenshot
+
+
+
+## Examples
+
+### Using the `event` context variable
+
+When using the `webhook` component, you can access the event data that triggered the webhook. For example, if you want to include the camera identifier in the payload, you can use:
+
+```yaml
+webhook:
+ my_webhook:
+ trigger:
+ event: camera_one/motion_detected
+ url: http://example.com/webhook
+ payload: >
+ {%- if event.motion_detected -%}
+ "Motion detected on {{ event.camera_identifier }}!"
+ {%- else -%}
+ "No motion detected on {{ event.camera_identifier }}."
+ {%- endif -%}
+```
+
+### Using the `states` context variable
+
+You can also use the `states` context variable to access the current state of all Entities. For example, if you want to include a camera's recording state in the payload, you can use:
+
+```yaml
+webhook:
+ my_webhook:
+ trigger:
+ event: camera_one/motion_detected
+ url: http://example.com/webhook
+ payload: "Recording state: {{ states.camera_one_recorder.state }}"
+```
+
+### Conditions
+
+Some components allow you to use template conditions to determine whether an action should be taken based on the template.
+The condition checks whether the template produces a value that evaluates to true.
+
+Values that evaluate to true include:
+
+- Boolean true
+- Non zero numbers (e.g., 1, 2, 3, etc.)
+- The strings `true`, `yes`, `on`, `enable` (case-insensitive)
+
+Any other value results in a false evaluation.
+
+This example checks if the `motion_detected` attribute of the event is true before triggering the webhook:
+
+```yaml
+webhook:
+ my_webhook:
+ trigger:
+ event: camera_one/motion_detected
+ condition: >
+ {{ event.motion_detected }}
+ url: http://example.com/webhook
+ payload: "Motion detected on {{ event.camera_identifier }}"
+```
diff --git a/docs/docs/documentation/installation.mdx b/docs/docs/documentation/installation.mdx
index a71293cdf..b7e4ce731 100644
--- a/docs/docs/documentation/installation.mdx
+++ b/docs/docs/documentation/installation.mdx
@@ -55,6 +55,7 @@ docker run --rm \
-v {snapshots path}:/snapshots \
-v {thumbnails path}:/thumbnails \
-v {event clips path}:/event_clips \
+ -v {timelapse path}:/timelapse \
-v {config path}:/config \
-v /etc/localtime:/etc/localtime:ro \
-p 8888:8888 \
@@ -67,8 +68,6 @@ docker run --rm \
```yaml
-version: "2.4"
-
services:
viseron:
image: roflcoopter/viseron:latest
@@ -79,6 +78,7 @@ services:
- {snapshots path}:/snapshots
- {thumbnails path}:/thumbnails
- {event clips path}:/event_clips
+ - {timelapse path}:/timelapse
- {config path}:/config
- /etc/localtime:/etc/localtime:ro
ports:
@@ -101,6 +101,7 @@ docker run --rm \
-v {snapshots path}:/snapshots \
-v {thumbnails path}:/thumbnails \
-v {event clips path}:/event_clips \
+ -v {timelapse path}:/timelapse \
-v {config path}:/config \
-v /etc/localtime:/etc/localtime:ro \
-p 8888:8888 \
@@ -114,8 +115,6 @@ docker run --rm \
```yaml
-version: "2.4"
-
services:
viseron:
image: roflcoopter/viseron:latest
@@ -126,6 +125,7 @@ services:
- {snapshots path}:/snapshots
- {thumbnails path}:/thumbnails
- {event clips path}:/event_clips
+ - {timelapse path}:/timelapse
- {config path}:/config
- /etc/localtime:/etc/localtime:ro
ports:
@@ -150,6 +150,7 @@ docker run --rm \
-v {snapshots path}:/snapshots \
-v {thumbnails path}:/thumbnails \
-v {event clips path}:/event_clips \
+ -v {timelapse path}:/timelapse \
-v {config path}:/config \
-v /etc/localtime:/etc/localtime:ro \
-p 8888:8888 \
@@ -163,8 +164,6 @@ docker run --rm \
```yaml
-version: "2.4"
-
services:
viseron:
image: roflcoopter/amd64-cuda-viseron:latest
@@ -175,6 +174,7 @@ services:
- {snapshots path}:/snapshots
- {thumbnails path}:/thumbnails
- {event clips path}:/event_clips
+ - {timelapse path}:/timelapse
- {config path}:/config
- /etc/localtime:/etc/localtime:ro
ports:
@@ -205,6 +205,7 @@ docker run --rm \
-v {snapshots path}:/snapshots \
-v {thumbnails path}:/thumbnails \
-v {event clips path}:/event_clips \
+ -v {timelapse path}:/timelapse \
-v {config path}:/config \
-v /etc/localtime:/etc/localtime:ro \
-p 8888:8888 \
@@ -226,8 +227,6 @@ You can probably get around this by manually mounting all the needed devices but
```yaml
-version: "2.4"
-
services:
viseron:
image: roflcoopter/jetson-nano-viseron:latest
@@ -238,6 +237,7 @@ services:
- {snapshots path}:/snapshots
- {thumbnails path}:/thumbnails
- {event clips path}:/event_clips
+ - {timelapse path}:/timelapse
- {config path}:/config
- /etc/localtime:/etc/localtime:ro
ports:
@@ -270,6 +270,7 @@ docker run --rm \
-v {snapshots path}:/snapshots \
-v {thumbnails path}:/thumbnails \
-v {event clips path}:/event_clips \
+ -v {timelapse path}:/timelapse \
-v {config path}:/config \
-v /etc/localtime:/etc/localtime:ro \
-v /dev/bus/usb:/dev/bus/usb \
@@ -288,7 +289,6 @@ docker run --rm \
```yaml
-version: "2.4"
services:
viseron:
image: roflcoopter/viseron:latest
@@ -299,6 +299,7 @@ services:
- {snapshots path}:/snapshots
- {thumbnails path}:/thumbnails
- {event clips path}:/event_clips
+ - {timelapse path}:/timelapse
- {config path}:/config
- /etc/localtime:/etc/localtime:ro
devices:
@@ -342,6 +343,7 @@ docker run --rm \
-v {snapshots path}:/snapshots \
-v {thumbnails path}:/thumbnails \
-v {event clips path}:/event_clips \
+ -v {timelapse path}:/timelapse \
-v {config path}:/config \
-v /etc/localtime:/etc/localtime:ro \
-v /opt/vc/lib:/opt/vc/lib \
@@ -358,7 +360,6 @@ docker run --rm \
```yaml
-version: "2.4"
services:
viseron:
image: roflcoopter/viseron:latest
@@ -369,6 +370,7 @@ services:
- {snapshots path}:/snapshots
- {thumbnails path}:/thumbnails
- {event clips path}:/event_clips
+ - {timelapse path}:/timelapse
- {config path}:/config
- /etc/localtime:/etc/localtime:ro
- /opt/vc/lib:/opt/vc/lib
@@ -447,6 +449,7 @@ docker run --rm \
-v {snapshots path}:/snapshots \
-v {thumbnails path}:/thumbnails \
-v {event clips path}:/event_clips \
+ -v {timelapse path}:/timelapse \
-v {config path}:/config \
-v /etc/localtime:/etc/localtime:ro \
-p 8888:8888 \
@@ -465,8 +468,6 @@ docker run --rm \
Example docker-compose
```yaml
-version: "2.4"
-
services:
viseron:
image: roflcoopter/viseron:latest
@@ -477,6 +478,7 @@ services:
- {snapshots path}:/snapshots
- {thumbnails path}:/thumbnails
- {event clips path}:/event_clips
+ - {timelapse path}:/timelapse
- {config path}:/config
- /etc/localtime:/etc/localtime:ro
ports:
diff --git a/docs/package-lock.json b/docs/package-lock.json
index 7b66b8d10..10628fc67 100644
--- a/docs/package-lock.json
+++ b/docs/package-lock.json
@@ -8248,9 +8248,9 @@
}
},
"node_modules/caniuse-lite": {
- "version": "1.0.30001684",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001684.tgz",
- "integrity": "sha512-G1LRwLIQjBQoyq0ZJGqGIJUXzJ8irpbjHLpVRXDvBEScFJ9b17sgK6vlx0GAJFE21okD7zXl08rRRUfq6HdoEQ==",
+ "version": "1.0.30001735",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001735.tgz",
+ "integrity": "sha512-EV/laoX7Wq2J9TQlyIXRxTJqIw4sxfXS4OYgudGxBYRuTv0q7AM6yMEpU/Vo1I94thg9U6EZ2NfZx9GJq83u7w==",
"funding": [
{
"type": "opencollective",
diff --git a/docs/sidebars.ts b/docs/sidebars.ts
index 71b51ec19..8562a3632 100644
--- a/docs/sidebars.ts
+++ b/docs/sidebars.ts
@@ -110,6 +110,8 @@ const sidebars: SidebarsConfig = {
{ type: "doc", id: "documentation/configuration/authentication" },
{ type: "doc", id: "documentation/configuration/logging" },
{ type: "doc", id: "documentation/configuration/secrets" },
+ { type: "doc", id: "documentation/configuration/templating" },
+ { type: "doc", id: "documentation/configuration/system_events" },
],
},
],
diff --git a/docs/src/pages/components-explorer/_components/ComponentConfiguration/index.tsx b/docs/src/pages/components-explorer/_components/ComponentConfiguration/index.tsx
index 51b53925c..73186c468 100644
--- a/docs/src/pages/components-explorer/_components/ComponentConfiguration/index.tsx
+++ b/docs/src/pages/components-explorer/_components/ComponentConfiguration/index.tsx
@@ -4,6 +4,7 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import React from "react";
+import Link from "@docusaurus/Link";
import CodeBlock from "@theme/CodeBlock";
import Tippy from "@tippyjs/react";
import clsx from "clsx";
@@ -35,20 +36,45 @@ function getValidValues(options) {
// Return div that contains valid values for the config option
function buildValidValues(item: any) {
if (item.options) {
+ const options = item.options.slice();
+ const hasFormat = options.some((x) => x.format !== undefined);
+ const hasValue = options.some((x) => x.value !== undefined);
return (
-
- Valid values:
-
- {getValidValues(item.options).map((option, index) => (
- -
-
- {option.value === undefined ? `<${option.type}>` : option.value}
-
- {option.description ? `: ${option.description}` : null}
-
- ))}
-
-
+ <>
+ {hasFormat && (
+
+ Valid formats:
+
+ {getValidValues(item.options).map(
+ (option, index) =>
+ option.format && (
+ -
+
{option.format}
+ {option.description ? `: ${option.description}` : null}
+
+ ),
+ )}
+
+
+ )}
+ {hasValue && (
+
+ Valid values:
+
+ {getValidValues(item.options).map((option, index) => (
+ -
+
+ {option.value === undefined
+ ? `<${option.type}>`
+ : option.value}
+
+ {option.description ? `: ${option.description}` : null}
+
+ ))}
+
+
+ )}
+ >
);
}
return null;
@@ -124,7 +150,7 @@ function getName(item: any) {
return `<${item.name.type}>`;
}
-function getDefault(item: any) {
+function getDefault(item: any, optional: boolean) {
function getCodeBlock() {
return (
@@ -153,7 +179,7 @@ function getDefault(item: any) {
// Handle object defaults
if (
- item.optional &&
+ optional &&
item.default !== null &&
typeof item.default === "object" &&
!Array.isArray(item.default) &&
@@ -165,7 +191,7 @@ function getDefault(item: any) {
// Handle array defaults
if (
- item.optional &&
+ optional &&
item.default !== null &&
// Only display default values for arrays if the length is greater than zero
Array.isArray(item.default) &&
@@ -177,7 +203,7 @@ function getDefault(item: any) {
// Handle other defaults
if (
- item.optional &&
+ optional &&
item.default !== null &&
!Array.isArray(item.default) &&
!(typeof item.default === "object")
@@ -189,7 +215,7 @@ function getDefault(item: any) {
);
}
- if (item.optional) {
+ if (optional) {
return ")";
}
return null;
@@ -206,7 +232,15 @@ function buildHeader(item: any) {
{/* Zero width space to prevent selecting type when double clicking the name */}
- {item.format ? item.format : item.type}
+ {item.format ? (
+ item.format
+ ) : item.type === "jinja2_template" ? (
+
+ Jinja2 template
+
+ ) : (
+ item.type
+ )}
{optional ? " (" : null}
@@ -221,7 +255,7 @@ function buildHeader(item: any) {
? " deprecated"
: " required"}
- {getDefault(item)}
+ {getDefault(item, optional)}
);
diff --git a/docs/src/pages/components-explorer/_domains/object_detector/index.mdx b/docs/src/pages/components-explorer/_domains/object_detector/index.mdx
index 7b6961612..0fd60ef73 100644
--- a/docs/src/pages/components-explorer/_domains/object_detector/index.mdx
+++ b/docs/src/pages/components-explorer/_domains/object_detector/index.mdx
@@ -22,7 +22,7 @@ Object detectors can be taxing on the system, so it is wise to combine it with a
### Zones \{#object-detector-zones}
-
+
### Mask \{#object-detector-mask}
diff --git a/docs/src/pages/components-explorer/_domains/object_detector/zones.mdx b/docs/src/pages/components-explorer/_domains/object_detector/zones.mdx
index a42931e2a..80ceae15d 100644
--- a/docs/src/pages/components-explorer/_domains/object_detector/zones.mdx
+++ b/docs/src/pages/components-explorer/_domains/object_detector/zones.mdx
@@ -15,27 +15,27 @@ actually interested in, excluding the sidewalk.
? props.meta.name
: "