mirror of
https://github.com/mofeng-git/One-KVM.git
synced 2025-12-11 16:50:28 +08:00
commit
c262db4a18
@ -1,7 +1,7 @@
|
||||
[bumpversion]
|
||||
commit = True
|
||||
tag = True
|
||||
current_version = 4.49
|
||||
current_version = 4.94
|
||||
parse = (?P<major>\d+)\.(?P<minor>\d+)(\.(?P<patch>\d+)(\-(?P<release>[a-z]+))?)?
|
||||
serialize =
|
||||
{major}.{minor}
|
||||
|
||||
152
.github/workflows/build_img.yaml
vendored
152
.github/workflows/build_img.yaml
vendored
@ -4,7 +4,7 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
device_target:
|
||||
description: 'Target device name'
|
||||
description: 'Target device to build'
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
@ -15,6 +15,20 @@ on:
|
||||
- e900v22c
|
||||
- octopus-flanet
|
||||
- all
|
||||
create_release:
|
||||
description: 'Create GitHub Release'
|
||||
required: false
|
||||
default: true
|
||||
type: boolean
|
||||
release_name:
|
||||
description: 'Custom release name (optional)'
|
||||
required: false
|
||||
type: string
|
||||
|
||||
env:
|
||||
BUILD_DATE: ""
|
||||
GIT_SHA: ""
|
||||
RELEASE_TAG: ""
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@ -26,11 +40,35 @@ jobs:
|
||||
TZ: Asia/Shanghai
|
||||
volumes:
|
||||
- /dev:/dev
|
||||
- /mnt/nfs/lfs/:/mnt/nfs/lfs/
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set build environment
|
||||
id: build_env
|
||||
shell: bash
|
||||
run: |
|
||||
BUILD_DATE=$(date +%y%m%d-%H%M)
|
||||
# 使用 GitHub 提供的环境变量避免 Git 权限问题
|
||||
GIT_SHA="${GITHUB_SHA:0:7}"
|
||||
GIT_BRANCH="${GITHUB_REF_NAME}"
|
||||
|
||||
echo "BUILD_DATE=$BUILD_DATE" >> $GITHUB_ENV
|
||||
echo "GIT_SHA=$GIT_SHA" >> $GITHUB_ENV
|
||||
echo "GIT_BRANCH=$GIT_BRANCH" >> $GITHUB_ENV
|
||||
|
||||
# 生成唯一但不创建新分支的标识符
|
||||
RELEASE_TAG="build-$BUILD_DATE-${{ github.event.inputs.device_target }}-$GIT_SHA"
|
||||
echo "RELEASE_TAG=$RELEASE_TAG" >> $GITHUB_ENV
|
||||
|
||||
echo "Build environment:"
|
||||
echo "- Date: $BUILD_DATE"
|
||||
echo "- Git SHA: $GIT_SHA"
|
||||
echo "- Git Branch: $GIT_BRANCH"
|
||||
echo "- Release Tag: $RELEASE_TAG"
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@ -38,7 +76,8 @@ jobs:
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
apt-get install -y --no-install-recommends \
|
||||
sudo tzdata docker.io qemu-utils qemu-user-static binfmt-support parted e2fsprogs \
|
||||
curl tar python3 python3-pip rsync git android-sdk-libsparse-utils coreutils zerofree
|
||||
curl tar python3 python3-pip rsync git android-sdk-libsparse-utils coreutils zerofree wget \
|
||||
file tree
|
||||
apt-get clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
ln -snf /usr/share/zoneinfo/$TZ /etc/localtime
|
||||
@ -48,27 +87,98 @@ jobs:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
|
||||
- name: Build image
|
||||
id: build
|
||||
shell: bash
|
||||
run: |
|
||||
echo "BUILD_DATE=$(date +%y%m%d)" >> $GITHUB_ENV
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
echo "=== Build Configuration ==="
|
||||
echo "Target: ${{ github.event.inputs.device_target }}"
|
||||
echo "Build Date: $BUILD_DATE"
|
||||
echo "Git SHA: $GIT_SHA"
|
||||
echo "Git Branch: $GIT_BRANCH"
|
||||
echo "Output Directory: ${{ github.workspace }}/output"
|
||||
echo "=========================="
|
||||
|
||||
mkdir -p "${{ github.workspace }}/output"
|
||||
chmod +x build/build_img.sh
|
||||
|
||||
echo "Starting build for target: ${{ github.event.inputs.device_target }}"
|
||||
bash build/build_img.sh ${{ github.event.inputs.device_target }}
|
||||
|
||||
echo "Build script finished."
|
||||
|
||||
echo "Starting build process..."
|
||||
if bash build/build_img.sh ${{ github.event.inputs.device_target }}; then
|
||||
echo "BUILD_SUCCESS=true" >> $GITHUB_OUTPUT
|
||||
echo "Build completed successfully!"
|
||||
else
|
||||
echo "BUILD_SUCCESS=false" >> $GITHUB_OUTPUT
|
||||
echo "Build failed!" >&2
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
CI_PROJECT_DIR: ${{ github.workspace }}
|
||||
GITHUB_ACTIONS: true
|
||||
OUTPUTDIR: ${{ github.workspace }}/output
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v3
|
||||
- name: Collect build artifacts
|
||||
id: artifacts
|
||||
run: |
|
||||
cd "${{ github.workspace }}/output"
|
||||
|
||||
echo "=== Build Artifacts ==="
|
||||
if [ -d "${{ github.workspace }}/output" ]; then
|
||||
find . -name "*.xz" | head -20
|
||||
|
||||
# 统计xz文件信息
|
||||
ARTIFACT_COUNT=$(find . -name "*.xz" | wc -l)
|
||||
TOTAL_SIZE=$(du -sh . | cut -f1)
|
||||
|
||||
echo "ARTIFACT_COUNT=$ARTIFACT_COUNT" >> $GITHUB_OUTPUT
|
||||
echo "TOTAL_SIZE=$TOTAL_SIZE" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "No output directory found!"
|
||||
echo "ARTIFACT_COUNT=0" >> $GITHUB_OUTPUT
|
||||
echo "TOTAL_SIZE=0" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
echo "======================"
|
||||
|
||||
- name: Create GitHub Release
|
||||
if: steps.build.outputs.BUILD_SUCCESS == 'true' && github.event.inputs.create_release == 'true'
|
||||
id: release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
name: onekvm-image-${{ github.event.inputs.device_target }}-${{ env.BUILD_DATE }}
|
||||
path: |
|
||||
${{ github.workspace }}/output/*.img
|
||||
${{ github.workspace }}/output/*.vmdk
|
||||
${{ github.workspace }}/output/*.vdi
|
||||
${{ github.workspace }}/output/*.burn.img
|
||||
if-no-files-found: ignore
|
||||
tag_name: ${{ env.RELEASE_TAG }}
|
||||
name: ${{ github.event.inputs.release_name || format('One-KVM {0} 构建镜像 ({1})', github.event.inputs.device_target, env.BUILD_DATE) }}
|
||||
body: |
|
||||
## 📦 GitHub Actions 镜像构建
|
||||
|
||||
### 构建信息
|
||||
- **目标设备**: `${{ github.event.inputs.device_target }}`
|
||||
- **构建时间**: `${{ env.BUILD_DATE }}`
|
||||
- **Git 提交**: `${{ env.GIT_SHA }}` (分支: `${{ env.GIT_BRANCH }}`)
|
||||
- **构建环境**: GitHub Actions (Ubuntu 22.04)
|
||||
- **工作流ID**: `${{ github.run_id }}`
|
||||
|
||||
files: ${{ github.workspace }}/output/*.xz
|
||||
prerelease: true
|
||||
make_latest: false
|
||||
generate_release_notes: false
|
||||
env:
|
||||
CI_PROJECT_DIR: ${{ github.workspace }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build summary
|
||||
if: always()
|
||||
run: |
|
||||
echo "## 📋 构建摘要" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| 项目 | 值 |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|------|-----|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| **目标设备** | \`${{ github.event.inputs.device_target }}\` |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| **构建时间** | \`${{ env.BUILD_DATE }}\` |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| **Git SHA** | \`${{ env.GIT_SHA }}\` |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| **Git 分支** | \`${{ env.GIT_BRANCH }}\` |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| **构建状态** | ${{ steps.build.outputs.BUILD_SUCCESS == 'true' && '✅ 成功' || '❌ 失败' }} |" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
if [ "${{ steps.build.outputs.BUILD_SUCCESS }}" = "true" ]; then
|
||||
echo "| **构建产物** | ${{ steps.artifacts.outputs.ARTIFACT_COUNT || '0' }} 个文件 (${{ steps.artifacts.outputs.TOTAL_SIZE || '0' }}) |" >> $GITHUB_STEP_SUMMARY
|
||||
if [ "${{ github.event.inputs.create_release }}" = "true" ]; then
|
||||
echo "| **Release** | [${{ env.RELEASE_TAG }}](${{ steps.release.outputs.url }}) |" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
fi
|
||||
239
.github/workflows/docker-build.yaml
vendored
239
.github/workflows/docker-build.yaml
vendored
@ -3,81 +3,192 @@ name: Build and Push Docker Image
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: 'Version'
|
||||
build_type:
|
||||
description: 'Build type'
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- stage-0
|
||||
- dev
|
||||
- latest
|
||||
- release
|
||||
version:
|
||||
description: 'Version tag (for main image)'
|
||||
required: false
|
||||
default: 'latest'
|
||||
type: string
|
||||
platforms:
|
||||
description: 'Target platforms'
|
||||
required: false
|
||||
default: 'linux/amd64,linux/arm64,linux/arm/v7'
|
||||
type: string
|
||||
enable_aliyun:
|
||||
description: 'Push to Aliyun Registry'
|
||||
required: false
|
||||
default: true
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
DOCKERHUB_REGISTRY: docker.io
|
||||
ALIYUN_REGISTRY: registry.cn-hangzhou.aliyuncs.com
|
||||
STAGE0_IMAGE: kvmd-stage-0
|
||||
MAIN_IMAGE: kvmd
|
||||
|
||||
jobs:
|
||||
build:
|
||||
build-stage-0:
|
||||
runs-on: ubuntu-22.04
|
||||
container:
|
||||
image: node:18
|
||||
env:
|
||||
TZ: Asia/Shanghai
|
||||
if: github.event.inputs.build_type == 'stage-0'
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install dependencies
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
driver: docker-container
|
||||
platforms: ${{ github.event.inputs.platforms }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
with:
|
||||
platforms: all
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.DOCKERHUB_REGISTRY }}
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to Aliyun Registry
|
||||
if: github.event.inputs.enable_aliyun == 'true'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.ALIYUN_REGISTRY }}
|
||||
username: ${{ secrets.ALIYUN_USERNAME }}
|
||||
password: ${{ secrets.ALIYUN_PASSWORD }}
|
||||
|
||||
- name: Extract metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
silentwind0/${{ env.STAGE0_IMAGE }}
|
||||
${{ github.event.inputs.enable_aliyun == 'true' && format('{0}/silentwind/{1}', env.ALIYUN_REGISTRY, env.STAGE0_IMAGE) || '' }}
|
||||
tags: |
|
||||
type=raw,value=latest
|
||||
type=raw,value=latest-{{date 'YYYYMMDD-HHmmss'}}
|
||||
type=sha,prefix={{branch}}-
|
||||
labels: |
|
||||
org.opencontainers.image.title=One-KVM Stage-0 Base Image
|
||||
org.opencontainers.image.description=Base image for One-KVM build environment
|
||||
org.opencontainers.image.vendor=One-KVM Project
|
||||
|
||||
- name: Build and push stage-0 image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: ./build/Dockerfile-stage-0
|
||||
platforms: ${{ github.event.inputs.platforms }}
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha,scope=stage-0
|
||||
cache-to: type=gha,mode=max,scope=stage-0
|
||||
provenance: false
|
||||
sbom: false
|
||||
allow: security.insecure
|
||||
|
||||
build-main:
|
||||
runs-on: ubuntu-22.04
|
||||
if: github.event.inputs.build_type != 'stage-0'
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
driver: docker-container
|
||||
platforms: ${{ github.event.inputs.platforms }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
with:
|
||||
platforms: all
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.DOCKERHUB_REGISTRY }}
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to Aliyun Registry
|
||||
if: github.event.inputs.enable_aliyun == 'true'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.ALIYUN_REGISTRY }}
|
||||
username: ${{ secrets.ALIYUN_USERNAME }}
|
||||
password: ${{ secrets.ALIYUN_PASSWORD }}
|
||||
|
||||
- name: Set version tag
|
||||
id: version
|
||||
run: |
|
||||
apt-get update
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
apt-get install -y --no-install-recommends \
|
||||
sudo tzdata docker.io qemu-utils qemu-user-static binfmt-support parted e2fsprogs \
|
||||
curl tar python3 python3-pip rsync git android-sdk-libsparse-utils coreutils zerofree
|
||||
apt-get clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
ln -snf /usr/share/zoneinfo/$TZ /etc/localtime
|
||||
echo $TZ > /etc/timezone
|
||||
update-binfmts --enable
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
if [[ "${{ github.event.inputs.build_type }}" == "dev" ]]; then
|
||||
echo "tag=dev" >> $GITHUB_OUTPUT
|
||||
elif [[ "${{ github.event.inputs.build_type }}" == "release" ]]; then
|
||||
echo "tag=${{ github.event.inputs.version }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Install Docker Buildx
|
||||
- name: Extract metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
silentwind0/${{ env.MAIN_IMAGE }}
|
||||
${{ github.event.inputs.enable_aliyun == 'true' && format('{0}/silentwind/{1}', env.ALIYUN_REGISTRY, env.MAIN_IMAGE) || '' }}
|
||||
tags: |
|
||||
type=raw,value=${{ steps.version.outputs.tag }}
|
||||
type=raw,value=${{ steps.version.outputs.tag }}-{{date 'YYYYMMDD-HHmmss'}}
|
||||
type=sha,prefix={{branch}}-
|
||||
labels: |
|
||||
org.opencontainers.image.title=One-KVM
|
||||
org.opencontainers.image.description=DIY IP-KVM solution based on PiKVM
|
||||
org.opencontainers.image.vendor=One-KVM Project
|
||||
org.opencontainers.image.version=${{ steps.version.outputs.tag }}
|
||||
|
||||
- name: Build and push main image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: ./build/Dockerfile
|
||||
platforms: ${{ github.event.inputs.platforms }}
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha,scope=main
|
||||
cache-to: type=gha,mode=max,scope=main
|
||||
provenance: false
|
||||
sbom: false
|
||||
|
||||
- name: Build summary
|
||||
run: |
|
||||
# 创建插件目录
|
||||
mkdir -p ~/.docker/cli-plugins
|
||||
# 下载 buildx 二进制文件
|
||||
BUILDX_VERSION="v0.11.2"
|
||||
curl -L "https://github.com/docker/buildx/releases/download/${BUILDX_VERSION}/buildx-${BUILDX_VERSION}.linux-amd64" -o ~/.docker/cli-plugins/docker-buildx
|
||||
chmod +x ~/.docker/cli-plugins/docker-buildx
|
||||
# 验证安装
|
||||
docker buildx version
|
||||
|
||||
#- name: Install QEMU
|
||||
# run: |
|
||||
# 安装 QEMU 模拟器
|
||||
#docker run --privileged --rm tonistiigi/binfmt --install all
|
||||
# 验证 QEMU 安装
|
||||
#docker buildx inspect --bootstrap
|
||||
|
||||
- name: Create and use new builder instance
|
||||
run: |
|
||||
# 创建新的 builder 实例
|
||||
docker buildx create --name mybuilder --driver docker-container --bootstrap
|
||||
# 使用新创建的 builder
|
||||
docker buildx use mybuilder
|
||||
# 验证支持的平台
|
||||
docker buildx inspect --bootstrap
|
||||
|
||||
- name: Build multi-arch image
|
||||
run: |
|
||||
# 构建多架构镜像
|
||||
docker buildx build \
|
||||
--platform linux/amd64,linux/arm64,linux/arm/v7 \
|
||||
--file ./build/Dockerfile \
|
||||
--tag silentwind/kvmd:${{ github.event.inputs.version }} \
|
||||
.
|
||||
|
||||
#- name: Login to DockerHub
|
||||
# uses: docker/login-action@v2
|
||||
# with:
|
||||
# username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
# password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
|
||||
echo "## Build Summary" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Build Type**: ${{ github.event.inputs.build_type }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Version Tag**: ${{ steps.version.outputs.tag }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Platforms**: ${{ github.event.inputs.platforms }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Aliyun Enabled**: ${{ github.event.inputs.enable_aliyun }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Tags**:" >> $GITHUB_STEP_SUMMARY
|
||||
echo "${{ steps.meta.outputs.tags }}" | sed 's/^/ - /' >> $GITHUB_STEP_SUMMARY
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@ -21,3 +21,4 @@
|
||||
/venv/
|
||||
.vscode/settings.j/son
|
||||
kvmd_config/
|
||||
CLAUDE.md
|
||||
|
||||
11
Makefile
11
Makefile
@ -4,7 +4,8 @@ TESTENV_IMAGE ?= kvmd-testenv
|
||||
TESTENV_HID ?= /dev/ttyS10
|
||||
TESTENV_VIDEO ?= /dev/video0
|
||||
TESTENV_GPIO ?= /dev/gpiochip0
|
||||
TESTENV_RELAY ?= $(if $(shell ls /dev/hidraw0 2>/dev/null || true),/dev/hidraw0,)
|
||||
TESTENV_RELAY ?=
|
||||
#TESTENV_RELAY ?= $(if $(shell ls /dev/hidraw0 2>/dev/null || true),/dev/hidraw0,)
|
||||
|
||||
LIBGPIOD_VERSION ?= 1.6.3
|
||||
|
||||
@ -28,6 +29,8 @@ all:
|
||||
@ echo " make testenv # Build test environment"
|
||||
@ echo " make tox # Run tests and linters"
|
||||
@ echo " make tox E=pytest # Run selected test environment"
|
||||
@ echo " make tox-local # Run tests and linters locally (no Docker)"
|
||||
@ echo " make tox-local E=flake8 # Run selected test locally"
|
||||
@ echo " make gpio # Create gpio mockup"
|
||||
@ echo " make run # Run kvmd"
|
||||
@ echo " make run CMD=... # Run specified command inside kvmd environment"
|
||||
@ -96,9 +99,13 @@ tox: testenv
|
||||
"
|
||||
|
||||
|
||||
tox-local:
|
||||
@./check-code.sh $(if $(E),$(E),all)
|
||||
|
||||
|
||||
$(TESTENV_GPIO):
|
||||
test ! -e $(TESTENV_GPIO)
|
||||
sudo modprobe gpio-mockup gpio_mockup_ranges=0,40
|
||||
sudo modprobe gpio_mockup gpio_mockup_ranges=0,40
|
||||
test -c $(TESTENV_GPIO)
|
||||
|
||||
|
||||
|
||||
18
PKGBUILD
18
PKGBUILD
@ -39,7 +39,7 @@ for _variant in "${_variants[@]}"; do
|
||||
pkgname+=(kvmd-platform-$_platform-$_board)
|
||||
done
|
||||
pkgbase=kvmd
|
||||
pkgver=4.49
|
||||
pkgver=4.94
|
||||
pkgrel=1
|
||||
pkgdesc="The main PiKVM daemon"
|
||||
url="https://github.com/pikvm/kvmd"
|
||||
@ -53,6 +53,8 @@ depends=(
|
||||
python-aiofiles
|
||||
python-async-lru
|
||||
python-passlib
|
||||
# python-bcrypt is needed for passlib
|
||||
python-bcrypt
|
||||
python-pyotp
|
||||
python-qrcode
|
||||
python-periphery
|
||||
@ -66,7 +68,7 @@ depends=(
|
||||
python-dbus
|
||||
python-dbus-next
|
||||
python-pygments
|
||||
python-pyghmi
|
||||
"python-pyghmi>=1.6.0-2"
|
||||
python-pam
|
||||
python-pillow
|
||||
python-xlib
|
||||
@ -80,6 +82,7 @@ depends=(
|
||||
python-luma-oled
|
||||
python-pyusb
|
||||
python-pyudev
|
||||
python-evdev
|
||||
"libgpiod>=2.1"
|
||||
freetype2
|
||||
"v4l-utils>=1.22.1-1"
|
||||
@ -94,7 +97,7 @@ depends=(
|
||||
certbot
|
||||
platform-io-access
|
||||
raspberrypi-utils
|
||||
"ustreamer>=6.26"
|
||||
"ustreamer>=6.37"
|
||||
|
||||
# Systemd UDEV bug
|
||||
"systemd>=248.3-2"
|
||||
@ -120,7 +123,7 @@ depends=(
|
||||
# fsck for /boot
|
||||
dosfstools
|
||||
|
||||
# pgrep for kvmd-udev-restart-pass
|
||||
# pgrep for kvmd-udev-restart-pass, sysctl for kvmd-otgnet
|
||||
procps-ng
|
||||
|
||||
# Misc
|
||||
@ -163,7 +166,9 @@ package_kvmd() {
|
||||
|
||||
install -Dm755 -t "$pkgdir/usr/bin" scripts/kvmd-{bootconfig,gencert,certbot}
|
||||
|
||||
install -Dm644 -t "$pkgdir/usr/lib/systemd/system" configs/os/services/*
|
||||
install -dm755 "$pkgdir/usr/lib/systemd/system"
|
||||
cp -rd configs/os/services -T "$pkgdir/usr/lib/systemd/system"
|
||||
|
||||
install -DTm644 configs/os/sysusers.conf "$pkgdir/usr/lib/sysusers.d/kvmd.conf"
|
||||
install -DTm644 configs/os/tmpfiles.conf "$pkgdir/usr/lib/tmpfiles.d/kvmd.conf"
|
||||
|
||||
@ -198,6 +203,7 @@ package_kvmd() {
|
||||
mkdir -p "$pkgdir/etc/kvmd/override.d"
|
||||
|
||||
mkdir -p "$pkgdir/var/lib/kvmd/"{msd,pst}
|
||||
chmod 1775 "$pkgdir/var/lib/kvmd/pst"
|
||||
}
|
||||
|
||||
|
||||
@ -210,7 +216,7 @@ for _variant in "${_variants[@]}"; do
|
||||
cd \"kvmd-\$pkgver\"
|
||||
|
||||
pkgdesc=\"PiKVM platform configs - $_platform for $_board\"
|
||||
depends=(kvmd=$pkgver-$pkgrel \"linux-rpi-pikvm>=6.6.45-10\" \"raspberrypi-bootloader-pikvm>=20240818-1\")
|
||||
depends=(kvmd=$pkgver-$pkgrel \"linux-rpi-pikvm>=6.6.45-13\" \"raspberrypi-bootloader-pikvm>=20240818-1\")
|
||||
|
||||
backup=(
|
||||
etc/sysctl.d/99-kvmd.conf
|
||||
|
||||
@ -42,6 +42,31 @@ RUN sed -i 's/deb.debian.org/mirrors.tuna.tsinghua.edu.cn/' /etc/apt/sources.lis
|
||||
libnss3 \
|
||||
libasound2 \
|
||||
nano \
|
||||
unzip \
|
||||
libavcodec59 \
|
||||
libavformat59 \
|
||||
libavutil57 \
|
||||
libswscale6 \
|
||||
libavfilter8 \
|
||||
libavdevice59 \
|
||||
&& if [ ${TARGETARCH} != arm ] && [ ${TARGETARCH} != arm64 ]; then \
|
||||
apt-get install -y --no-install-recommends \
|
||||
ffmpeg \
|
||||
vainfo \
|
||||
libva2 \
|
||||
libva-drm2 \
|
||||
libva-x11-2 \
|
||||
libdrm2 \
|
||||
mesa-va-drivers \
|
||||
mesa-vdpau-drivers \
|
||||
intel-media-va-driver \
|
||||
i965-va-driver; \
|
||||
fi \
|
||||
&& if [ ${TARGETARCH} = arm ] || [ ${TARGETARCH} = arm64 ]; then \
|
||||
apt-get install -y --no-install-recommends \
|
||||
v4l-utils \
|
||||
libv4l-0; \
|
||||
fi \
|
||||
&& cp /tmp/lib/* /lib/*-linux-*/ \
|
||||
&& pip install --no-cache-dir --root-user-action=ignore --disable-pip-version-check /tmp/wheel/*.whl \
|
||||
&& pip install --no-cache-dir --root-user-action=ignore --disable-pip-version-check pyfatfs \
|
||||
|
||||
@ -47,6 +47,29 @@ RUN sed -i 's/deb.debian.org/mirrors.tuna.tsinghua.edu.cn/' /etc/apt/sources.lis
|
||||
libspeex-dev \
|
||||
libspeexdsp-dev \
|
||||
libusb-1.0-0-dev \
|
||||
libldap2-dev \
|
||||
libsasl2-dev \
|
||||
ffmpeg \
|
||||
libavcodec-dev \
|
||||
libavformat-dev \
|
||||
libavutil-dev \
|
||||
libswscale-dev \
|
||||
libavfilter-dev \
|
||||
libavdevice-dev \
|
||||
vainfo \
|
||||
libva-dev \
|
||||
libva-drm2 \
|
||||
libva-x11-2 \
|
||||
libdrm-dev \
|
||||
mesa-va-drivers \
|
||||
mesa-vdpau-drivers \
|
||||
v4l-utils \
|
||||
libv4l-dev \
|
||||
&& if [ ${TARGETARCH} != arm ] && [ ${TARGETARCH} != arm64 ]; then \
|
||||
apt-get install -y --no-install-recommends \
|
||||
intel-media-va-driver \
|
||||
i965-va-driver; \
|
||||
fi \
|
||||
&& apt clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
@ -70,7 +93,13 @@ RUN --security=insecure pip config set global.index-url https://pypi.tuna.tsingh
|
||||
more-itertools multidict netifaces packaging passlib pillow ply psutil \
|
||||
pycparser pyelftools pyghmi pygments pyparsing pyotp qrcode requests \
|
||||
semantic-version setproctitle six spidev tabulate urllib3 wrapt xlib \
|
||||
yarl pyserial pyyaml zstandard supervisor pyfatfs
|
||||
yarl pyserial pyyaml zstandard supervisor pyfatfs pyserial python-periphery \
|
||||
python-ldap python-pam pyrad pyudev pyusb luma.oled pyserial-asyncio
|
||||
|
||||
# 编译 python vedev库
|
||||
RUN git clone --depth=1 https://github.com/gvalkov/python-evdev.git /tmp/python-evdev \
|
||||
&& cd /tmp/python-evdev \
|
||||
&& python3 setup.py bdist_wheel --dist-dir /tmp/wheel/
|
||||
|
||||
# 编译安装 libnice、libsrtp、libwebsockets 和 janus-gateway
|
||||
RUN git clone --depth=1 https://gitlab.freedesktop.org/libnice/libnice /tmp/libnice \
|
||||
@ -103,17 +132,20 @@ RUN git clone --depth=1 https://gitlab.freedesktop.org/libnice/libnice /tmp/libn
|
||||
&& rm -rf /tmp/janus-gateway
|
||||
|
||||
# 编译 ustreamer
|
||||
RUN sed --in-place --expression 's|^#include "refcount.h"$|#include "../refcount.h"|g' /usr/include/janus/plugins/plugin.h \
|
||||
&& git clone --depth=1 https://github.com/mofeng-git/ustreamer /tmp/ustreamer \
|
||||
&& sed -i '68s/-Wl,-Bstatic//' /tmp/ustreamer/src/Makefile \
|
||||
&& make -j WITH_PYTHON=1 WITH_JANUS=1 WITH_LIBX264=1 -C /tmp/ustreamer \
|
||||
&& /tmp/ustreamer/ustreamer -v \
|
||||
&& cp /tmp/ustreamer/python/dist/*.whl /tmp/wheel/
|
||||
RUN echo "Building ustreamer with timestamp cache bust" \
|
||||
&& sed --in-place --expression 's|^#include "refcount.h"$|#include "../refcount.h"|g' /usr/include/janus/plugins/plugin.h \
|
||||
&& TIMESTAMP=$(date +%s%N) \
|
||||
&& git clone --depth=1 https://github.com/mofeng-git/ustreamer /tmp/ustreamer-${TIMESTAMP} \
|
||||
&& make -j WITH_PYTHON=1 WITH_JANUS=1 WITH_FFMPEG=1 -C /tmp/ustreamer-${TIMESTAMP} \
|
||||
&& /tmp/ustreamer-${TIMESTAMP}/ustreamer -v \
|
||||
&& cp /tmp/ustreamer-${TIMESTAMP}/python/dist/*.whl /tmp/wheel/ \
|
||||
&& mv /tmp/ustreamer-${TIMESTAMP} /tmp/ustreamer
|
||||
|
||||
# 复制必要的库文件
|
||||
RUN mkdir /tmp/lib \
|
||||
&& cd /lib/*-linux-*/ \
|
||||
&& cp libevent_core-*.so.7 libbsd.so.0 libevent_pthreads-*.so.7 libspeexdsp.so.1 \
|
||||
libevent-*.so.7 libjpeg.so.62 libx264.so.164 libyuv.so.0 libnice.so.10 \
|
||||
/usr/lib/libsrtp2.so.1 /usr/lib/libwebsockets.so.19 \
|
||||
/tmp/lib/
|
||||
&& cp libevent_core-*.so.* libbsd.so.* libevent_pthreads-*.so.* libspeexdsp.so.* \
|
||||
libevent-*.so.* libjpeg.so.* libyuv.so.* libnice.so.* \
|
||||
/tmp/lib/ \
|
||||
&& find /usr/lib -name "libsrtp2.so.*" -exec cp {} /tmp/lib/ \; \
|
||||
&& find /usr/lib -name "libwebsockets.so.*" -exec cp {} /tmp/lib/ \;
|
||||
|
||||
@ -2,12 +2,15 @@
|
||||
|
||||
# --- 配置 ---
|
||||
# 允许通过环境变量覆盖默认路径
|
||||
SRCPATH="${SRCPATH:-/mnt/nfs/lfs/src}"
|
||||
SRCPATH="${SRCPATH:-/mnt/src}"
|
||||
BOOTFS="${BOOTFS:-/tmp/bootfs}"
|
||||
ROOTFS="${ROOTFS:-/tmp/rootfs}"
|
||||
OUTPUTDIR="${OUTPUTDIR:-/mnt/nfs/lfs/src/output}"
|
||||
OUTPUTDIR="${OUTPUTDIR:-/mnt/output}"
|
||||
TMPDIR="${TMPDIR:-$SRCPATH/tmp}"
|
||||
|
||||
# 远程文件下载配置
|
||||
REMOTE_PREFIX="${REMOTE_PREFIX:-https://files.mofeng.run/src}"
|
||||
|
||||
export LC_ALL=C
|
||||
|
||||
# 全局变量
|
||||
@ -132,6 +135,9 @@ build_target() {
|
||||
;;
|
||||
esac
|
||||
|
||||
# 在 GitHub Actions 环境中清理下载的文件
|
||||
cleanup_downloaded_files
|
||||
|
||||
echo "=================================================="
|
||||
echo "信息:目标 $target 构建完成!"
|
||||
echo "=================================================="
|
||||
|
||||
@ -172,9 +172,96 @@ write_meta() {
|
||||
run_in_chroot "sed -i 's/localhost.localdomain/$hostname/g' /etc/kvmd/meta.yaml"
|
||||
}
|
||||
|
||||
# 检测是否在 GitHub Actions 环境中
|
||||
is_github_actions() {
|
||||
[[ -n "$GITHUB_ACTIONS" ]]
|
||||
}
|
||||
|
||||
# 记录下载的文件列表(仅在 GitHub Actions 环境中)
|
||||
DOWNLOADED_FILES_LIST="/tmp/downloaded_files.txt"
|
||||
|
||||
# 自动下载文件函数
|
||||
download_file_if_missing() {
|
||||
local file_path="$1"
|
||||
local relative_path=""
|
||||
|
||||
# 如果文件已存在,直接返回
|
||||
if [[ -f "$file_path" ]]; then
|
||||
echo "信息:文件已存在: $file_path"
|
||||
return 0
|
||||
fi
|
||||
|
||||
# 计算相对于 SRCPATH 的路径
|
||||
if [[ "$file_path" == "$SRCPATH"/* ]]; then
|
||||
relative_path="${file_path#$SRCPATH/}"
|
||||
else
|
||||
echo "错误:文件路径 $file_path 不在 SRCPATH ($SRCPATH) 下" >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
echo "信息:文件不存在,尝试下载: $file_path"
|
||||
echo "信息:相对路径: $relative_path"
|
||||
|
||||
# 确保目标目录存在
|
||||
local target_dir="$(dirname "$file_path")"
|
||||
ensure_dir "$target_dir"
|
||||
|
||||
# 首先尝试直接下载
|
||||
local remote_url="${REMOTE_PREFIX}/${relative_path}"
|
||||
echo "信息:尝试下载: $remote_url"
|
||||
|
||||
if curl -f -L -o "$file_path" "$remote_url" 2>/dev/null; then
|
||||
echo "信息:下载成功: $file_path"
|
||||
# 在 GitHub Actions 环境中记录下载的文件
|
||||
if is_github_actions; then
|
||||
echo "$file_path" >> "$DOWNLOADED_FILES_LIST"
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
|
||||
# 如果直接下载失败,尝试添加 .xz 后缀
|
||||
echo "信息:直接下载失败,尝试 .xz 压缩版本..."
|
||||
local xz_url="${remote_url}.xz"
|
||||
local xz_file="${file_path}.xz"
|
||||
|
||||
if curl -f -L -o "$xz_file" "$xz_url" 2>/dev/null; then
|
||||
echo "信息:下载 .xz 文件成功,正在解压..."
|
||||
if xz -d "$xz_file"; then
|
||||
echo "信息:解压成功: $file_path"
|
||||
# 在 GitHub Actions 环境中记录下载的文件
|
||||
if is_github_actions; then
|
||||
echo "$file_path" >> "$DOWNLOADED_FILES_LIST"
|
||||
fi
|
||||
return 0
|
||||
else
|
||||
echo "错误:解压 .xz 文件失败" >&2
|
||||
rm -f "$xz_file"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "错误:无法下载文件 $file_path (尝试了原始版本和 .xz 版本)" >&2
|
||||
return 1
|
||||
}
|
||||
|
||||
# 清理下载的文件(仅在 GitHub Actions 环境中)
|
||||
cleanup_downloaded_files() {
|
||||
if is_github_actions && [[ -f "$DOWNLOADED_FILES_LIST" ]]; then
|
||||
echo "信息:清理 GitHub Actions 环境中下载的文件..."
|
||||
while IFS= read -r file_path; do
|
||||
if [[ -f "$file_path" ]]; then
|
||||
echo "信息:删除下载的文件: $file_path"
|
||||
rm -f "$file_path"
|
||||
fi
|
||||
done < "$DOWNLOADED_FILES_LIST"
|
||||
rm -f "$DOWNLOADED_FILES_LIST"
|
||||
echo "信息:下载文件清理完成"
|
||||
fi
|
||||
}
|
||||
|
||||
# 检查必要的外部工具
|
||||
check_required_tools() {
|
||||
local required_tools="sudo docker losetup mount umount parted e2fsck resize2fs qemu-img curl tar python3 pip3 rsync git simg2img img2simg dd cat rm mkdir mv cp sed chmod chown ln grep printf id"
|
||||
local required_tools="sudo docker losetup mount umount parted e2fsck resize2fs qemu-img curl tar python3 pip3 rsync git simg2img img2simg dd cat rm mkdir mv cp sed chmod chown ln grep printf id xz"
|
||||
|
||||
for cmd in $required_tools; do
|
||||
if ! command -v "$cmd" &> /dev/null; then
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
|
||||
onecloud_rootfs() {
|
||||
local unpacker="$SRCPATH/image/onecloud/AmlImg_v0.3.1_linux_amd64"
|
||||
local source_image="$SRCPATH/image/onecloud/Armbian_by-SilentWind_24.5.0-trunk_Onecloud_bookworm_legacy_5.9.0-rc7_minimal.burn.img"
|
||||
local source_image="$SRCPATH/image/onecloud/Armbian_by-SilentWind_24.5.0-trunk_Onecloud_bookworm_legacy_5.9.0-rc7_minimal_support-dvd-emulation.burn.img"
|
||||
local bootfs_img="$TMPDIR/bootfs.img"
|
||||
local rootfs_img="$TMPDIR/rootfs.img"
|
||||
local bootfs_sparse="$TMPDIR/6.boot.PARTITION.sparse"
|
||||
@ -16,6 +16,13 @@ onecloud_rootfs() {
|
||||
ensure_dir "$TMPDIR"
|
||||
ensure_dir "$BOOTFS"
|
||||
|
||||
# 自动下载 AmlImg 工具(如果不存在)
|
||||
download_file_if_missing "$unpacker" || { echo "错误:下载 AmlImg 工具失败" >&2; exit 1; }
|
||||
sudo chmod +x "$unpacker" || { echo "错误:设置 AmlImg 工具执行权限失败" >&2; exit 1; }
|
||||
|
||||
# 自动下载源镜像文件(如果不存在)
|
||||
download_file_if_missing "$source_image" || { echo "错误:下载 Onecloud 原始镜像失败" >&2; exit 1; }
|
||||
|
||||
echo "信息:解包 Onecloud burn 镜像..."
|
||||
sudo "$unpacker" unpack "$source_image" "$TMPDIR" || { echo "错误:解包失败" >&2; exit 1; }
|
||||
|
||||
@ -30,7 +37,12 @@ onecloud_rootfs() {
|
||||
sudo losetup "$bootfs_loopdev" "$bootfs_img" || { echo "错误:关联 bootfs 镜像到 $bootfs_loopdev 失败" >&2; exit 1; }
|
||||
sudo mount "$bootfs_loopdev" "$BOOTFS" || { echo "错误:挂载 bootfs ($bootfs_loopdev) 失败" >&2; exit 1; }
|
||||
BOOTFS_MOUNTED=1
|
||||
sudo cp "$SRCPATH/image/onecloud/meson8b-onecloud-fix.dtb" "$BOOTFS/dtb/meson8b-onecloud.dtb" || { echo "错误:复制修复后的 DTB 文件失败" >&2; exit 1; }
|
||||
|
||||
# 自动下载 DTB 文件(如果不存在)
|
||||
local dtb_file="$SRCPATH/image/onecloud/meson8b-onecloud-fix.dtb"
|
||||
download_file_if_missing "$dtb_file" || { echo "错误:下载 Onecloud DTB 文件失败" >&2; exit 1; }
|
||||
|
||||
sudo cp "$dtb_file" "$BOOTFS/dtb/meson8b-onecloud.dtb" || { echo "错误:复制修复后的 DTB 文件失败" >&2; exit 1; }
|
||||
sudo umount "$BOOTFS" || { echo "警告:卸载 bootfs ($BOOTFS) 失败" >&2; BOOTFS_MOUNTED=0; } # 卸载失败不应中断流程
|
||||
BOOTFS_MOUNTED=0
|
||||
echo "信息:分离 bootfs loop 设备 $bootfs_loopdev..."
|
||||
@ -60,6 +72,10 @@ cumebox2_rootfs() {
|
||||
|
||||
echo "信息:准备 Cumebox2 Rootfs..."
|
||||
ensure_dir "$TMPDIR"
|
||||
|
||||
# 自动下载源镜像文件(如果不存在)
|
||||
download_file_if_missing "$source_image" || { echo "错误:下载 Cumebox2 原始镜像失败" >&2; exit 1; }
|
||||
|
||||
cp "$source_image" "$target_image" || { echo "错误:复制 Cumebox2 原始镜像失败" >&2; exit 1; }
|
||||
|
||||
echo "信息:调整镜像分区大小..."
|
||||
@ -86,6 +102,10 @@ chainedbox_rootfs_and_fix_dtb() {
|
||||
|
||||
echo "信息:准备 Chainedbox Rootfs 并修复 DTB..."
|
||||
ensure_dir "$TMPDIR"; ensure_dir "$BOOTFS"
|
||||
|
||||
# 自动下载源镜像文件(如果不存在)
|
||||
download_file_if_missing "$source_image" || { echo "错误:下载 Chainedbox 原始镜像失败" >&2; exit 1; }
|
||||
|
||||
cp "$source_image" "$target_image" || { echo "错误:复制 Chainedbox 原始镜像失败" >&2; exit 1; }
|
||||
|
||||
echo "信息:挂载 boot 分区并修复 DTB..."
|
||||
@ -95,7 +115,12 @@ chainedbox_rootfs_and_fix_dtb() {
|
||||
sudo losetup --offset "$boot_offset" "$bootfs_loopdev" "$target_image" || { echo "错误:设置 boot 分区 loop 设备 $bootfs_loopdev 失败" >&2; exit 1; }
|
||||
sudo mount "$bootfs_loopdev" "$BOOTFS" || { echo "错误:挂载 boot 分区 ($bootfs_loopdev) 失败" >&2; exit 1; }
|
||||
BOOTFS_MOUNTED=1
|
||||
sudo cp "$SRCPATH/image/chainedbox/rk3328-l1pro-1296mhz-fix.dtb" "$BOOTFS/dtb/rockchip/rk3328-l1pro-1296mhz.dtb" || { echo "错误:复制修复后的 DTB 文件失败" >&2; exit 1; }
|
||||
|
||||
# 自动下载 DTB 文件(如果不存在)
|
||||
local dtb_file="$SRCPATH/image/chainedbox/rk3328-l1pro-1296mhz-fix.dtb"
|
||||
download_file_if_missing "$dtb_file" || { echo "错误:下载 Chainedbox DTB 文件失败" >&2; exit 1; }
|
||||
|
||||
sudo cp "$dtb_file" "$BOOTFS/dtb/rockchip/rk3328-l1pro-1296mhz.dtb" || { echo "错误:复制修复后的 DTB 文件失败" >&2; exit 1; }
|
||||
sudo umount "$BOOTFS" || { echo "警告:卸载 boot 分区 ($BOOTFS) 失败" >&2; BOOTFS_MOUNTED=0; }
|
||||
BOOTFS_MOUNTED=0
|
||||
echo "信息:分离 boot loop 设备 $bootfs_loopdev..."
|
||||
@ -116,6 +141,10 @@ vm_rootfs() {
|
||||
|
||||
echo "信息:准备 Vm Rootfs..."
|
||||
ensure_dir "$TMPDIR"
|
||||
|
||||
# 自动下载源镜像文件(如果不存在)
|
||||
download_file_if_missing "$source_image" || { echo "错误:下载 Vm 原始镜像失败" >&2; exit 1; }
|
||||
|
||||
cp "$source_image" "$target_image" || { echo "错误:复制 Vm 原始镜像失败" >&2; exit 1; }
|
||||
|
||||
echo "信息:设置带偏移量的 loop 设备..."
|
||||
@ -134,6 +163,10 @@ e900v22c_rootfs() {
|
||||
|
||||
echo "信息:准备 E900V22C Rootfs..."
|
||||
ensure_dir "$TMPDIR"
|
||||
|
||||
# 自动下载源镜像文件(如果不存在)
|
||||
download_file_if_missing "$source_image" || { echo "错误:下载 E900V22C 原始镜像失败" >&2; exit 1; }
|
||||
|
||||
cp "$source_image" "$target_image" || { echo "错误:复制 E900V22C 原始镜像失败" >&2; exit 1; }
|
||||
|
||||
echo "信息:扩展镜像文件 (${add_size_mb}MB)..."
|
||||
@ -164,6 +197,10 @@ octopus_flanet_rootfs() {
|
||||
|
||||
echo "信息:准备 Octopus-Planet Rootfs..."
|
||||
ensure_dir "$TMPDIR"; ensure_dir "$BOOTFS"
|
||||
|
||||
# 自动下载源镜像文件(如果不存在)
|
||||
download_file_if_missing "$source_image" || { echo "错误:下载 Octopus-Planet 原始镜像失败" >&2; exit 1; }
|
||||
|
||||
cp "$source_image" "$target_image" || { echo "错误:复制 Octopus-Planet 原始镜像失败" >&2; exit 1; }
|
||||
|
||||
echo "信息:挂载 boot 分区并修改 uEnv.txt (使用 VIM2 DTB)..."
|
||||
@ -199,14 +236,30 @@ octopus_flanet_rootfs() {
|
||||
config_cumebox2_files() {
|
||||
echo "信息:为 Cumebox2 配置特定文件 (OLED, DTB)..."
|
||||
ensure_dir "$ROOTFS/etc/oled"
|
||||
|
||||
# 自动下载 Cumebox2 相关文件(如果不存在)
|
||||
local dtb_file="$SRCPATH/image/cumebox2/v-fix.dtb"
|
||||
local ssd_file="$SRCPATH/image/cumebox2/ssd"
|
||||
local config_file="$SRCPATH/image/cumebox2/config.json"
|
||||
|
||||
download_file_if_missing "$dtb_file" || echo "警告:下载 Cumebox2 DTB 失败"
|
||||
download_file_if_missing "$ssd_file" || echo "警告:下载 Cumebox2 ssd 脚本失败"
|
||||
download_file_if_missing "$config_file" || echo "警告:下载 Cumebox2 配置文件失败"
|
||||
|
||||
# 注意 DTB 路径可能需要根据实际 Armbian 版本调整
|
||||
sudo cp "$SRCPATH/image/cumebox2/v-fix.dtb" "$ROOTFS/boot/dtb/amlogic/meson-gxl-s905x-khadas-vim.dtb" || echo "警告:复制 Cumebox2 DTB 失败"
|
||||
sudo cp "$SRCPATH/image/cumebox2/ssd" "$ROOTFS/usr/bin/" || echo "警告:复制 Cumebox2 ssd 脚本失败"
|
||||
sudo cp "$dtb_file" "$ROOTFS/boot/dtb/amlogic/meson-gxl-s905x-khadas-vim.dtb" || echo "警告:复制 Cumebox2 DTB 失败"
|
||||
sudo cp "$ssd_file" "$ROOTFS/usr/bin/" || echo "警告:复制 Cumebox2 ssd 脚本失败"
|
||||
sudo chmod +x "$ROOTFS/usr/bin/ssd" || echo "警告:设置 ssd 脚本执行权限失败"
|
||||
sudo cp "$SRCPATH/image/cumebox2/config.json" "$ROOTFS/etc/oled/config.json" || echo "警告:复制 OLED 配置文件失败"
|
||||
sudo cp "$config_file" "$ROOTFS/etc/oled/config.json" || echo "警告:复制 OLED 配置文件失败"
|
||||
}
|
||||
|
||||
config_octopus_flanet_files() {
|
||||
echo "信息:为 Octopus-Planet 配置特定文件 (model_database.conf)..."
|
||||
sudo cp "$SRCPATH/image/octopus-flanet/model_database.conf" "$ROOTFS/etc/model_database.conf" || echo "警告:复制 model_database.conf 失败"
|
||||
|
||||
# 自动下载 Octopus-Planet 相关文件(如果不存在)
|
||||
local config_file="$SRCPATH/image/octopus-flanet/model_database.conf"
|
||||
|
||||
download_file_if_missing "$config_file" || echo "警告:下载 Octopus-Planet 配置文件失败"
|
||||
|
||||
sudo cp "$config_file" "$ROOTFS/etc/model_database.conf" || echo "警告:复制 model_database.conf 失败"
|
||||
}
|
||||
@ -21,7 +21,12 @@ delete_armbian_verify(){
|
||||
|
||||
prepare_external_binaries() {
|
||||
local platform="$1" # linux/armhf or linux/amd64 or linux/aarch64
|
||||
local docker_image="registry.cn-hangzhou.aliyuncs.com/silentwind/kvmd-stage-0"
|
||||
# 如果在 GitHub Actions 环境下,使用 silentwind0/kvmd-stage-0,否则用阿里云镜像
|
||||
if is_github_actions; then
|
||||
local docker_image="silentwind0/kvmd-stage-0"
|
||||
else
|
||||
local docker_image="registry.cn-hangzhou.aliyuncs.com/silentwind/kvmd-stage-0"
|
||||
fi
|
||||
|
||||
echo "信息:准备外部预编译二进制文件 (平台: $platform)..."
|
||||
ensure_dir "$PREBUILT_DIR"
|
||||
@ -102,7 +107,8 @@ install_base_packages() {
|
||||
libxkbcommon-x11-0 nginx tesseract-ocr tesseract-ocr-eng tesseract-ocr-chi-sim \\
|
||||
iptables network-manager curl kmod libmicrohttpd12 libjansson4 libssl3 \\
|
||||
libsofia-sip-ua0 libglib2.0-0 libopus0 libogg0 libcurl4 libconfig9 \\
|
||||
python3-pip net-tools && \\
|
||||
python3-pip net-tools libavcodec59 libavformat59 libavutil57 libswscale6 \\
|
||||
libavfilter8 libavdevice59 v4l-utils libv4l-0 && \\
|
||||
apt clean && \\
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
"
|
||||
|
||||
@ -1,5 +1,21 @@
|
||||
#!/bin/bash
|
||||
|
||||
# --- 压缩函数 ---
|
||||
|
||||
# 压缩镜像文件(仅在 GitHub Actions 环境中)
|
||||
compress_image_file() {
|
||||
local file_path="$1"
|
||||
|
||||
if is_github_actions && [[ -f "$file_path" ]]; then
|
||||
echo "信息:压缩镜像文件: $file_path"
|
||||
if xz -9 -vv "$file_path"; then
|
||||
echo "信息:压缩完成: ${file_path}.xz"
|
||||
else
|
||||
echo "警告:压缩文件 $file_path 失败"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
# --- 打包函数 ---
|
||||
|
||||
pack_img() {
|
||||
@ -29,7 +45,22 @@ pack_img() {
|
||||
sudo qemu-img convert -f raw -O vmdk "$raw_img" "$vmdk_img" || echo "警告:转换为 VMDK 失败"
|
||||
echo "信息:转换为 VDI..."
|
||||
sudo qemu-img convert -f raw -O vdi "$raw_img" "$vdi_img" || echo "警告:转换为 VDI 失败"
|
||||
|
||||
# 在 GitHub Actions 环境中压缩 VM 镜像文件
|
||||
if is_github_actions; then
|
||||
echo "信息:在 GitHub Actions 环境中压缩 VM 镜像文件..."
|
||||
compress_image_file "$raw_img"
|
||||
compress_image_file "$vmdk_img"
|
||||
compress_image_file "$vdi_img"
|
||||
fi
|
||||
else
|
||||
# 在 GitHub Actions 环境中压缩镜像文件
|
||||
if is_github_actions; then
|
||||
echo "信息:在 GitHub Actions 环境中压缩镜像文件..."
|
||||
compress_image_file "$OUTPUTDIR/$target_img_name"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "信息:镜像打包完成: $OUTPUTDIR/$target_img_name"
|
||||
}
|
||||
|
||||
@ -48,6 +79,10 @@ pack_img_onecloud() {
|
||||
unmount_all
|
||||
fi
|
||||
|
||||
# 自动下载 AmlImg 工具(如果不存在)
|
||||
download_file_if_missing "$aml_packer" || { echo "错误:下载 AmlImg 工具失败" >&2; exit 1; }
|
||||
sudo chmod +x "$aml_packer" || { echo "错误:设置 AmlImg 工具执行权限失败" >&2; exit 1; }
|
||||
|
||||
echo "信息:将 raw rootfs 转换为 sparse image..."
|
||||
# 先删除可能存在的旧 sparse 文件
|
||||
sudo rm -f "$rootfs_sparse_img"
|
||||
@ -55,11 +90,16 @@ pack_img_onecloud() {
|
||||
sudo rm "$rootfs_raw_img" # 删除 raw 文件,因为它已被转换
|
||||
|
||||
echo "信息:使用 AmlImg 工具打包..."
|
||||
sudo chmod +x "$aml_packer"
|
||||
sudo "$aml_packer" pack "$OUTPUTDIR/$target_img_name" "$TMPDIR/" || { echo "错误:AmlImg 打包失败" >&2; exit 1; }
|
||||
|
||||
echo "信息:清理 Onecloud 临时文件..."
|
||||
sudo rm -f "$TMPDIR/6.boot.PARTITION.sparse" "$TMPDIR/7.rootfs.PARTITION.sparse" "$TMPDIR/dts.img"
|
||||
|
||||
# 在 GitHub Actions 环境中压缩 Onecloud 镜像文件
|
||||
if is_github_actions; then
|
||||
echo "信息:在 GitHub Actions 环境中压缩 Onecloud 镜像文件..."
|
||||
compress_image_file "$OUTPUTDIR/$target_img_name"
|
||||
fi
|
||||
|
||||
echo "信息:Onecloud burn 镜像打包完成: $OUTPUTDIR/$target_img_name"
|
||||
}
|
||||
@ -214,7 +214,14 @@ EOF
|
||||
log_info "视频输入格式已设置为 $VIDFORMAT"
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
if [ ! -z "$HWENCODER" ]; then
|
||||
if sed -i "s/--h264-hwenc=disabled/--h264-hwenc=$HWENCODER/g" /etc/kvmd/override.yaml; then
|
||||
log_info "硬件编码器已设置为 $HWENCODER"
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
touch /etc/kvmd/.init_flag
|
||||
log_info "初始化配置完成"
|
||||
fi
|
||||
|
||||
21
build/record.txt
Normal file
21
build/record.txt
Normal file
@ -0,0 +1,21 @@
|
||||
wget https://github.com/hzyitc/AmlImg/releases/download/v0.3.1/AmlImg_v0.3.1_linux_amd64 -O /mnt/src/image/onecloud/AmlImg_v0.3.1_linux_amd64
|
||||
chmod +x /mnt/src/image/onecloud/AmlImg_v0.3.1_linux_amd64
|
||||
|
||||
|
||||
#!/bin/bash
|
||||
# 文件映射脚本
|
||||
# 本地目录前缀:/mnt
|
||||
# 远程URL前缀:https://files.mofeng.run
|
||||
|
||||
LOCAL_PREFIX="/mnt"
|
||||
REMOTE_PREFIX="https://files.mofeng.run"
|
||||
|
||||
# 文件相对路径
|
||||
REL_PATH="src/image/onecloud/Armbian_by-SilentWind_24.5.0-trunk_Onecloud_bookworm_legacy_5.9.0-rc7_minimal_support-dvd-emulation.burn.img"
|
||||
|
||||
LOCAL_FILE="$LOCAL_PREFIX/$REL_PATH"
|
||||
REMOTE_URL="$REMOTE_PREFIX/$REL_PATH"
|
||||
|
||||
echo "下载 $REMOTE_URL 到 $LOCAL_FILE"
|
||||
mkdir -p "$(dirname "$LOCAL_FILE")"
|
||||
wget -O "$LOCAL_FILE" "$REMOTE_URL"
|
||||
82
check-code.sh
Executable file
82
check-code.sh
Executable file
@ -0,0 +1,82 @@
|
||||
#!/bin/bash
|
||||
# 本地代码质量检查脚本
|
||||
|
||||
set -e
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
echo "🔍 运行代码质量检查..."
|
||||
|
||||
# 检查参数,如果有参数则只运行指定的检查
|
||||
CHECK_TYPE="${1:-all}"
|
||||
|
||||
run_flake8() {
|
||||
echo "📝 运行 flake8 代码风格检查..."
|
||||
flake8 --config=testenv/linters/flake8.ini kvmd testenv/tests *.py
|
||||
}
|
||||
|
||||
run_pylint() {
|
||||
echo "🔎 运行 pylint 代码质量分析..."
|
||||
pylint -j0 --rcfile=testenv/linters/pylint.ini --output-format=colorized --reports=no kvmd testenv/tests *.py || true
|
||||
}
|
||||
|
||||
run_mypy() {
|
||||
echo "🔧 运行 mypy 类型检查..."
|
||||
mypy --config-file=testenv/linters/mypy.ini --cache-dir=testenv/.mypy_cache kvmd testenv/tests *.py || true
|
||||
}
|
||||
|
||||
run_vulture() {
|
||||
echo "💀 运行 vulture 死代码检测..."
|
||||
vulture --ignore-names=_format_P,Plugin --ignore-decorators=@exposed_http,@exposed_ws,@pytest.fixture kvmd testenv/tests *.py testenv/linters/vulture-wl.py || true
|
||||
}
|
||||
|
||||
run_eslint() {
|
||||
echo "📜 运行 eslint JavaScript检查..."
|
||||
if command -v eslint >/dev/null 2>&1; then
|
||||
eslint --cache-location=/tmp --config=testenv/linters/eslintrc.js --color web/share/js || true
|
||||
else
|
||||
echo "⚠️ eslint 未安装,跳过"
|
||||
fi
|
||||
}
|
||||
|
||||
run_htmlhint() {
|
||||
echo "📄 运行 htmlhint HTML检查..."
|
||||
if command -v htmlhint >/dev/null 2>&1; then
|
||||
htmlhint --config=testenv/linters/htmlhint.json web/*.html web/*/*.html || true
|
||||
else
|
||||
echo "⚠️ htmlhint 未安装,跳过"
|
||||
fi
|
||||
}
|
||||
|
||||
run_shellcheck() {
|
||||
echo "🐚 运行 shellcheck Shell脚本检查..."
|
||||
if command -v shellcheck >/dev/null 2>&1; then
|
||||
shellcheck --color=always kvmd.install scripts/* || true
|
||||
else
|
||||
echo "⚠️ shellcheck 未安装,跳过"
|
||||
fi
|
||||
}
|
||||
|
||||
case "$CHECK_TYPE" in
|
||||
flake8) run_flake8 ;;
|
||||
pylint) run_pylint ;;
|
||||
mypy) run_mypy ;;
|
||||
vulture) run_vulture ;;
|
||||
eslint) run_eslint ;;
|
||||
htmlhint) run_htmlhint ;;
|
||||
shellcheck) run_shellcheck ;;
|
||||
all)
|
||||
run_flake8
|
||||
run_pylint
|
||||
run_mypy
|
||||
run_vulture
|
||||
run_eslint
|
||||
run_htmlhint
|
||||
run_shellcheck
|
||||
;;
|
||||
*)
|
||||
echo "用法: $0 [flake8|pylint|mypy|vulture|eslint|htmlhint|shellcheck|all]"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "✅ 代码质量检查完成!"
|
||||
@ -1 +1 @@
|
||||
admin:$apr1$.6mu9N8n$xOuGesr4JZZkdiZo/j318.
|
||||
admin:{SSHA512}3zSmw/L9zIkpQdX5bcy6HntTxltAzTuGNP6NjHRRgOcNZkA0K+Lsrj3QplO9Gr3BA5MYVVki9rAVnFNCcIdtYC6FkLJWCmHs
|
||||
|
||||
@ -1,14 +1,11 @@
|
||||
# This file describes the credentials for IPMI users. The first pair separated by colon
|
||||
# is the login and password with which the user can access to IPMI. The second pair
|
||||
# is the name and password with which the user can access to KVMD API. The arrow is used
|
||||
# as a separator and shows the direction of user registration in the system.
|
||||
# This file describes the credentials for IPMI users in format "login:password",
|
||||
# one per line. The passwords are NOT encrypted.
|
||||
#
|
||||
# WARNING! IPMI protocol is completely unsafe by design. In short, the authentication
|
||||
# process for IPMI 2.0 mandates that the server send a salted SHA1 or MD5 hash of the
|
||||
# requested user's password to the client, prior to the client authenticating. Never use
|
||||
# the same passwords for KVMD and IPMI users. This default configuration is shown here
|
||||
# for example only.
|
||||
# requested user's password to the client, prior to the client authenticating.
|
||||
#
|
||||
# And even better not to use IPMI. Instead, you can directly use KVMD API via curl.
|
||||
# NEVER use the same passwords for KVMD and IPMI users.
|
||||
# This default configuration is shown here just for the example only.
|
||||
|
||||
admin:admin -> admin:admin
|
||||
admin:admin
|
||||
|
||||
97
configs/kvmd/main/v4mini-hdmi-rpi4.yaml
Normal file
97
configs/kvmd/main/v4mini-hdmi-rpi4.yaml
Normal file
@ -0,0 +1,97 @@
|
||||
# Don't touch this file otherwise your device may stop working.
|
||||
# Use override.yaml to modify required settings.
|
||||
# You can find a working configuration in /usr/share/kvmd/configs.default/kvmd.
|
||||
|
||||
override: !include [override.d, override.yaml]
|
||||
|
||||
logging: !include logging.yaml
|
||||
|
||||
kvmd:
|
||||
auth: !include auth.yaml
|
||||
|
||||
info:
|
||||
hw:
|
||||
ignore_past: true
|
||||
fan:
|
||||
unix: /run/kvmd/fan.sock
|
||||
|
||||
hid:
|
||||
type: otg
|
||||
|
||||
atx:
|
||||
type: gpio
|
||||
power_led_pin: 4
|
||||
hdd_led_pin: 5
|
||||
power_switch_pin: 23
|
||||
reset_switch_pin: 27
|
||||
|
||||
msd:
|
||||
type: otg
|
||||
|
||||
streamer:
|
||||
h264_bitrate:
|
||||
default: 5000
|
||||
cmd:
|
||||
- "/usr/bin/ustreamer"
|
||||
- "--device=/dev/kvmd-video"
|
||||
- "--persistent"
|
||||
- "--dv-timings"
|
||||
- "--format=uyvy"
|
||||
- "--buffers=6"
|
||||
- "--encoder=m2m-image"
|
||||
- "--workers=3"
|
||||
- "--quality={quality}"
|
||||
- "--desired-fps={desired_fps}"
|
||||
- "--drop-same-frames=30"
|
||||
- "--unix={unix}"
|
||||
- "--unix-rm"
|
||||
- "--unix-mode=0660"
|
||||
- "--exit-on-parent-death"
|
||||
- "--process-name-prefix={process_name_prefix}"
|
||||
- "--notify-parent"
|
||||
- "--no-log-colors"
|
||||
- "--jpeg-sink=kvmd::ustreamer::jpeg"
|
||||
- "--jpeg-sink-mode=0660"
|
||||
- "--h264-sink=kvmd::ustreamer::h264"
|
||||
- "--h264-sink-mode=0660"
|
||||
- "--h264-bitrate={h264_bitrate}"
|
||||
- "--h264-gop={h264_gop}"
|
||||
|
||||
gpio:
|
||||
drivers:
|
||||
__v4_locator__:
|
||||
type: locator
|
||||
|
||||
scheme:
|
||||
__v3_usb_breaker__:
|
||||
pin: 22
|
||||
mode: output
|
||||
initial: true
|
||||
pulse: false
|
||||
|
||||
__v4_locator__:
|
||||
driver: __v4_locator__
|
||||
pin: 12
|
||||
mode: output
|
||||
pulse: false
|
||||
|
||||
__v4_const1__:
|
||||
pin: 6
|
||||
mode: output
|
||||
initial: false
|
||||
switch: false
|
||||
pulse: false
|
||||
|
||||
|
||||
media:
|
||||
memsink:
|
||||
h264:
|
||||
sink: "kvmd::ustreamer::h264"
|
||||
|
||||
|
||||
vnc:
|
||||
memsink:
|
||||
jpeg:
|
||||
sink: "kvmd::ustreamer::jpeg"
|
||||
h264:
|
||||
sink: "kvmd::ustreamer::h264"
|
||||
@ -17,8 +17,6 @@ kvmd:
|
||||
|
||||
hid:
|
||||
type: otg
|
||||
mouse_alt:
|
||||
device: /dev/kvmd-hid-mouse-alt
|
||||
|
||||
atx:
|
||||
type: gpio
|
||||
|
||||
@ -4,11 +4,11 @@
|
||||
# will be displayed in the web interface.
|
||||
|
||||
server:
|
||||
host: localhost.localdomain
|
||||
host: "@auto"
|
||||
|
||||
kvm: {
|
||||
base_on: PiKVM,
|
||||
app_name: One-KVM,
|
||||
main_version: 241204,
|
||||
author: SilentWind
|
||||
base_on: "PiKVM",
|
||||
app_name: "One-KVM",
|
||||
main_version: "241204",
|
||||
author: "SilentWind"
|
||||
}
|
||||
|
||||
@ -48,7 +48,7 @@ kvmd:
|
||||
- "--device=/dev/video0"
|
||||
- "--persistent"
|
||||
- "--format=mjpeg"
|
||||
- "--encoder=LIBX264-VIDEO"
|
||||
- "--encoder=FFMPEG-VIDEO"
|
||||
- "--resolution={resolution}"
|
||||
- "--desired-fps={desired_fps}"
|
||||
- "--drop-same-frames=30"
|
||||
@ -67,6 +67,8 @@ kvmd:
|
||||
- "--h264-bitrate={h264_bitrate}"
|
||||
- "--h264-gop={h264_gop}"
|
||||
- "--h264-preset=ultrafast"
|
||||
- "--h264-hwenc=disabled"
|
||||
- "--h264-hwenc-fallback"
|
||||
- "--slowdown"
|
||||
gpio:
|
||||
drivers:
|
||||
@ -168,6 +170,9 @@ otgnet:
|
||||
- "/bin/true"
|
||||
pre_stop_cmd:
|
||||
- "/bin/true"
|
||||
sysctl_cmd:
|
||||
#- "/usr/sbin/sysctl"
|
||||
- "/bin/true"
|
||||
|
||||
nginx:
|
||||
http:
|
||||
|
||||
@ -1,12 +1,9 @@
|
||||
# This file describes the credentials for VNCAuth. The left part before arrow is a passphrase
|
||||
# for VNCAuth. The right part is username and password with which the user can access to KVMD API.
|
||||
# The arrow is used as a separator and shows the relationship of user registrations on the system.
|
||||
# This file contains passwords for the legacy VNCAuth, one per line.
|
||||
# The passwords are NOT encrypted.
|
||||
#
|
||||
# Never use the same passwords for VNC and IPMI users. This default configuration is shown here
|
||||
# for example only.
|
||||
# WARNING! The VNCAuth method is NOT secure and should not be used at all.
|
||||
# But we support it for compatibility with some clients.
|
||||
#
|
||||
# If this file does not contain any entries, VNCAuth will be disabled and you will only be able
|
||||
# to login in using your KVMD username and password using VeNCrypt methods.
|
||||
# NEVER use the same passwords for KVMD, IPMI and VNCAuth users.
|
||||
|
||||
# pa$$phr@se -> admin:password
|
||||
admin -> admin:admin
|
||||
admin
|
||||
|
||||
@ -24,6 +24,7 @@ location @login {
|
||||
|
||||
location /login {
|
||||
root /usr/share/kvmd/web;
|
||||
include /etc/kvmd/nginx/loc-nocache.conf;
|
||||
auth_request off;
|
||||
}
|
||||
|
||||
@ -65,6 +66,7 @@ location /api/hid/print {
|
||||
proxy_pass http://kvmd;
|
||||
include /etc/kvmd/nginx/loc-proxy.conf;
|
||||
include /etc/kvmd/nginx/loc-bigpost.conf;
|
||||
proxy_read_timeout 7d;
|
||||
auth_request off;
|
||||
}
|
||||
|
||||
|
||||
@ -1,4 +1,2 @@
|
||||
limit_rate 6250k;
|
||||
limit_rate_after 50k;
|
||||
client_max_body_size 0;
|
||||
proxy_request_buffering off;
|
||||
|
||||
@ -39,9 +39,9 @@ http {
|
||||
% if https_enabled:
|
||||
|
||||
server {
|
||||
listen ${http_port};
|
||||
listen ${http_ipv4}:${http_port};
|
||||
% if ipv6_enabled:
|
||||
listen [::]:${http_port};
|
||||
listen [${http_ipv6}]:${http_port};
|
||||
% endif
|
||||
include /etc/kvmd/nginx/certbot.ctx-server.conf;
|
||||
location / {
|
||||
@ -54,9 +54,9 @@ http {
|
||||
}
|
||||
|
||||
server {
|
||||
listen ${https_port} ssl http2;
|
||||
listen ${https_ipv4}:${https_port} ssl;
|
||||
% if ipv6_enabled:
|
||||
listen [::]:${https_port} ssl http2;
|
||||
listen [${https_ipv6}]:${https_port} ssl;
|
||||
% endif
|
||||
include /etc/kvmd/nginx/ssl.conf;
|
||||
include /etc/kvmd/nginx/kvmd.ctx-server.conf;
|
||||
@ -66,9 +66,9 @@ http {
|
||||
% else:
|
||||
|
||||
server {
|
||||
listen ${http_port};
|
||||
listen ${http_ipv4}:${http_port};
|
||||
% if ipv6_enabled:
|
||||
listen [::]:${http_port};
|
||||
listen [${http_ipv6}]:${http_port};
|
||||
% endif
|
||||
include /etc/kvmd/nginx/certbot.ctx-server.conf;
|
||||
include /etc/kvmd/nginx/kvmd.ctx-server.conf;
|
||||
|
||||
@ -3,7 +3,7 @@
|
||||
initramfs initramfs-linux.img followkernel
|
||||
|
||||
hdmi_force_hotplug=1
|
||||
gpu_mem=128
|
||||
gpu_mem=192
|
||||
enable_uart=1
|
||||
dtoverlay=disable-bt
|
||||
|
||||
|
||||
@ -1 +1 @@
|
||||
s/rootwait/rootwait cma=128M/g
|
||||
s/rootwait/rootwait cma=192M/g
|
||||
|
||||
16
configs/os/services/kvmd-localhid.service
Normal file
16
configs/os/services/kvmd-localhid.service
Normal file
@ -0,0 +1,16 @@
|
||||
[Unit]
|
||||
Description=PiKVM - Local HID to KVMD proxy
|
||||
After=kvmd.service systemd-udevd.service
|
||||
|
||||
[Service]
|
||||
User=kvmd-localhid
|
||||
Group=kvmd-localhid
|
||||
Type=simple
|
||||
Restart=always
|
||||
RestartSec=3
|
||||
|
||||
ExecStart=/usr/bin/kvmd-localhid --run
|
||||
TimeoutStopSec=3
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@ -0,0 +1,8 @@
|
||||
# Fix https://github.com/pikvm/pikvm/issues/1514:
|
||||
# Wait for any single network interface, not all configured ones
|
||||
# (Rationale: when user configures Wi-Fi via pikvm.txt or otherwise,
|
||||
# we do not delete the Ethernet config, which means it will remain active
|
||||
# regardless of whether the user ever intended to use Ethernet.)
|
||||
[Service]
|
||||
ExecStart=
|
||||
ExecStart=/usr/lib/systemd/systemd-networkd-wait-online --any
|
||||
@ -1,8 +1,10 @@
|
||||
g kvmd - -
|
||||
g kvmd-selfauth - -
|
||||
g kvmd-media - -
|
||||
g kvmd-pst - -
|
||||
g kvmd-ipmi - -
|
||||
g kvmd-vnc - -
|
||||
g kvmd-localhid - -
|
||||
g kvmd-nginx - -
|
||||
g kvmd-janus - -
|
||||
g kvmd-certbot - -
|
||||
@ -12,6 +14,7 @@ u kvmd-media - "PiKVM - The media proxy"
|
||||
u kvmd-pst - "PiKVM - Persistent storage" -
|
||||
u kvmd-ipmi - "PiKVM - IPMI to KVMD proxy" -
|
||||
u kvmd-vnc - "PiKVM - VNC to KVMD/Streamer proxy" -
|
||||
u kvmd-localhid - "PiKVM - Local HID to KVMD proxy" -
|
||||
u kvmd-nginx - "PiKVM - HTTP entrypoint" -
|
||||
u kvmd-janus - "PiKVM - Janus WebRTC Gateway" -
|
||||
u kvmd-certbot - "PiKVM - Certbot-Renew for KVMD-Nginx"
|
||||
@ -29,10 +32,16 @@ m kvmd-media kvmd
|
||||
m kvmd-pst kvmd
|
||||
|
||||
m kvmd-ipmi kvmd
|
||||
m kvmd-ipmi kvmd-selfauth
|
||||
|
||||
m kvmd-vnc kvmd
|
||||
m kvmd-vnc kvmd-selfauth
|
||||
m kvmd-vnc kvmd-certbot
|
||||
|
||||
m kvmd-localhid input
|
||||
m kvmd-localhid kvmd
|
||||
m kvmd-localhid kvmd-selfauth
|
||||
|
||||
m kvmd-janus kvmd
|
||||
m kvmd-janus audio
|
||||
|
||||
|
||||
@ -1,4 +1,15 @@
|
||||
# Here are described some bindings for PiKVM devices.
|
||||
# Do not edit this file.
|
||||
KERNEL=="ttyACM[0-9]*", SUBSYSTEM=="tty", SUBSYSTEMS=="usb", ATTRS{idVendor}=="1209", ATTRS{idProduct}=="eda3", SYMLINK+="kvmd-hid-bridge"
|
||||
KERNEL=="ttyACM[0-9]*", SUBSYSTEM=="tty", SUBSYSTEMS=="usb", ATTRS{idVendor}=="2e8a", ATTRS{idProduct}=="1080", SYMLINK+="kvmd-switch"
|
||||
|
||||
ACTION!="remove", KERNEL=="ttyACM[0-9]*", SUBSYSTEM=="tty", SUBSYSTEMS=="usb", ATTRS{idVendor}=="1209", ATTRS{idProduct}=="eda3", SYMLINK+="kvmd-hid-bridge"
|
||||
ACTION!="remove", KERNEL=="ttyACM[0-9]*", SUBSYSTEM=="tty", SUBSYSTEMS=="usb", ATTRS{idVendor}=="2e8a", ATTRS{idProduct}=="1080", SYMLINK+="kvmd-switch"
|
||||
|
||||
# Disable USB autosuspend for critical devices
|
||||
ACTION!="remove", SUBSYSTEM=="usb", ATTR{idVendor}=="1209", ATTR{idProduct}=="eda3", GOTO="kvmd-usb"
|
||||
ACTION!="remove", SUBSYSTEM=="usb", ATTR{idVendor}=="2e8a", ATTR{idProduct}=="1080", GOTO="kvmd-usb"
|
||||
GOTO="end"
|
||||
|
||||
LABEL="kvmd-usb"
|
||||
ATTR{power/control}="on", ATTR{power/autosuspend_delay_ms}="-1"
|
||||
|
||||
LABEL="end"
|
||||
|
||||
1663
contrib/keymaps/en-us-colemak
Normal file
1663
contrib/keymaps/en-us-colemak
Normal file
File diff suppressed because it is too large
Load Diff
@ -49,13 +49,15 @@ oneeighth 0x03 shift altgr
|
||||
quotedbl 0x04
|
||||
3 0x04 shift
|
||||
numbersign 0x04 altgr
|
||||
sterling 0x04 shift altgr
|
||||
# KVMD
|
||||
#sterling 0x04 shift altgr
|
||||
|
||||
# evdev 5 (0x5), QKeyCode "4", number 0x5
|
||||
apostrophe 0x05
|
||||
4 0x05 shift
|
||||
braceleft 0x05 altgr
|
||||
dollar 0x05 shift altgr
|
||||
# KVMD
|
||||
#dollar 0x05 shift altgr
|
||||
|
||||
# evdev 6 (0x6), QKeyCode "5", number 0x6
|
||||
parenleft 0x06
|
||||
@ -91,7 +93,8 @@ plusminus 0x0a shift altgr
|
||||
agrave 0x0b
|
||||
0 0x0b shift
|
||||
at 0x0b altgr
|
||||
degree 0x0b shift altgr
|
||||
# KVMD
|
||||
#degree 0x0b shift altgr
|
||||
|
||||
# evdev 12 (0xc), QKeyCode "minus", number 0xc
|
||||
parenright 0x0c
|
||||
@ -122,7 +125,8 @@ AE 0x10 shift altgr
|
||||
z 0x11
|
||||
Z 0x11 shift
|
||||
guillemotleft 0x11 altgr
|
||||
less 0x11 shift altgr
|
||||
#KVMD
|
||||
#less 0x11 shift altgr
|
||||
|
||||
# evdev 18 (0x12), QKeyCode "e", number 0x12
|
||||
e 0x12
|
||||
@ -200,7 +204,8 @@ Greek_OMEGA 0x1e shift altgr
|
||||
s 0x1f
|
||||
S 0x1f shift
|
||||
ssharp 0x1f altgr
|
||||
section 0x1f shift altgr
|
||||
# KVMD
|
||||
#section 0x1f shift altgr
|
||||
|
||||
# evdev 32 (0x20), QKeyCode "d", number 0x20
|
||||
d 0x20
|
||||
@ -247,7 +252,8 @@ Lstroke 0x26 shift altgr
|
||||
# evdev 39 (0x27), QKeyCode "semicolon", number 0x27
|
||||
m 0x27
|
||||
M 0x27 shift
|
||||
mu 0x27 altgr
|
||||
# KVMD
|
||||
#mu 0x27 altgr
|
||||
masculine 0x27 shift altgr
|
||||
|
||||
# evdev 40 (0x28), QKeyCode "apostrophe", number 0x28
|
||||
@ -280,7 +286,8 @@ Lstroke 0x2c shift altgr
|
||||
x 0x2d
|
||||
X 0x2d shift
|
||||
guillemotright 0x2d altgr
|
||||
greater 0x2d shift altgr
|
||||
# KVMD
|
||||
#greater 0x2d shift altgr
|
||||
|
||||
# evdev 46 (0x2e), QKeyCode "c", number 0x2e
|
||||
c 0x2e
|
||||
|
||||
@ -69,9 +69,10 @@ class _X11Key:
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class _KeyMapping:
|
||||
web_name: str
|
||||
evdev_name: str
|
||||
mcu_code: int
|
||||
usb_key: _UsbKey
|
||||
ps2_key: _Ps2Key
|
||||
ps2_key: (_Ps2Key | None)
|
||||
at1_code: int
|
||||
x11_keys: set[_X11Key]
|
||||
|
||||
@ -107,7 +108,9 @@ def _parse_usb_key(key: str) -> _UsbKey:
|
||||
return _UsbKey(code, is_modifier)
|
||||
|
||||
|
||||
def _parse_ps2_key(key: str) -> _Ps2Key:
|
||||
def _parse_ps2_key(key: str) -> (_Ps2Key | None):
|
||||
if ":" not in key:
|
||||
return None
|
||||
(code_type, raw_code) = key.split(":")
|
||||
return _Ps2Key(
|
||||
code=int(raw_code, 16),
|
||||
@ -122,6 +125,7 @@ def _read_keymap_csv(path: str) -> list[_KeyMapping]:
|
||||
if len(row) >= 6:
|
||||
keymap.append(_KeyMapping(
|
||||
web_name=row["web_name"],
|
||||
evdev_name=row["evdev_name"],
|
||||
mcu_code=int(row["mcu_code"]),
|
||||
usb_key=_parse_usb_key(row["usb_key"]),
|
||||
ps2_key=_parse_ps2_key(row["ps2_key"]),
|
||||
@@ -150,6 +154,7 @@ def main() -> None:

# Fields list:
# - Web
# - Linux/evdev
# - MCU code
# - USB code (^ for the modifier mask)
# - PS/2 key
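For illustration only (not part of this commit): the field list above mirrors the keymap.csv header that appears later in this diff (web_name, evdev_name, mcu_code, usb_key, ps2_key, at1_code, x11_names). A minimal sketch of reading such a row with csv.DictReader; the real keygen code builds _KeyMapping dataclasses with dedicated parsers instead of plain dicts, so everything below is illustrative:

import csv

# Hypothetical standalone reader for keymap.csv; the keygen code in this commit
# builds _KeyMapping dataclasses with parsers like _parse_usb_key instead.
def read_keymap(path: str) -> list[dict]:
    rows: list[dict] = []
    with open(path, newline="") as file:
        for row in csv.DictReader(file):
            rows.append({
                "web_name": row["web_name"],        # e.g. "KeyA"
                "evdev_name": row["evdev_name"],    # e.g. "KEY_A" (the new column)
                "mcu_code": int(row["mcu_code"]),   # decimal MCU code
                "usb_key": row["usb_key"],          # "0x04", or "^0x01" for modifiers
                "ps2_key": row["ps2_key"] or None,  # empty for keys without a PS/2 code
            })
    return rows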
@@ -24,8 +24,8 @@ upload:
bash -ex -c " \
current=`cat .current`; \
if [ '$($@_CURRENT)' == 'spi' ] || [ '$($@_CURRENT)' == 'aum' ]; then \
gpioset 0 25=1; \
gpioset 0 25=0; \
gpioset -c gpiochip0 -t 30ms,0 25=1; \
gpioset -c gpiochip0 -t 30ms,0 25=0; \
fi \
"
platformio run --environment '$($@_CURRENT)' --project-conf 'platformio-$($@_CONFIG).ini' --target upload
@ -2,6 +2,7 @@ programmer
|
||||
id = "rpi";
|
||||
desc = "RPi SPI programmer";
|
||||
type = "linuxspi";
|
||||
prog_modes = PM_ISP;
|
||||
reset = 25;
|
||||
baudrate = 400000;
|
||||
;
|
||||
|
||||
@ -148,5 +148,8 @@ void keymapPs2(uint8_t code, Ps2KeyType *ps2_type, uint8_t *ps2_code) {
|
||||
case 109: *ps2_type = PS2_KEY_TYPE_REG; *ps2_code = 19; return; // KanaMode
|
||||
case 110: *ps2_type = PS2_KEY_TYPE_REG; *ps2_code = 100; return; // Convert
|
||||
case 111: *ps2_type = PS2_KEY_TYPE_REG; *ps2_code = 103; return; // NonConvert
|
||||
case 112: *ps2_type = PS2_KEY_TYPE_SPEC; *ps2_code = 35; return; // AudioVolumeMute
|
||||
case 113: *ps2_type = PS2_KEY_TYPE_SPEC; *ps2_code = 50; return; // AudioVolumeUp
|
||||
case 114: *ps2_type = PS2_KEY_TYPE_SPEC; *ps2_code = 33; return; // AudioVolumeDown
|
||||
}
|
||||
}
|
||||
|
||||
@ -38,7 +38,9 @@ void keymapPs2(uint8_t code, Ps2KeyType *ps2_type, uint8_t *ps2_code) {
|
||||
|
||||
switch (code) {
|
||||
% for km in sorted(keymap, key=operator.attrgetter("mcu_code")):
|
||||
% if km.ps2_key is not None:
|
||||
case ${km.mcu_code}: *ps2_type = PS2_KEY_TYPE_${km.ps2_key.type.upper()}; *ps2_code = ${km.ps2_key.code}; return; // ${km.web_name}
|
||||
% endif
|
||||
% endfor
|
||||
}
|
||||
}
|
||||
|
||||
@ -136,6 +136,10 @@ uint8_t keymapUsb(uint8_t code) {
|
||||
case 109: return 136; // KanaMode
|
||||
case 110: return 138; // Convert
|
||||
case 111: return 139; // NonConvert
|
||||
case 112: return 127; // AudioVolumeMute
|
||||
case 113: return 128; // AudioVolumeUp
|
||||
case 114: return 129; // AudioVolumeDown
|
||||
case 115: return 111; // F20
|
||||
default: return 0;
|
||||
}
|
||||
}
|
||||
|
||||
@ -82,8 +82,6 @@ build_flags =
|
||||
-DCDC_DISABLED
|
||||
upload_protocol = custom
|
||||
upload_flags =
|
||||
-C
|
||||
$PROJECT_PACKAGES_DIR/tool-avrdude/avrdude.conf
|
||||
-C
|
||||
+avrdude-rpi.conf
|
||||
-P
|
||||
|
||||
@ -28,11 +28,14 @@ define libdep
|
||||
endef
|
||||
.pico-sdk:
|
||||
$(call libdep,pico-sdk,raspberrypi/pico-sdk,6a7db34ff63345a7badec79ebea3aaef1712f374)
|
||||
.pico-sdk.patches: .pico-sdk
|
||||
patch -d .pico-sdk -p1 < patches/pico-sdk.patch
|
||||
touch .pico-sdk.patches
|
||||
.tinyusb:
|
||||
$(call libdep,tinyusb,hathach/tinyusb,d713571cd44f05d2fc72efc09c670787b74106e0)
|
||||
.ps2x2pico:
|
||||
$(call libdep,ps2x2pico,No0ne/ps2x2pico,26ce89d597e598bb0ac636622e064202d91a9efc)
|
||||
deps: .pico-sdk .tinyusb .ps2x2pico
|
||||
deps: .pico-sdk .pico-sdk.patches .tinyusb .ps2x2pico
|
||||
|
||||
|
||||
.PHONY: deps
|
||||
|
||||
10 hid/pico/patches/pico-sdk.patch (normal file)
@@ -0,0 +1,10 @@
diff --git a/tools/pioasm/CMakeLists.txt b/tools/pioasm/CMakeLists.txt
index 322408a..fc8e4b8 100644
--- a/tools/pioasm/CMakeLists.txt
+++ b/tools/pioasm/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.4)
+cmake_minimum_required(VERSION 3.5)
project(pioasm CXX)

set(CMAKE_CXX_STANDARD 11)
@ -138,6 +138,10 @@ inline u8 ph_usb_keymap(u8 key) {
|
||||
case 109: return 136; // KanaMode
|
||||
case 110: return 138; // Convert
|
||||
case 111: return 139; // NonConvert
|
||||
case 112: return 127; // AudioVolumeMute
|
||||
case 113: return 128; // AudioVolumeUp
|
||||
case 114: return 129; // AudioVolumeDown
|
||||
case 115: return 111; // F20
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
228 keymap.csv
@ -1,112 +1,116 @@
|
||||
web_name,mcu_code,usb_key,ps2_key,at1_code,x11_names
|
||||
KeyA,1,0x04,reg:0x1c,0x1e,"^XK_A,XK_a"
|
||||
KeyB,2,0x05,reg:0x32,0x30,"^XK_B,XK_b"
|
||||
KeyC,3,0x06,reg:0x21,0x2e,"^XK_C,XK_c"
|
||||
KeyD,4,0x07,reg:0x23,0x20,"^XK_D,XK_d"
|
||||
KeyE,5,0x08,reg:0x24,0x12,"^XK_E,XK_e"
|
||||
KeyF,6,0x09,reg:0x2b,0x21,"^XK_F,XK_f"
|
||||
KeyG,7,0x0a,reg:0x34,0x22,"^XK_G,XK_g"
|
||||
KeyH,8,0x0b,reg:0x33,0x23,"^XK_H,XK_h"
|
||||
KeyI,9,0x0c,reg:0x43,0x17,"^XK_I,XK_i"
|
||||
KeyJ,10,0x0d,reg:0x3b,0x24,"^XK_J,XK_j"
|
||||
KeyK,11,0x0e,reg:0x42,0x25,"^XK_K,XK_k"
|
||||
KeyL,12,0x0f,reg:0x4b,0x26,"^XK_L,XK_l"
|
||||
KeyM,13,0x10,reg:0x3a,0x32,"^XK_M,XK_m"
|
||||
KeyN,14,0x11,reg:0x31,0x31,"^XK_N,XK_n"
|
||||
KeyO,15,0x12,reg:0x44,0x18,"^XK_O,XK_o"
|
||||
KeyP,16,0x13,reg:0x4d,0x19,"^XK_P,XK_p"
|
||||
KeyQ,17,0x14,reg:0x15,0x10,"^XK_Q,XK_q"
|
||||
KeyR,18,0x15,reg:0x2d,0x13,"^XK_R,XK_r"
|
||||
KeyS,19,0x16,reg:0x1b,0x1f,"^XK_S,XK_s"
|
||||
KeyT,20,0x17,reg:0x2c,0x14,"^XK_T,XK_t"
|
||||
KeyU,21,0x18,reg:0x3c,0x16,"^XK_U,XK_u"
|
||||
KeyV,22,0x19,reg:0x2a,0x2f,"^XK_V,XK_v"
|
||||
KeyW,23,0x1a,reg:0x1d,0x11,"^XK_W,XK_w"
|
||||
KeyX,24,0x1b,reg:0x22,0x2d,"^XK_X,XK_x"
|
||||
KeyY,25,0x1c,reg:0x35,0x15,"^XK_Y,XK_y"
|
||||
KeyZ,26,0x1d,reg:0x1a,0x2c,"^XK_Z,XK_z"
|
||||
Digit1,27,0x1e,reg:0x16,0x02,"XK_1,^XK_exclam"
|
||||
Digit2,28,0x1f,reg:0x1e,0x03,"XK_2,^XK_at"
|
||||
Digit3,29,0x20,reg:0x26,0x04,"XK_3,^XK_numbersign"
|
||||
Digit4,30,0x21,reg:0x25,0x05,"XK_4,^XK_dollar"
|
||||
Digit5,31,0x22,reg:0x2e,0x06,"XK_5,^XK_percent"
|
||||
Digit6,32,0x23,reg:0x36,0x07,"XK_6,^XK_asciicircum"
|
||||
Digit7,33,0x24,reg:0x3d,0x08,"XK_7,^XK_ampersand"
|
||||
Digit8,34,0x25,reg:0x3e,0x09,"XK_8,^XK_asterisk"
|
||||
Digit9,35,0x26,reg:0x46,0x0a,"XK_9,^XK_parenleft"
|
||||
Digit0,36,0x27,reg:0x45,0x0b,"XK_0,^XK_parenright"
|
||||
Enter,37,0x28,reg:0x5a,0x1c,XK_Return
|
||||
Escape,38,0x29,reg:0x76,0x01,XK_Escape
|
||||
Backspace,39,0x2a,reg:0x66,0x0e,XK_BackSpace
|
||||
Tab,40,0x2b,reg:0x0d,0x0f,XK_Tab
|
||||
Space,41,0x2c,reg:0x29,0x39,XK_space
|
||||
Minus,42,0x2d,reg:0x4e,0x0c,"XK_minus,^XK_underscore"
|
||||
Equal,43,0x2e,reg:0x55,0x0d,"XK_equal,^XK_plus"
|
||||
BracketLeft,44,0x2f,reg:0x54,0x1a,"XK_bracketleft,^XK_braceleft"
|
||||
BracketRight,45,0x30,reg:0x5b,0x1b,"XK_bracketright,^XK_braceright"
|
||||
Backslash,46,0x31,reg:0x5d,0x2b,"XK_backslash,^XK_bar"
|
||||
Semicolon,47,0x33,reg:0x4c,0x27,"XK_semicolon,^XK_colon"
|
||||
Quote,48,0x34,reg:0x52,0x28,"XK_apostrophe,^XK_quotedbl"
|
||||
Backquote,49,0x35,reg:0x0e,0x29,"XK_grave,^XK_asciitilde"
|
||||
Comma,50,0x36,reg:0x41,0x33,"XK_comma,^XK_less"
|
||||
Period,51,0x37,reg:0x49,0x34,"XK_period,^XK_greater"
|
||||
Slash,52,0x38,reg:0x4a,0x35,"XK_slash,^XK_question"
|
||||
CapsLock,53,0x39,reg:0x58,0x3a,XK_Caps_Lock
|
||||
F1,54,0x3a,reg:0x05,0x3b,XK_F1
|
||||
F2,55,0x3b,reg:0x06,0x3c,XK_F2
|
||||
F3,56,0x3c,reg:0x04,0x3d,XK_F3
|
||||
F4,57,0x3d,reg:0x0c,0x3e,XK_F4
|
||||
F5,58,0x3e,reg:0x03,0x3f,XK_F5
|
||||
F6,59,0x3f,reg:0x0b,0x40,XK_F6
|
||||
F7,60,0x40,reg:0x83,0x41,XK_F7
|
||||
F8,61,0x41,reg:0x0a,0x42,XK_F8
|
||||
F9,62,0x42,reg:0x01,0x43,XK_F9
|
||||
F10,63,0x43,reg:0x09,0x44,XK_F10
|
||||
F11,64,0x44,reg:0x78,0x57,XK_F11
|
||||
F12,65,0x45,reg:0x07,0x58,XK_F12
|
||||
PrintScreen,66,0x46,print:0xff,0x54,XK_Sys_Req
|
||||
Insert,67,0x49,spec:0x70,0xe052,XK_Insert
|
||||
Home,68,0x4a,spec:0x6c,0xe047,XK_Home
|
||||
PageUp,69,0x4b,spec:0x7d,0xe049,XK_Page_Up
|
||||
Delete,70,0x4c,spec:0x71,0xe053,XK_Delete
|
||||
End,71,0x4d,spec:0x69,0xe04f,XK_End
|
||||
PageDown,72,0x4e,spec:0x7a,0xe051,XK_Page_Down
|
||||
ArrowRight,73,0x4f,spec:0x74,0xe04d,XK_Right
|
||||
ArrowLeft,74,0x50,spec:0x6b,0xe04b,XK_Left
|
||||
ArrowDown,75,0x51,spec:0x72,0xe050,XK_Down
|
||||
ArrowUp,76,0x52,spec:0x75,0xe048,XK_Up
|
||||
ControlLeft,77,^0x01,reg:0x14,0x1d,XK_Control_L
|
||||
ShiftLeft,78,^0x02,reg:0x12,0x2a,XK_Shift_L
|
||||
AltLeft,79,^0x04,reg:0x11,0x38,XK_Alt_L
|
||||
MetaLeft,80,^0x08,spec:0x1f,0xe05b,"XK_Meta_L,XK_Super_L"
|
||||
ControlRight,81,^0x10,spec:0x14,0xe01d,XK_Control_R
|
||||
ShiftRight,82,^0x20,reg:0x59,0x36,XK_Shift_R
|
||||
AltRight,83,^0x40,spec:0x11,0xe038,"XK_Alt_R,XK_ISO_Level3_Shift"
|
||||
MetaRight,84,^0x80,spec:0x27,0xe05c,"XK_Meta_R,XK_Super_R"
|
||||
Pause,85,0x48,pause:0xff,0xe046,XK_Pause
|
||||
ScrollLock,86,0x47,reg:0x7e,0x46,XK_Scroll_Lock
|
||||
NumLock,87,0x53,reg:0x77,0x45,XK_Num_Lock
|
||||
ContextMenu,88,0x65,spec:0x2f,0xe05d,XK_Menu
|
||||
NumpadDivide,89,0x54,spec:0x4a,0xe035,XK_KP_Divide
|
||||
NumpadMultiply,90,0x55,reg:0x7c,0x37,XK_multiply
|
||||
NumpadSubtract,91,0x56,reg:0x7b,0x4a,XK_KP_Subtract
|
||||
NumpadAdd,92,0x57,reg:0x79,0x4e,XK_KP_Add
|
||||
NumpadEnter,93,0x58,spec:0x5a,0xe01c,XK_KP_Enter
|
||||
Numpad1,94,0x59,reg:0x69,0x4f,XK_KP_1
|
||||
Numpad2,95,0x5a,reg:0x72,0x50,XK_KP_2
|
||||
Numpad3,96,0x5b,reg:0x7a,0x51,XK_KP_3
|
||||
Numpad4,97,0x5c,reg:0x6b,0x4b,XK_KP_4
|
||||
Numpad5,98,0x5d,reg:0x73,0x4c,XK_KP_5
|
||||
Numpad6,99,0x5e,reg:0x74,0x4d,XK_KP_6
|
||||
Numpad7,100,0x5f,reg:0x6c,0x47,XK_KP_7
|
||||
Numpad8,101,0x60,reg:0x75,0x48,XK_KP_8
|
||||
Numpad9,102,0x61,reg:0x7d,0x49,XK_KP_9
|
||||
Numpad0,103,0x62,reg:0x70,0x52,XK_KP_0
|
||||
NumpadDecimal,104,0x63,reg:0x71,0x53,XK_KP_Decimal
|
||||
Power,105,0x66,spec:0x5e,0xe05e,XK_XF86_Sleep
|
||||
IntlBackslash,106,0x64,reg:0x61,0x56,""
|
||||
IntlYen,107,0x89,reg:0x6a,0x7d,""
|
||||
IntlRo,108,0x87,reg:0x51,0x73,""
|
||||
KanaMode,109,0x88,reg:0x13,0x70,""
|
||||
Convert,110,0x8a,reg:0x64,0x79,""
|
||||
NonConvert,111,0x8b,reg:0x67,0x7b,""
|
||||
web_name,evdev_name,mcu_code,usb_key,ps2_key,at1_code,x11_names
|
||||
KeyA,KEY_A,1,0x04,reg:0x1c,0x1e,"^XK_A,XK_a"
|
||||
KeyB,KEY_B,2,0x05,reg:0x32,0x30,"^XK_B,XK_b"
|
||||
KeyC,KEY_C,3,0x06,reg:0x21,0x2e,"^XK_C,XK_c"
|
||||
KeyD,KEY_D,4,0x07,reg:0x23,0x20,"^XK_D,XK_d"
|
||||
KeyE,KEY_E,5,0x08,reg:0x24,0x12,"^XK_E,XK_e"
|
||||
KeyF,KEY_F,6,0x09,reg:0x2b,0x21,"^XK_F,XK_f"
|
||||
KeyG,KEY_G,7,0x0a,reg:0x34,0x22,"^XK_G,XK_g"
|
||||
KeyH,KEY_H,8,0x0b,reg:0x33,0x23,"^XK_H,XK_h"
|
||||
KeyI,KEY_I,9,0x0c,reg:0x43,0x17,"^XK_I,XK_i"
|
||||
KeyJ,KEY_J,10,0x0d,reg:0x3b,0x24,"^XK_J,XK_j"
|
||||
KeyK,KEY_K,11,0x0e,reg:0x42,0x25,"^XK_K,XK_k"
|
||||
KeyL,KEY_L,12,0x0f,reg:0x4b,0x26,"^XK_L,XK_l"
|
||||
KeyM,KEY_M,13,0x10,reg:0x3a,0x32,"^XK_M,XK_m"
|
||||
KeyN,KEY_N,14,0x11,reg:0x31,0x31,"^XK_N,XK_n"
|
||||
KeyO,KEY_O,15,0x12,reg:0x44,0x18,"^XK_O,XK_o"
|
||||
KeyP,KEY_P,16,0x13,reg:0x4d,0x19,"^XK_P,XK_p"
|
||||
KeyQ,KEY_Q,17,0x14,reg:0x15,0x10,"^XK_Q,XK_q"
|
||||
KeyR,KEY_R,18,0x15,reg:0x2d,0x13,"^XK_R,XK_r"
|
||||
KeyS,KEY_S,19,0x16,reg:0x1b,0x1f,"^XK_S,XK_s"
|
||||
KeyT,KEY_T,20,0x17,reg:0x2c,0x14,"^XK_T,XK_t"
|
||||
KeyU,KEY_U,21,0x18,reg:0x3c,0x16,"^XK_U,XK_u"
|
||||
KeyV,KEY_V,22,0x19,reg:0x2a,0x2f,"^XK_V,XK_v"
|
||||
KeyW,KEY_W,23,0x1a,reg:0x1d,0x11,"^XK_W,XK_w"
|
||||
KeyX,KEY_X,24,0x1b,reg:0x22,0x2d,"^XK_X,XK_x"
|
||||
KeyY,KEY_Y,25,0x1c,reg:0x35,0x15,"^XK_Y,XK_y"
|
||||
KeyZ,KEY_Z,26,0x1d,reg:0x1a,0x2c,"^XK_Z,XK_z"
|
||||
Digit1,KEY_1,27,0x1e,reg:0x16,0x02,"XK_1,^XK_exclam"
|
||||
Digit2,KEY_2,28,0x1f,reg:0x1e,0x03,"XK_2,^XK_at"
|
||||
Digit3,KEY_3,29,0x20,reg:0x26,0x04,"XK_3,^XK_numbersign"
|
||||
Digit4,KEY_4,30,0x21,reg:0x25,0x05,"XK_4,^XK_dollar"
|
||||
Digit5,KEY_5,31,0x22,reg:0x2e,0x06,"XK_5,^XK_percent"
|
||||
Digit6,KEY_6,32,0x23,reg:0x36,0x07,"XK_6,^XK_asciicircum"
|
||||
Digit7,KEY_7,33,0x24,reg:0x3d,0x08,"XK_7,^XK_ampersand"
|
||||
Digit8,KEY_8,34,0x25,reg:0x3e,0x09,"XK_8,^XK_asterisk"
|
||||
Digit9,KEY_9,35,0x26,reg:0x46,0x0a,"XK_9,^XK_parenleft"
|
||||
Digit0,KEY_0,36,0x27,reg:0x45,0x0b,"XK_0,^XK_parenright"
|
||||
Enter,KEY_ENTER,37,0x28,reg:0x5a,0x1c,XK_Return
|
||||
Escape,KEY_ESC,38,0x29,reg:0x76,0x01,XK_Escape
|
||||
Backspace,KEY_BACKSPACE,39,0x2a,reg:0x66,0x0e,XK_BackSpace
|
||||
Tab,KEY_TAB,40,0x2b,reg:0x0d,0x0f,XK_Tab
|
||||
Space,KEY_SPACE,41,0x2c,reg:0x29,0x39,XK_space
|
||||
Minus,KEY_MINUS,42,0x2d,reg:0x4e,0x0c,"XK_minus,^XK_underscore"
|
||||
Equal,KEY_EQUAL,43,0x2e,reg:0x55,0x0d,"XK_equal,^XK_plus"
|
||||
BracketLeft,KEY_LEFTBRACE,44,0x2f,reg:0x54,0x1a,"XK_bracketleft,^XK_braceleft"
|
||||
BracketRight,KEY_RIGHTBRACE,45,0x30,reg:0x5b,0x1b,"XK_bracketright,^XK_braceright"
|
||||
Backslash,KEY_BACKSLASH,46,0x31,reg:0x5d,0x2b,"XK_backslash,^XK_bar"
|
||||
Semicolon,KEY_SEMICOLON,47,0x33,reg:0x4c,0x27,"XK_semicolon,^XK_colon"
|
||||
Quote,KEY_APOSTROPHE,48,0x34,reg:0x52,0x28,"XK_apostrophe,^XK_quotedbl"
|
||||
Backquote,KEY_GRAVE,49,0x35,reg:0x0e,0x29,"XK_grave,^XK_asciitilde"
|
||||
Comma,KEY_COMMA,50,0x36,reg:0x41,0x33,"XK_comma,^XK_less"
|
||||
Period,KEY_DOT,51,0x37,reg:0x49,0x34,"XK_period,^XK_greater"
|
||||
Slash,KEY_SLASH,52,0x38,reg:0x4a,0x35,"XK_slash,^XK_question"
|
||||
CapsLock,KEY_CAPSLOCK,53,0x39,reg:0x58,0x3a,XK_Caps_Lock
|
||||
F1,KEY_F1,54,0x3a,reg:0x05,0x3b,XK_F1
|
||||
F2,KEY_F2,55,0x3b,reg:0x06,0x3c,XK_F2
|
||||
F3,KEY_F3,56,0x3c,reg:0x04,0x3d,XK_F3
|
||||
F4,KEY_F4,57,0x3d,reg:0x0c,0x3e,XK_F4
|
||||
F5,KEY_F5,58,0x3e,reg:0x03,0x3f,XK_F5
|
||||
F6,KEY_F6,59,0x3f,reg:0x0b,0x40,XK_F6
|
||||
F7,KEY_F7,60,0x40,reg:0x83,0x41,XK_F7
|
||||
F8,KEY_F8,61,0x41,reg:0x0a,0x42,XK_F8
|
||||
F9,KEY_F9,62,0x42,reg:0x01,0x43,XK_F9
|
||||
F10,KEY_F10,63,0x43,reg:0x09,0x44,XK_F10
|
||||
F11,KEY_F11,64,0x44,reg:0x78,0x57,XK_F11
|
||||
F12,KEY_F12,65,0x45,reg:0x07,0x58,XK_F12
|
||||
PrintScreen,KEY_SYSRQ,66,0x46,print:0xff,0x54,XK_Sys_Req
|
||||
Insert,KEY_INSERT,67,0x49,spec:0x70,0xe052,XK_Insert
|
||||
Home,KEY_HOME,68,0x4a,spec:0x6c,0xe047,XK_Home
|
||||
PageUp,KEY_PAGEUP,69,0x4b,spec:0x7d,0xe049,XK_Page_Up
|
||||
Delete,KEY_DELETE,70,0x4c,spec:0x71,0xe053,XK_Delete
|
||||
End,KEY_END,71,0x4d,spec:0x69,0xe04f,XK_End
|
||||
PageDown,KEY_PAGEDOWN,72,0x4e,spec:0x7a,0xe051,XK_Page_Down
|
||||
ArrowRight,KEY_RIGHT,73,0x4f,spec:0x74,0xe04d,XK_Right
|
||||
ArrowLeft,KEY_LEFT,74,0x50,spec:0x6b,0xe04b,XK_Left
|
||||
ArrowDown,KEY_DOWN,75,0x51,spec:0x72,0xe050,XK_Down
|
||||
ArrowUp,KEY_UP,76,0x52,spec:0x75,0xe048,XK_Up
|
||||
ControlLeft,KEY_LEFTCTRL,77,^0x01,reg:0x14,0x1d,XK_Control_L
|
||||
ShiftLeft,KEY_LEFTSHIFT,78,^0x02,reg:0x12,0x2a,XK_Shift_L
|
||||
AltLeft,KEY_LEFTALT,79,^0x04,reg:0x11,0x38,XK_Alt_L
|
||||
MetaLeft,KEY_LEFTMETA,80,^0x08,spec:0x1f,0xe05b,"XK_Meta_L,XK_Super_L"
|
||||
ControlRight,KEY_RIGHTCTRL,81,^0x10,spec:0x14,0xe01d,XK_Control_R
|
||||
ShiftRight,KEY_RIGHTSHIFT,82,^0x20,reg:0x59,0x36,XK_Shift_R
|
||||
AltRight,KEY_RIGHTALT,83,^0x40,spec:0x11,0xe038,"XK_Alt_R,XK_ISO_Level3_Shift"
|
||||
MetaRight,KEY_RIGHTMETA,84,^0x80,spec:0x27,0xe05c,"XK_Meta_R,XK_Super_R"
|
||||
Pause,KEY_PAUSE,85,0x48,pause:0xff,0xe046,XK_Pause
|
||||
ScrollLock,KEY_SCROLLLOCK,86,0x47,reg:0x7e,0x46,XK_Scroll_Lock
|
||||
NumLock,KEY_NUMLOCK,87,0x53,reg:0x77,0x45,XK_Num_Lock
|
||||
ContextMenu,KEY_CONTEXT_MENU,88,0x65,spec:0x2f,0xe05d,XK_Menu
|
||||
NumpadDivide,KEY_KPSLASH,89,0x54,spec:0x4a,0xe035,XK_KP_Divide
|
||||
NumpadMultiply,KEY_KPASTERISK,90,0x55,reg:0x7c,0x37,XK_multiply
|
||||
NumpadSubtract,KEY_KPMINUS,91,0x56,reg:0x7b,0x4a,XK_KP_Subtract
|
||||
NumpadAdd,KEY_KPPLUS,92,0x57,reg:0x79,0x4e,XK_KP_Add
|
||||
NumpadEnter,KEY_KPENTER,93,0x58,spec:0x5a,0xe01c,XK_KP_Enter
|
||||
Numpad1,KEY_KP1,94,0x59,reg:0x69,0x4f,XK_KP_1
|
||||
Numpad2,KEY_KP2,95,0x5a,reg:0x72,0x50,XK_KP_2
|
||||
Numpad3,KEY_KP3,96,0x5b,reg:0x7a,0x51,XK_KP_3
|
||||
Numpad4,KEY_KP4,97,0x5c,reg:0x6b,0x4b,XK_KP_4
|
||||
Numpad5,KEY_KP5,98,0x5d,reg:0x73,0x4c,XK_KP_5
|
||||
Numpad6,KEY_KP6,99,0x5e,reg:0x74,0x4d,XK_KP_6
|
||||
Numpad7,KEY_KP7,100,0x5f,reg:0x6c,0x47,XK_KP_7
|
||||
Numpad8,KEY_KP8,101,0x60,reg:0x75,0x48,XK_KP_8
|
||||
Numpad9,KEY_KP9,102,0x61,reg:0x7d,0x49,XK_KP_9
|
||||
Numpad0,KEY_KP0,103,0x62,reg:0x70,0x52,XK_KP_0
|
||||
NumpadDecimal,KEY_KPDOT,104,0x63,reg:0x71,0x53,XK_KP_Decimal
|
||||
Power,KEY_POWER,105,0x66,spec:0x5e,0xe05e,XK_XF86_Sleep
|
||||
IntlBackslash,KEY_102ND,106,0x64,reg:0x61,0x56,
|
||||
IntlYen,KEY_YEN,107,0x89,reg:0x6a,0x7d,
|
||||
IntlRo,KEY_RO,108,0x87,reg:0x51,0x73,
|
||||
KanaMode,KEY_KATAKANA,109,0x88,reg:0x13,0x70,
|
||||
Convert,KEY_HENKAN,110,0x8a,reg:0x64,0x79,
|
||||
NonConvert,KEY_MUHENKAN,111,0x8b,reg:0x67,0x7b,
|
||||
AudioVolumeMute,KEY_MUTE,112,0x7f,spec:0x23,0xe020,
|
||||
AudioVolumeUp,KEY_VOLUMEUP,113,0x80,spec:0x32,0xe030,
|
||||
AudioVolumeDown,KEY_VOLUMEDOWN,114,0x81,spec:0x21,0xe02e,
|
||||
F20,KEY_F20,115,0x6f,,0x5a,
|
||||
|
||||
|
@ -112,6 +112,13 @@ EOF
|
||||
cp /usr/share/kvmd/configs.default/janus/janus.plugin.ustreamer.jcfg /etc/kvmd/janus || true
|
||||
fi
|
||||
|
||||
if [[ "$(vercmp "$2" 4.60)" -lt 0 ]]; then
|
||||
if grep -q "^dtoverlay=vc4-kms-v3d" /boot/config.txt; then
|
||||
sed -i -e "s/cma=128M/cma=192M/g" /boot/cmdline.txt || true
|
||||
sed -i -e "s/^gpu_mem=128/gpu_mem=192/g" /boot/config.txt || true
|
||||
fi
|
||||
fi
|
||||
|
||||
# Some update deletes /etc/motd, WTF
|
||||
# shellcheck disable=SC2015,SC2166
|
||||
[ ! -f /etc/motd -a -f /etc/motd.pacsave ] && mv /etc/motd.pacsave /etc/motd || true
|
||||
|
||||
@ -20,4 +20,4 @@
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
__version__ = "4.49"
|
||||
__version__ = "4.94"
|
||||
|
||||
@ -23,6 +23,7 @@
|
||||
import asyncio
|
||||
import threading
|
||||
import dataclasses
|
||||
import typing
|
||||
|
||||
import gpiod
|
||||
|
||||
@ -101,10 +102,10 @@ class AioReader: # pylint: disable=too-many-instance-attributes
|
||||
if line_req.wait_edge_events(1):
|
||||
new: dict[int, bool] = {}
|
||||
for event in line_req.read_edge_events():
|
||||
(pin, value) = self.__parse_event(event)
|
||||
new[pin] = value
|
||||
for (pin, value) in new.items():
|
||||
self.__values[pin].set(value)
|
||||
(pin, state) = self.__parse_event(event)
|
||||
new[pin] = state
|
||||
for (pin, state) in new.items():
|
||||
self.__values[pin].set(state)
|
||||
else: # Timeout
|
||||
# XXX: The limit was relevant for 1.6. Need to check whether this has changed in 2.x.
|
||||
# The kernel buffer size is 16 events per line. When this number is exceeded,
|
||||
@ -114,11 +115,12 @@ class AioReader: # pylint: disable=too-many-instance-attributes
|
||||
self.__values[pin].set(bool(value.value)) # type: ignore
|
||||
|
||||
def __parse_event(self, event: gpiod.EdgeEvent) -> tuple[int, bool]:
|
||||
if event.event_type == event.Type.RISING_EDGE:
|
||||
return (event.line_offset, True)
|
||||
elif event.event_type == event.Type.FALLING_EDGE:
|
||||
return (event.line_offset, False)
|
||||
raise RuntimeError(f"Invalid event {event} type: {event.type}")
|
||||
match event.event_type:
|
||||
case event.Type.RISING_EDGE:
|
||||
return (event.line_offset, True)
|
||||
case event.Type.FALLING_EDGE:
|
||||
return (event.line_offset, False)
|
||||
typing.assert_never(event.event_type)
|
||||
|
||||
|
||||
class _DebouncedValue:
|
||||
|
||||
@ -211,6 +211,18 @@ async def wait_first(*aws: asyncio.Task) -> tuple[set[asyncio.Task], set[asyncio
|
||||
return (await asyncio.wait(list(aws), return_when=asyncio.FIRST_COMPLETED))
|
||||
|
||||
|
||||
# =====
|
||||
async def spawn_and_follow(*coros: Coroutine) -> None:
|
||||
tasks: list[asyncio.Task] = list(map(asyncio.create_task, coros))
|
||||
try:
|
||||
await asyncio.gather(*tasks)
|
||||
except Exception:
|
||||
for task in tasks:
|
||||
task.cancel()
|
||||
await asyncio.gather(*tasks, return_exceptions=True)
|
||||
raise
|
||||
|
||||
|
||||
# =====
|
||||
async def close_writer(writer: asyncio.StreamWriter) -> bool:
|
||||
closing = writer.is_closing()
|
||||
|
||||
@ -65,6 +65,7 @@ from ..validators.basic import valid_string_list
|
||||
|
||||
from ..validators.auth import valid_user
|
||||
from ..validators.auth import valid_users_list
|
||||
from ..validators.auth import valid_expire
|
||||
|
||||
from ..validators.os import valid_abs_path
|
||||
from ..validators.os import valid_abs_file
|
||||
@ -73,6 +74,7 @@ from ..validators.os import valid_unix_mode
|
||||
from ..validators.os import valid_options
|
||||
from ..validators.os import valid_command
|
||||
|
||||
from ..validators.net import valid_ip
|
||||
from ..validators.net import valid_ip_or_host
|
||||
from ..validators.net import valid_net
|
||||
from ..validators.net import valid_port
|
||||
@ -190,6 +192,14 @@ def _init_config(config_path: str, override_options: list[str], **load_flags: bo
|
||||
|
||||
|
||||
def _patch_raw(raw_config: dict) -> None: # pylint: disable=too-many-branches
|
||||
for (sub, cmd) in [("iface", "ip_cmd"), ("firewall", "iptables_cmd")]:
|
||||
if isinstance(raw_config.get("otgnet"), dict):
|
||||
if isinstance(raw_config["otgnet"].get(sub), dict):
|
||||
if raw_config["otgnet"][sub].get(cmd):
|
||||
raw_config["otgnet"].setdefault("commands", {})
|
||||
raw_config["otgnet"]["commands"][cmd] = raw_config["otgnet"][sub][cmd]
|
||||
del raw_config["otgnet"][sub][cmd]
|
||||
|
||||
if isinstance(raw_config.get("otg"), dict):
|
||||
for (old, new) in [
|
||||
("msd", "msd"),
|
||||
@ -357,6 +367,12 @@ def _get_config_scheme() -> dict:
|
||||
|
||||
"auth": {
|
||||
"enabled": Option(True, type=valid_bool),
|
||||
"expire": Option(0, type=valid_expire),
|
||||
|
||||
"usc": {
|
||||
"users": Option([], type=valid_users_list), # PiKVM username has a same regex as a UNIX username
|
||||
"groups": Option(["kvmd-selfauth"], type=valid_users_list), # groupname has a same regex as a username
|
||||
},
|
||||
|
||||
"internal": {
|
||||
"type": Option("htpasswd"),
|
||||
@ -457,7 +473,7 @@ def _get_config_scheme() -> dict:
|
||||
|
||||
"unix": Option("/run/kvmd/ustreamer.sock", type=valid_abs_path, unpack_as="unix_path"),
|
||||
"timeout": Option(2.0, type=valid_float_f01),
|
||||
"snapshot_timeout": Option(1.0, type=valid_float_f01), # error_delay * 3 + 1
|
||||
"snapshot_timeout": Option(5.0, type=valid_float_f01), # error_delay * 3 + 1
|
||||
|
||||
"process_name_prefix": Option("kvmd/streamer"),
|
||||
|
||||
@ -504,8 +520,9 @@ def _get_config_scheme() -> dict:
|
||||
},
|
||||
|
||||
"switch": {
|
||||
"device": Option("/dev/kvmd-switch", type=valid_abs_path, unpack_as="device_path"),
|
||||
"default_edid": Option("/etc/kvmd/switch-edid.hex", type=valid_abs_path, unpack_as="default_edid_path"),
|
||||
"device": Option("/dev/kvmd-switch", type=valid_abs_path, unpack_as="device_path"),
|
||||
"default_edid": Option("/etc/kvmd/switch-edid.hex", type=valid_abs_path, unpack_as="default_edid_path"),
|
||||
"ignore_hpd_on_top": Option(False, type=valid_bool),
|
||||
},
|
||||
},
|
||||
|
||||
@ -558,15 +575,15 @@ def _get_config_scheme() -> dict:
|
||||
"vendor_id": Option(0x1D6B, type=valid_otg_id), # Linux Foundation
|
||||
"product_id": Option(0x0104, type=valid_otg_id), # Multifunction Composite Gadget
|
||||
"manufacturer": Option("PiKVM", type=valid_stripped_string),
|
||||
"product": Option("Composite KVM Device", type=valid_stripped_string),
|
||||
"product": Option("PiKVM Composite Device", type=valid_stripped_string),
|
||||
"serial": Option("CAFEBABE", type=valid_stripped_string, if_none=None),
|
||||
"config": Option("", type=valid_stripped_string),
|
||||
"device_version": Option(-1, type=functools.partial(valid_number, min=-1, max=0xFFFF)),
|
||||
"usb_version": Option(0x0200, type=valid_otg_id),
|
||||
"max_power": Option(250, type=functools.partial(valid_number, min=50, max=500)),
|
||||
"remote_wakeup": Option(True, type=valid_bool),
|
||||
|
||||
"gadget": Option("kvmd", type=valid_otg_gadget),
|
||||
"config": Option("PiKVM device", type=valid_stripped_string_not_empty),
|
||||
"udc": Option("", type=valid_stripped_string),
|
||||
"endpoints": Option(9, type=valid_int_f0),
|
||||
"init_delay": Option(3.0, type=valid_float_f01),
|
||||
@ -657,8 +674,7 @@ def _get_config_scheme() -> dict:
|
||||
|
||||
"otgnet": {
|
||||
"iface": {
|
||||
"net": Option("172.30.30.0/24", type=functools.partial(valid_net, v6=False)),
|
||||
"ip_cmd": Option(["/usr/bin/ip"], type=valid_command),
|
||||
"net": Option("172.30.30.0/24", type=functools.partial(valid_net, v6=False)),
|
||||
},
|
||||
|
||||
"firewall": {
|
||||
@ -666,10 +682,13 @@ def _get_config_scheme() -> dict:
|
||||
"allow_tcp": Option([], type=valid_ports_list),
|
||||
"allow_udp": Option([67], type=valid_ports_list),
|
||||
"forward_iface": Option("", type=valid_stripped_string),
|
||||
"iptables_cmd": Option(["/usr/sbin/iptables", "--wait=5"], type=valid_command),
|
||||
},
|
||||
|
||||
"commands": {
|
||||
"ip_cmd": Option(["/usr/bin/ip"], type=valid_command),
|
||||
"iptables_cmd": Option(["/usr/sbin/iptables", "--wait=5"], type=valid_command),
|
||||
"sysctl_cmd": Option(["/usr/sbin/sysctl"], type=valid_command),
|
||||
|
||||
"pre_start_cmd": Option(["/bin/true", "pre-start"], type=valid_command),
|
||||
"pre_start_cmd_remove": Option([], type=valid_options),
|
||||
"pre_start_cmd_append": Option([], type=valid_options),
|
||||
@ -734,7 +753,7 @@ def _get_config_scheme() -> dict:
|
||||
"desired_fps": Option(30, type=valid_stream_fps),
|
||||
"mouse_output": Option("usb", type=valid_hid_mouse_output),
|
||||
"keymap": Option("/usr/share/kvmd/keymaps/en-us", type=valid_abs_file),
|
||||
"allow_cut_after": Option(3.0, type=valid_float_f0),
|
||||
"scroll_rate": Option(4, type=functools.partial(valid_number, min=1, max=30)),
|
||||
|
||||
"server": {
|
||||
"host": Option("", type=valid_ip_or_host, if_empty=""),
|
||||
@ -786,8 +805,8 @@ def _get_config_scheme() -> dict:
|
||||
|
||||
"auth": {
|
||||
"vncauth": {
|
||||
"enabled": Option(False, type=valid_bool),
|
||||
"file": Option("/etc/kvmd/vncpasswd", type=valid_abs_file, unpack_as="path"),
|
||||
"enabled": Option(False, type=valid_bool, unpack_as="vncpass_enabled"),
|
||||
"file": Option("/etc/kvmd/vncpasswd", type=valid_abs_file, unpack_as="vncpass_path"),
|
||||
},
|
||||
"vencrypt": {
|
||||
"enabled": Option(True, type=valid_bool, unpack_as="vencrypt_enabled"),
|
||||
@ -795,13 +814,24 @@ def _get_config_scheme() -> dict:
|
||||
},
|
||||
},
|
||||
|
||||
"localhid": {
|
||||
"kvmd": {
|
||||
"unix": Option("/run/kvmd/kvmd.sock", type=valid_abs_path, unpack_as="unix_path"),
|
||||
"timeout": Option(5.0, type=valid_float_f01),
|
||||
},
|
||||
},
|
||||
|
||||
"nginx": {
|
||||
"http": {
|
||||
"port": Option(80, type=valid_port),
|
||||
"ipv4": Option("0.0.0.0", type=functools.partial(valid_ip, v6=False)),
|
||||
"ipv6": Option("::", type=functools.partial(valid_ip, v4=False)),
|
||||
"port": Option(80, type=valid_port),
|
||||
},
|
||||
"https": {
|
||||
"enabled": Option(True, type=valid_bool),
|
||||
"port": Option(443, type=valid_port),
|
||||
"enabled": Option(True, type=valid_bool),
|
||||
"ipv4": Option("0.0.0.0", type=functools.partial(valid_ip, v6=False)),
|
||||
"ipv6": Option("::", type=functools.partial(valid_ip, v4=False)),
|
||||
"port": Option(443, type=valid_port),
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@ -61,6 +61,33 @@ def _print_edid(edid: Edid) -> None:
|
||||
pass
|
||||
|
||||
|
||||
def _find_out2_edid_path() -> str:
|
||||
card = os.path.basename(os.readlink("/dev/dri/by-path/platform-gpu-card"))
|
||||
path = f"/sys/devices/platform/gpu/drm/{card}/{card}-HDMI-A-2"
|
||||
with open(os.path.join(path, "status")) as file:
|
||||
if file.read().startswith("d"):
|
||||
raise SystemExit("No display found")
|
||||
return os.path.join(path, "edid")
|
||||
|
||||
|
||||
def _adopt_out2_ids(dest: Edid) -> None:
|
||||
src = Edid.from_file(_find_out2_edid_path())
|
||||
dest.set_monitor_name(src.get_monitor_name())
|
||||
try:
|
||||
dest.get_monitor_serial()
|
||||
except EdidNoBlockError:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
ser = src.get_monitor_serial()
|
||||
except EdidNoBlockError:
|
||||
ser = "{:08X}".format(src.get_serial())
|
||||
dest.set_monitor_serial(ser)
|
||||
dest.set_mfc_id(src.get_mfc_id())
|
||||
dest.set_product_id(src.get_product_id())
|
||||
dest.set_serial(src.get_serial())
|
||||
|
||||
|
||||
# =====
|
||||
def main(argv: (list[str] | None)=None) -> None: # pylint: disable=too-many-branches,too-many-statements
|
||||
# (parent_parser, argv, _) = init(
|
||||
@ -89,6 +116,10 @@ def main(argv: (list[str] | None)=None) -> None: # pylint: disable=too-many-bra
|
||||
help="Import the specified bin/hex EDID to the [--edid] file as a hex text", metavar="<file>")
|
||||
parser.add_argument("--import-preset", choices=presets,
|
||||
help="Restore default EDID or choose the preset", metavar=f"{{ {' | '.join(presets)} }}",)
|
||||
parser.add_argument("--import-display-ids", action="store_true",
|
||||
help="On PiKVM V4, import and adopt IDs from a physical display connected to the OUT2 port")
|
||||
parser.add_argument("--import-display", action="store_true",
|
||||
help="On PiKVM V4, import full EDID from a physical display connected to the OUT2 port")
|
||||
parser.add_argument("--set-audio", type=valid_bool,
|
||||
help="Enable or disable audio", metavar="<yes|no>")
|
||||
parser.add_argument("--set-mfc-id",
|
||||
@ -120,6 +151,9 @@ def main(argv: (list[str] | None)=None) -> None: # pylint: disable=too-many-bra
|
||||
imp = f"_{imp}"
|
||||
options.imp = os.path.join(options.presets_path, f"{imp}.hex")
|
||||
|
||||
if options.import_display:
|
||||
options.imp = _find_out2_edid_path()
|
||||
|
||||
orig_edid_path = options.edid_path
|
||||
if options.imp:
|
||||
options.export_hex = options.edid_path
|
||||
@ -128,6 +162,10 @@ def main(argv: (list[str] | None)=None) -> None: # pylint: disable=too-many-bra
|
||||
edid = Edid.from_file(options.edid_path)
|
||||
changed = False
|
||||
|
||||
if options.import_display_ids:
|
||||
_adopt_out2_ids(edid)
|
||||
changed = True
|
||||
|
||||
for cmd in dir(Edid):
|
||||
if cmd.startswith("set_"):
|
||||
value = getattr(options, cmd)
|
||||
|
||||
@ -30,27 +30,27 @@ import argparse
|
||||
|
||||
from typing import Generator
|
||||
|
||||
import passlib.apache
|
||||
|
||||
from ...yamlconf import Section
|
||||
|
||||
from ...validators import ValidatorError
|
||||
from ...validators.auth import valid_user
|
||||
from ...validators.auth import valid_passwd
|
||||
|
||||
from ...crypto import KvmdHtpasswdFile
|
||||
|
||||
from .. import init
|
||||
|
||||
|
||||
# =====
|
||||
def _get_htpasswd_path(config: Section) -> str:
|
||||
if config.kvmd.auth.internal.type != "htpasswd":
|
||||
raise SystemExit(f"Error: KVMD internal auth not using 'htpasswd'"
|
||||
raise SystemExit(f"Error: KVMD internal auth does not use 'htpasswd'"
|
||||
f" (now configured {config.kvmd.auth.internal.type!r})")
|
||||
return config.kvmd.auth.internal.file
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _get_htpasswd_for_write(config: Section) -> Generator[passlib.apache.HtpasswdFile, None, None]:
|
||||
def _get_htpasswd_for_write(config: Section) -> Generator[KvmdHtpasswdFile, None, None]:
|
||||
path = _get_htpasswd_path(config)
|
||||
(tmp_fd, tmp_path) = tempfile.mkstemp(
|
||||
prefix=f".{os.path.basename(path)}.",
|
||||
@ -65,7 +65,7 @@ def _get_htpasswd_for_write(config: Section) -> Generator[passlib.apache.Htpassw
|
||||
os.fchmod(tmp_fd, st.st_mode)
|
||||
finally:
|
||||
os.close(tmp_fd)
|
||||
htpasswd = passlib.apache.HtpasswdFile(tmp_path)
|
||||
htpasswd = KvmdHtpasswdFile(tmp_path)
|
||||
yield htpasswd
|
||||
htpasswd.save()
|
||||
os.rename(tmp_path, path)
|
||||
@ -96,28 +96,55 @@ def _print_invalidate_tip(prepend_nl: bool) -> None:
|
||||
|
||||
# ====
|
||||
def _cmd_list(config: Section, _: argparse.Namespace) -> None:
|
||||
for user in sorted(passlib.apache.HtpasswdFile(_get_htpasswd_path(config)).users()):
|
||||
for user in sorted(KvmdHtpasswdFile(_get_htpasswd_path(config)).users()):
|
||||
print(user)
|
||||
|
||||
|
||||
def _cmd_set(config: Section, options: argparse.Namespace) -> None:
|
||||
def _change_user(config: Section, options: argparse.Namespace, create: bool) -> None:
|
||||
with _get_htpasswd_for_write(config) as htpasswd:
|
||||
assert options.user == options.user.strip()
|
||||
assert options.user
|
||||
|
||||
has_user = (options.user in htpasswd.users())
|
||||
if create:
|
||||
if has_user:
|
||||
raise SystemExit(f"The user {options.user!r} is already exists")
|
||||
else:
|
||||
if not has_user:
|
||||
raise SystemExit(f"The user {options.user!r} is not exist")
|
||||
|
||||
if options.read_stdin:
|
||||
passwd = valid_passwd(input())
|
||||
else:
|
||||
passwd = valid_passwd(getpass.getpass("Password: ", stream=sys.stderr))
|
||||
if valid_passwd(getpass.getpass("Repeat: ", stream=sys.stderr)) != passwd:
|
||||
raise SystemExit("Sorry, passwords do not match")
|
||||
|
||||
htpasswd.set_password(options.user, passwd)
|
||||
|
||||
if has_user and not options.quiet:
|
||||
_print_invalidate_tip(True)
|
||||
|
||||
|
||||
def _cmd_add(config: Section, options: argparse.Namespace) -> None:
|
||||
_change_user(config, options, create=True)
|
||||
|
||||
|
||||
def _cmd_set(config: Section, options: argparse.Namespace) -> None:
|
||||
_change_user(config, options, create=False)
|
||||
|
||||
|
||||
def _cmd_delete(config: Section, options: argparse.Namespace) -> None:
|
||||
with _get_htpasswd_for_write(config) as htpasswd:
|
||||
assert options.user == options.user.strip()
|
||||
assert options.user
|
||||
|
||||
has_user = (options.user in htpasswd.users())
|
||||
if not has_user:
|
||||
raise SystemExit(f"The user {options.user!r} is not exist")
|
||||
|
||||
htpasswd.delete(options.user)
|
||||
|
||||
if has_user and not options.quiet:
|
||||
_print_invalidate_tip(False)
|
||||
|
||||
@ -138,19 +165,25 @@ def main(argv: (list[str] | None)=None) -> None:
|
||||
parser.set_defaults(cmd=(lambda *_: parser.print_help()))
|
||||
subparsers = parser.add_subparsers()
|
||||
|
||||
cmd_list_parser = subparsers.add_parser("list", help="List users")
|
||||
cmd_list_parser.set_defaults(cmd=_cmd_list)
|
||||
sub = subparsers.add_parser("list", help="List users")
|
||||
sub.set_defaults(cmd=_cmd_list)
|
||||
|
||||
cmd_set_parser = subparsers.add_parser("set", help="Create user or change password")
|
||||
cmd_set_parser.add_argument("user", type=valid_user)
|
||||
cmd_set_parser.add_argument("-i", "--read-stdin", action="store_true", help="Read password from stdin")
|
||||
cmd_set_parser.add_argument("-q", "--quiet", action="store_true", help="Don't show invalidation note")
|
||||
cmd_set_parser.set_defaults(cmd=_cmd_set)
|
||||
sub = subparsers.add_parser("add", help="Add user")
|
||||
sub.add_argument("user", type=valid_user)
|
||||
sub.add_argument("-i", "--read-stdin", action="store_true", help="Read password from stdin")
|
||||
sub.add_argument("-q", "--quiet", action="store_true", help="Don't show invalidation note")
|
||||
sub.set_defaults(cmd=_cmd_add)
|
||||
|
||||
cmd_delete_parser = subparsers.add_parser("del", help="Delete user")
|
||||
cmd_delete_parser.add_argument("user", type=valid_user)
|
||||
cmd_delete_parser.add_argument("-q", "--quiet", action="store_true", help="Don't show invalidation note")
|
||||
cmd_delete_parser.set_defaults(cmd=_cmd_delete)
|
||||
sub = subparsers.add_parser("set", help="Change user's password")
|
||||
sub.add_argument("user", type=valid_user)
|
||||
sub.add_argument("-i", "--read-stdin", action="store_true", help="Read password from stdin")
|
||||
sub.add_argument("-q", "--quiet", action="store_true", help="Don't show invalidation note")
|
||||
sub.set_defaults(cmd=_cmd_set)
|
||||
|
||||
sub = subparsers.add_parser("del", help="Delete user")
|
||||
sub.add_argument("user", type=valid_user)
|
||||
sub.add_argument("-q", "--quiet", action="store_true", help="Don't show invalidation note")
|
||||
sub.set_defaults(cmd=_cmd_delete)
|
||||
|
||||
options = parser.parse_args(argv[1:])
|
||||
try:
|
||||
|
||||
@ -20,7 +20,13 @@
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
import dataclasses
|
||||
import threading
|
||||
import functools
|
||||
import time
|
||||
|
||||
from ...logging import get_logger
|
||||
|
||||
from ... import tools
|
||||
|
||||
|
||||
# =====
|
||||
@ -29,60 +35,42 @@ class IpmiPasswdError(Exception):
|
||||
super().__init__(f"Syntax error at {path}:{lineno}: {msg}")
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class IpmiUserCredentials:
|
||||
ipmi_user: str
|
||||
ipmi_passwd: str
|
||||
kvmd_user: str
|
||||
kvmd_passwd: str
|
||||
|
||||
|
||||
class IpmiAuthManager:
|
||||
def __init__(self, path: str) -> None:
|
||||
self.__path = path
|
||||
with open(path) as file:
|
||||
self.__credentials = self.__parse_passwd_file(file.read().split("\n"))
|
||||
self.__lock = threading.Lock()
|
||||
|
||||
def __contains__(self, ipmi_user: str) -> bool:
|
||||
return (ipmi_user in self.__credentials)
|
||||
def get(self, user: str) -> (str | None):
|
||||
creds = self.__get_credentials(int(time.time()))
|
||||
return creds.get(user)
|
||||
|
||||
def __getitem__(self, ipmi_user: str) -> str:
|
||||
return self.__credentials[ipmi_user].ipmi_passwd
|
||||
@functools.lru_cache(maxsize=1)
|
||||
def __get_credentials(self, ts: int) -> dict[str, str]:
|
||||
_ = ts
|
||||
with self.__lock:
|
||||
try:
|
||||
return self.__read_credentials()
|
||||
except Exception as ex:
|
||||
get_logger().error("%s", tools.efmt(ex))
|
||||
return {}
|
||||
|
||||
def get_credentials(self, ipmi_user: str) -> IpmiUserCredentials:
|
||||
return self.__credentials[ipmi_user]
|
||||
def __read_credentials(self) -> dict[str, str]:
|
||||
with open(self.__path) as file:
|
||||
creds: dict[str, str] = {}
|
||||
for (lineno, line) in tools.passwds_splitted(file.read()):
|
||||
if " -> " in line: # Compatibility with old ipmipasswd file format
|
||||
line = line.split(" -> ", 1)[0]
|
||||
|
||||
def __parse_passwd_file(self, lines: list[str]) -> dict[str, IpmiUserCredentials]:
|
||||
credentials: dict[str, IpmiUserCredentials] = {}
|
||||
for (lineno, line) in enumerate(lines):
|
||||
if len(line.strip()) == 0 or line.lstrip().startswith("#"):
|
||||
continue
|
||||
if ":" not in line:
|
||||
raise IpmiPasswdError(self.__path, lineno, "Missing ':' operator")
|
||||
|
||||
if " -> " not in line:
|
||||
raise IpmiPasswdError(self.__path, lineno, "Missing ' -> ' operator")
|
||||
(user, passwd) = line.split(":", 1)
|
||||
user = user.strip()
|
||||
if len(user) == 0:
|
||||
raise IpmiPasswdError(self.__path, lineno, "Empty IPMI user")
|
||||
|
||||
(left, right) = map(str.lstrip, line.split(" -> ", 1))
|
||||
for (name, pair) in [("left", left), ("right", right)]:
|
||||
if ":" not in pair:
|
||||
raise IpmiPasswdError(self.__path, lineno, f"Missing ':' operator in {name} credentials")
|
||||
if user in creds:
|
||||
raise IpmiPasswdError(self.__path, lineno, f"Found duplicating user {user!r}")
|
||||
|
||||
(ipmi_user, ipmi_passwd) = left.split(":")
|
||||
ipmi_user = ipmi_user.strip()
|
||||
if len(ipmi_user) == 0:
|
||||
raise IpmiPasswdError(self.__path, lineno, "Empty IPMI user (left)")
|
||||
|
||||
(kvmd_user, kvmd_passwd) = right.split(":")
|
||||
kvmd_user = kvmd_user.strip()
|
||||
if len(kvmd_user) == 0:
|
||||
raise IpmiPasswdError(self.__path, lineno, "Empty KVMD user (left)")
|
||||
|
||||
if ipmi_user in credentials:
|
||||
raise IpmiPasswdError(self.__path, lineno, f"Found duplicating user {ipmi_user!r} (left)")
|
||||
|
||||
credentials[ipmi_user] = IpmiUserCredentials(
|
||||
ipmi_user=ipmi_user,
|
||||
ipmi_passwd=ipmi_passwd,
|
||||
kvmd_user=kvmd_user,
|
||||
kvmd_passwd=kvmd_passwd,
|
||||
)
|
||||
return credentials
|
||||
creds[user] = passwd
|
||||
return creds
|
||||
|
||||
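For illustration only (not part of this commit): the rewritten IpmiAuthManager above re-reads the passwd file through __get_credentials, which is wrapped in functools.lru_cache(maxsize=1) and keyed by int(time.time()), so the file is parsed at most once per second and parse errors fall back to an empty dict. A minimal standalone sketch of the same time-bucketed caching idea, with made-up names (load_passwd, get_passwd) and none of the locking or logging of the real class:

import functools
import time

def load_passwd(path: str) -> dict[str, str]:
    # Stand-in for the real parser: plain "user:passwd" lines, comments skipped.
    creds: dict[str, str] = {}
    with open(path) as file:
        for line in file:
            line = line.strip()
            if line and not line.startswith("#") and ":" in line:
                (user, passwd) = line.split(":", 1)
                creds[user.strip()] = passwd
    return creds

@functools.lru_cache(maxsize=1)
def _cached_passwd(path: str, ts: int) -> dict[str, str]:
    _ = ts  # The timestamp is only a cache key and is never used directly.
    return load_passwd(path)

def get_passwd(path: str) -> dict[str, str]:
    # Calls within the same second hit the cache; a new second evicts the
    # single cached entry and forces a re-read, mirroring __get_credentials().
    return _cached_passwd(path, int(time.time()))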
@ -70,7 +70,6 @@ class IpmiServer(BaseIpmiServer): # pylint: disable=too-many-instance-attribute
|
||||
|
||||
super().__init__(authdata=auth_manager, address=host, port=port)
|
||||
|
||||
self.__auth_manager = auth_manager
|
||||
self.__kvmd = kvmd
|
||||
|
||||
self.__host = host
|
||||
@ -165,11 +164,10 @@ class IpmiServer(BaseIpmiServer): # pylint: disable=too-many-instance-attribute
|
||||
def __make_request(self, session: IpmiServerSession, name: str, func_path: str, **kwargs): # type: ignore
|
||||
async def runner(): # type: ignore
|
||||
logger = get_logger(0)
|
||||
credentials = self.__auth_manager.get_credentials(session.username.decode())
|
||||
logger.info("[%s]: Performing request %s from user %r (IPMI) as %r (KVMD)",
|
||||
session.sockaddr[0], name, credentials.ipmi_user, credentials.kvmd_user)
|
||||
logger.info("[%s]: Performing request %s from IPMI user %r ...",
|
||||
session.sockaddr[0], name, session.username.decode())
|
||||
try:
|
||||
async with self.__kvmd.make_session(credentials.kvmd_user, credentials.kvmd_passwd) as kvmd_session:
|
||||
async with self.__kvmd.make_session() as kvmd_session:
|
||||
func = functools.reduce(getattr, func_path.split("."), kvmd_session)
|
||||
return (await func(**kwargs))
|
||||
except (aiohttp.ClientError, asyncio.TimeoutError) as ex:
|
||||
|
||||
@ -21,6 +21,7 @@ class _Netcfg:
|
||||
nat_type: StunNatType = dataclasses.field(default=StunNatType.ERROR)
|
||||
src_ip: str = dataclasses.field(default="")
|
||||
ext_ip: str = dataclasses.field(default="")
|
||||
stun_host: str = dataclasses.field(default="")
|
||||
stun_ip: str = dataclasses.field(default="")
|
||||
stun_port: int = dataclasses.field(default=0)
|
||||
|
||||
@ -172,7 +173,10 @@ class JanusRunner: # pylint: disable=too-many-instance-attributes
|
||||
part.format(**placeholders)
|
||||
for part in cmd
|
||||
]
|
||||
self.__janus_proc = await aioproc.run_process(cmd)
|
||||
self.__janus_proc = await aioproc.run_process(
|
||||
cmd=cmd,
|
||||
env={"JANUS_USTREAMER_WEB_ICE_URL": f"stun:{netcfg.stun_host}:{netcfg.stun_port}"},
|
||||
)
|
||||
get_logger(0).info("Started Janus pid=%d: %s", self.__janus_proc.pid, tools.cmdfmt(cmd))
|
||||
|
||||
async def __kill_janus_proc(self) -> None:
|
||||
|
||||
@ -30,6 +30,7 @@ class StunInfo:
|
||||
nat_type: StunNatType
|
||||
src_ip: str
|
||||
ext_ip: str
|
||||
stun_host: str
|
||||
stun_ip: str
|
||||
stun_port: int
|
||||
|
||||
@ -102,6 +103,7 @@ class Stun:
|
||||
nat_type=nat_type,
|
||||
src_ip=src_ip,
|
||||
ext_ip=ext_ip,
|
||||
stun_host=self.__host,
|
||||
stun_ip=self.__stun_ip,
|
||||
stun_port=self.__port,
|
||||
)
|
||||
|
||||
@ -76,14 +76,17 @@ def main(argv: (list[str] | None)=None) -> None:
|
||||
KvmdServer(
|
||||
auth_manager=AuthManager(
|
||||
enabled=config.auth.enabled,
|
||||
expire=config.auth.expire,
|
||||
usc_users=config.auth.usc.users,
|
||||
usc_groups=config.auth.usc.groups,
|
||||
unauth_paths=([] if config.prometheus.auth.enabled else ["/export/prometheus/metrics"]),
|
||||
|
||||
internal_type=config.auth.internal.type,
|
||||
internal_kwargs=config.auth.internal._unpack(ignore=["type", "force_users"]),
|
||||
force_internal_users=config.auth.internal.force_users,
|
||||
int_type=config.auth.internal.type,
|
||||
int_kwargs=config.auth.internal._unpack(ignore=["type", "force_users"]),
|
||||
force_int_users=config.auth.internal.force_users,
|
||||
|
||||
external_type=config.auth.external.type,
|
||||
external_kwargs=(config.auth.external._unpack(ignore=["type"]) if config.auth.external.type else {}),
|
||||
ext_type=config.auth.external.type,
|
||||
ext_kwargs=(config.auth.external._unpack(ignore=["type"]) if config.auth.external.type else {}),
|
||||
|
||||
totp_secret_path=config.auth.totp.secret.file,
|
||||
),
|
||||
|
||||
@ -31,9 +31,11 @@ from ....htserver import HttpExposed
|
||||
from ....htserver import exposed_http
|
||||
from ....htserver import make_json_response
|
||||
from ....htserver import set_request_auth_info
|
||||
from ....htserver import get_request_unix_credentials
|
||||
|
||||
from ....validators.auth import valid_user
|
||||
from ....validators.auth import valid_passwd
|
||||
from ....validators.auth import valid_expire
|
||||
from ....validators.auth import valid_auth_token
|
||||
|
||||
from ..auth import AuthManager
|
||||
@ -43,39 +45,64 @@ from ..auth import AuthManager
|
||||
_COOKIE_AUTH_TOKEN = "auth_token"
|
||||
|
||||
|
||||
async def check_request_auth(auth_manager: AuthManager, exposed: HttpExposed, req: Request) -> None:
|
||||
if auth_manager.is_auth_required(exposed):
|
||||
user = req.headers.get("X-KVMD-User", "")
|
||||
async def _check_xhdr(auth_manager: AuthManager, _: HttpExposed, req: Request) -> bool:
|
||||
user = req.headers.get("X-KVMD-User", "")
|
||||
if user:
|
||||
user = valid_user(user)
|
||||
passwd = req.headers.get("X-KVMD-Passwd", "")
|
||||
set_request_auth_info(req, f"{user} (xhdr)")
|
||||
if (await auth_manager.authorize(user, valid_passwd(passwd))):
|
||||
return True
|
||||
raise ForbiddenError()
|
||||
return False
|
||||
|
||||
|
||||
async def _check_token(auth_manager: AuthManager, _: HttpExposed, req: Request) -> bool:
|
||||
token = req.cookies.get(_COOKIE_AUTH_TOKEN, "")
|
||||
if token:
|
||||
user = auth_manager.check(valid_auth_token(token))
|
||||
if user:
|
||||
user = valid_user(user)
|
||||
passwd = req.headers.get("X-KVMD-Passwd", "")
|
||||
set_request_auth_info(req, f"{user} (xhdr)")
|
||||
if not (await auth_manager.authorize(user, valid_passwd(passwd))):
|
||||
raise ForbiddenError()
|
||||
return
|
||||
|
||||
token = req.cookies.get(_COOKIE_AUTH_TOKEN, "")
|
||||
if token:
|
||||
user = auth_manager.check(valid_auth_token(token)) # type: ignore
|
||||
if not user:
|
||||
set_request_auth_info(req, "- (token)")
|
||||
raise ForbiddenError()
|
||||
set_request_auth_info(req, f"{user} (token)")
|
||||
return
|
||||
return True
|
||||
set_request_auth_info(req, "- (token)")
|
||||
raise ForbiddenError()
|
||||
return False
|
||||
|
||||
basic_auth = req.headers.get("Authorization", "")
|
||||
if basic_auth and basic_auth[:6].lower() == "basic ":
|
||||
try:
|
||||
(user, passwd) = base64.b64decode(basic_auth[6:]).decode("utf-8").split(":")
|
||||
except Exception:
|
||||
raise UnauthorizedError()
|
||||
user = valid_user(user)
|
||||
set_request_auth_info(req, f"{user} (basic)")
|
||||
if not (await auth_manager.authorize(user, valid_passwd(passwd))):
|
||||
raise ForbiddenError()
|
||||
return
|
||||
|
||||
async def _check_basic(auth_manager: AuthManager, _: HttpExposed, req: Request) -> bool:
|
||||
basic_auth = req.headers.get("Authorization", "")
|
||||
if basic_auth and basic_auth[:6].lower() == "basic ":
|
||||
try:
|
||||
(user, passwd) = base64.b64decode(basic_auth[6:]).decode("utf-8").split(":")
|
||||
except Exception:
|
||||
raise UnauthorizedError()
|
||||
user = valid_user(user)
|
||||
set_request_auth_info(req, f"{user} (basic)")
|
||||
if (await auth_manager.authorize(user, valid_passwd(passwd))):
|
||||
return True
|
||||
raise ForbiddenError()
|
||||
return False
|
||||
|
||||
|
||||
async def _check_usc(auth_manager: AuthManager, exposed: HttpExposed, req: Request) -> bool:
|
||||
if exposed.allow_usc:
|
||||
creds = get_request_unix_credentials(req)
|
||||
if creds is not None:
|
||||
user = auth_manager.check_unix_credentials(creds)
|
||||
if user:
|
||||
set_request_auth_info(req, f"{user}[{creds.uid}] (unix)")
|
||||
return True
|
||||
raise UnauthorizedError()
|
||||
return False
|
||||
|
||||
|
||||
async def check_request_auth(auth_manager: AuthManager, exposed: HttpExposed, req: Request) -> None:
|
||||
if not auth_manager.is_auth_required(exposed):
|
||||
return
|
||||
for checker in [_check_xhdr, _check_token, _check_basic, _check_usc]:
|
||||
if (await checker(auth_manager, exposed, req)):
|
||||
return
|
||||
raise UnauthorizedError()
|
||||
|
||||
|
||||
class AuthApi:
|
||||
@ -84,26 +111,28 @@ class AuthApi:
|
||||
|
||||
# =====
|
||||
|
||||
@exposed_http("POST", "/auth/login", auth_required=False)
|
||||
@exposed_http("POST", "/auth/login", auth_required=False, allow_usc=False)
|
||||
async def __login_handler(self, req: Request) -> Response:
|
||||
if self.__auth_manager.is_auth_enabled():
|
||||
credentials = await req.post()
|
||||
token = await self.__auth_manager.login(
|
||||
user=valid_user(credentials.get("user", "")),
|
||||
passwd=valid_passwd(credentials.get("passwd", "")),
|
||||
expire=valid_expire(credentials.get("expire", "0")),
|
||||
)
|
||||
if token:
|
||||
return make_json_response(set_cookies={_COOKIE_AUTH_TOKEN: token})
|
||||
raise ForbiddenError()
|
||||
return make_json_response()
|
||||
|
||||
@exposed_http("POST", "/auth/logout")
|
||||
@exposed_http("POST", "/auth/logout", allow_usc=False)
|
||||
async def __logout_handler(self, req: Request) -> Response:
|
||||
if self.__auth_manager.is_auth_enabled():
|
||||
token = valid_auth_token(req.cookies.get(_COOKIE_AUTH_TOKEN, ""))
|
||||
self.__auth_manager.logout(token)
|
||||
return make_json_response()
|
||||
|
||||
@exposed_http("GET", "/auth/check")
|
||||
# XXX: This handler is used for access control, so it should NEVER allow access by socket credentials
|
||||
@exposed_http("GET", "/auth/check", allow_usc=False)
|
||||
async def __check_handler(self, _: Request) -> Response:
|
||||
return make_json_response()
|
||||
|
||||
@ -21,6 +21,7 @@
|
||||
|
||||
|
||||
import asyncio
|
||||
import re
|
||||
|
||||
from typing import Any
|
||||
|
||||
@ -57,7 +58,7 @@ class ExportApi:
|
||||
async def __get_prometheus_metrics(self) -> str:
|
||||
(atx_state, info_state, gpio_state) = await asyncio.gather(*[
|
||||
self.__atx.get_state(),
|
||||
self.__info_manager.get_state(["hw", "fan"]),
|
||||
self.__info_manager.get_state(["health", "fan"]),
|
||||
self.__user_gpio.get_state(),
|
||||
])
|
||||
rows: list[str] = []
|
||||
@ -68,10 +69,11 @@ class ExportApi:
|
||||
for mode in sorted(UserGpioModes.ALL):
|
||||
for (channel, ch_state) in gpio_state["state"][f"{mode}s"].items(): # type: ignore
|
||||
if not channel.startswith("__"): # Hide special GPIOs
|
||||
channel = re.sub(r"[^\w]", "_", channel)
|
||||
for key in ["online", "state"]:
|
||||
self.__append_prometheus_rows(rows, ch_state["state"], f"pikvm_gpio_{mode}_{key}_{channel}")
|
||||
|
||||
self.__append_prometheus_rows(rows, info_state["hw"]["health"], "pikvm_hw") # type: ignore
|
||||
self.__append_prometheus_rows(rows, info_state["health"], "pikvm_hw") # type: ignore
|
||||
self.__append_prometheus_rows(rows, info_state["fan"], "pikvm_fan")
|
||||
|
||||
return "\n".join(rows)
|
||||
|
||||
@ -23,6 +23,7 @@
|
||||
import os
|
||||
import stat
|
||||
import functools
|
||||
import itertools
|
||||
import struct
|
||||
|
||||
from typing import Iterable
|
||||
@ -31,8 +32,11 @@ from typing import Callable
|
||||
from aiohttp.web import Request
|
||||
from aiohttp.web import Response
|
||||
|
||||
from ....keyboard.mappings import WEB_TO_EVDEV
|
||||
from ....keyboard.keysym import build_symmap
|
||||
from ....keyboard.printer import text_to_web_keys
|
||||
from ....keyboard.printer import text_to_evdev_keys
|
||||
|
||||
from ....mouse import MOUSE_TO_EVDEV
|
||||
|
||||
from ....htserver import exposed_http
|
||||
from ....htserver import exposed_ws
|
||||
@ -43,7 +47,9 @@ from ....plugins.hid import BaseHid
|
||||
|
||||
from ....validators import raise_error
|
||||
from ....validators.basic import valid_bool
|
||||
from ....validators.basic import valid_number
|
||||
from ....validators.basic import valid_int_f0
|
||||
from ....validators.basic import valid_string_list
|
||||
from ....validators.os import valid_printable_filename
|
||||
from ....validators.hid import valid_hid_keyboard_output
|
||||
from ....validators.hid import valid_hid_mouse_output
|
||||
@ -97,6 +103,11 @@ class HidApi:
|
||||
await self.__hid.reset()
|
||||
return make_json_response()
|
||||
|
||||
@exposed_http("GET", "/hid/inactivity")
|
||||
async def __inactivity_handler(self, _: Request) -> Response:
|
||||
secs = self.__hid.get_inactivity_seconds()
|
||||
return make_json_response({"inactivity": secs})
|
||||
|
||||
# =====
|
||||
|
||||
async def get_keymaps(self) -> dict: # Ugly hack to generate hid_keymaps_state (see server.py)
|
||||
@ -119,15 +130,26 @@ class HidApi:
|
||||
@exposed_http("POST", "/hid/print")
|
||||
async def __print_handler(self, req: Request) -> Response:
|
||||
text = await req.text()
|
||||
limit = int(valid_int_f0(req.query.get("limit", 1024)))
|
||||
limit = valid_int_f0(req.query.get("limit", 1024))
|
||||
if limit > 0:
|
||||
text = text[:limit]
|
||||
symmap = self.__ensure_symmap(req.query.get("keymap", self.__default_keymap_name))
|
||||
slow = valid_bool(req.query.get("slow", False))
|
||||
await self.__hid.send_key_events(text_to_web_keys(text, symmap), no_ignore_keys=True, slow=slow)
|
||||
delay = float(valid_number(
|
||||
arg=req.query.get("delay", (0.02 if slow else 0)),
|
||||
min=0,
|
||||
max=5,
|
||||
type=float,
|
||||
name="keys delay",
|
||||
))
|
||||
await self.__hid.send_key_events(
|
||||
keys=text_to_evdev_keys(text, symmap),
|
||||
no_ignore_keys=True,
|
||||
delay=delay,
|
||||
)
|
||||
return make_json_response()
|
||||
|
||||
def __ensure_symmap(self, keymap_name: str) -> dict[int, dict[int, str]]:
|
||||
def __ensure_symmap(self, keymap_name: str) -> dict[int, dict[int, int]]:
|
||||
keymap_name = valid_printable_filename(keymap_name, "keymap")
|
||||
path = os.path.join(self.__keymaps_dir_path, keymap_name)
|
||||
try:
|
||||
@ -139,7 +161,7 @@ class HidApi:
|
||||
return self.__inner_ensure_symmap(path, st.st_mtime)
|
||||
|
||||
@functools.lru_cache(maxsize=10)
|
||||
def __inner_ensure_symmap(self, path: str, mod_ts: int) -> dict[int, dict[int, str]]:
|
||||
def __inner_ensure_symmap(self, path: str, mod_ts: int) -> dict[int, dict[int, int]]:
|
||||
_ = mod_ts # For LRU
|
||||
return build_symmap(path)
|
||||
|
||||
@ -148,9 +170,12 @@ class HidApi:
|
||||
@exposed_ws(1)
|
||||
async def __ws_bin_key_handler(self, _: WsSession, data: bytes) -> None:
|
||||
try:
|
||||
key = valid_hid_key(data[1:].decode("ascii"))
|
||||
state = bool(data[0] & 0b01)
|
||||
finish = bool(data[0] & 0b10)
|
||||
if data[0] & 0b10000000:
|
||||
key = struct.unpack(">H", data[1:])[0]
|
||||
else:
|
||||
key = WEB_TO_EVDEV[valid_hid_key(data[1:33].decode("ascii"))]
|
||||
except Exception:
|
||||
return
|
||||
self.__hid.send_key_event(key, state, finish)
|
||||
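For illustration only (not part of this commit): in the binary key handler above, byte 0 carries flags (bit 0 = key state, bit 1 = finish, bit 7 = the payload is a big-endian uint16 evdev code rather than an ASCII web key name mapped through WEB_TO_EVDEV). A hedged sketch of how a client could pack that payload; the layout is read off the unpacking code shown here, and any outer websocket framing (the event-type prefix handled by @exposed_ws) is deliberately left out:

import struct

def pack_key_event(evdev_code: int, state: bool, finish: bool = False) -> bytes:
    # Mirrors the parsing above: bit 0 = pressed/released, bit 1 = "finish",
    # bit 7 = the key is a big-endian uint16 evdev code instead of an ASCII
    # web key name looked up via WEB_TO_EVDEV.
    flags = 0b10000000 | (0b01 if state else 0) | (0b10 if finish else 0)
    return bytes([flags]) + struct.pack(">H", evdev_code)

# Example: KEY_A (evdev code 30) pressed.
assert pack_key_event(30, True) == b"\x81\x00\x1e"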
@ -158,7 +183,11 @@ class HidApi:
|
||||
@exposed_ws(2)
|
||||
async def __ws_bin_mouse_button_handler(self, _: WsSession, data: bytes) -> None:
|
||||
try:
|
||||
button = valid_hid_mouse_button(data[1:].decode("ascii"))
|
||||
state = bool(data[0] & 0b01)
|
||||
if data[0] & 0b10000000:
|
||||
button = struct.unpack(">H", data[1:])[0]
|
||||
else:
|
||||
button = MOUSE_TO_EVDEV[valid_hid_mouse_button(data[1:33].decode("ascii"))]
|
||||
state = bool(data[0] & 0b01)
|
||||
except Exception:
|
||||
return
|
||||
@ -199,7 +228,7 @@ class HidApi:
|
||||
@exposed_ws("key")
|
||||
async def __ws_key_handler(self, _: WsSession, event: dict) -> None:
|
||||
try:
|
||||
key = valid_hid_key(event["key"])
|
||||
key = WEB_TO_EVDEV[valid_hid_key(event["key"])]
|
||||
state = valid_bool(event["state"])
|
||||
finish = valid_bool(event.get("finish", False))
|
||||
except Exception:
|
||||
@ -209,7 +238,7 @@ class HidApi:
|
||||
@exposed_ws("mouse_button")
|
||||
async def __ws_mouse_button_handler(self, _: WsSession, event: dict) -> None:
|
||||
try:
|
||||
button = valid_hid_mouse_button(event["button"])
|
||||
button = MOUSE_TO_EVDEV[valid_hid_mouse_button(event["button"])]
|
||||
state = valid_bool(event["state"])
|
||||
except Exception:
|
||||
return
|
||||
@ -246,9 +275,22 @@ class HidApi:
|
||||
|
||||
# =====
|
||||
|
||||
@exposed_http("POST", "/hid/events/send_shortcut")
async def __events_send_shortcut_handler(self, req: Request) -> Response:
shortcut = valid_string_list(req.query.get("keys"), subval=valid_hid_key)
if shortcut:
press = [WEB_TO_EVDEV[key] for key in shortcut]
release = list(reversed(press))
seq = [
*zip(press, itertools.repeat(True)),
*zip(release, itertools.repeat(False)),
]
await self.__hid.send_key_events(seq, no_ignore_keys=True, delay=0.05)
return make_json_response()
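
The shortcut handler builds one flat list of (key, state) events: every key is pressed in order, then released in reverse, so modifiers stay held for the whole combo. A quick illustration of the resulting sequence shape (WEB_TO_EVDEV is replaced by a toy subset here):

import itertools

WEB_TO_EVDEV = {"ControlLeft": 29, "AltLeft": 56, "Delete": 111}  # Toy subset for the example

shortcut = ["ControlLeft", "AltLeft", "Delete"]
press = [WEB_TO_EVDEV[key] for key in shortcut]
release = list(reversed(press))
seq = [
    *zip(press, itertools.repeat(True)),     # (29, True), (56, True), (111, True)
    *zip(release, itertools.repeat(False)),  # (111, False), (56, False), (29, False)
]
print(seq)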
|
||||
|
||||
@exposed_http("POST", "/hid/events/send_key")
|
||||
async def __events_send_key_handler(self, req: Request) -> Response:
|
||||
key = valid_hid_key(req.query.get("key"))
|
||||
key = WEB_TO_EVDEV[valid_hid_key(req.query.get("key"))]
|
||||
if "state" in req.query:
|
||||
state = valid_bool(req.query["state"])
|
||||
finish = valid_bool(req.query.get("finish", False))
|
||||
@ -259,7 +301,7 @@ class HidApi:
|
||||
|
||||
@exposed_http("POST", "/hid/events/send_mouse_button")
|
||||
async def __events_send_mouse_button_handler(self, req: Request) -> Response:
|
||||
button = valid_hid_mouse_button(req.query.get("button"))
|
||||
button = MOUSE_TO_EVDEV[valid_hid_mouse_button(req.query.get("button"))]
|
||||
if "state" in req.query:
|
||||
state = valid_bool(req.query["state"])
|
||||
self.__hid.send_mouse_button_event(button, state)
|
||||
|
||||
@ -45,7 +45,10 @@ class InfoApi:
|
||||
|
||||
def __valid_info_fields(self, req: Request) -> list[str]:
|
||||
available = self.__info_manager.get_subs()
|
||||
available.add("hw")
|
||||
default = set(available)
|
||||
default.remove("health")
|
||||
return sorted(valid_info_fields(
|
||||
arg=req.query.get("fields", ",".join(available)),
|
||||
variants=available,
|
||||
arg=req.query.get("fields", ",".join(default)),
|
||||
variants=(available),
|
||||
) or available)
|
||||
|
||||
@@ -52,17 +52,15 @@ class LogApi:
raise LogReaderDisabledError()
seek = valid_log_seek(req.query.get("seek", 0))
follow = valid_bool(req.query.get("follow", False))
response = await start_streaming(req, "text/plain")
resp = await start_streaming(req, "text/plain")
try:
async for record in self.__log_reader.poll_log(seek, follow):
await response.write(("[%s %s] --- %s" % (
await resp.write(("[%s %s] --- %s" % (
record["dt"].strftime("%Y-%m-%d %H:%M:%S"),
record["service"],
record["msg"],
)).encode("utf-8") + b"\r\n")
except Exception as e:
if record is None:
record = e
await response.write(f"Module systemd.journal is unavailable.\n{record}".encode("utf-8"))
return response
return response
except Exception as exception:
await resp.write(f"Module systemd.journal is unavailable.\n{exception}".encode("utf-8"))
return resp
return resp
|
||||
|
||||
@ -84,7 +84,7 @@ class MsdApi:
|
||||
async def __set_connected_handler(self, req: Request) -> Response:
|
||||
await self.__msd.set_connected(valid_bool(req.query.get("connected")))
|
||||
return make_json_response()
|
||||
|
||||
|
||||
@exposed_http("POST", "/msd/make_image")
|
||||
async def __set_zipped_handler(self, req: Request) -> Response:
|
||||
await self.__msd.make_image(valid_bool(req.query.get("zipped")))
|
||||
@ -133,10 +133,10 @@ class MsdApi:
|
||||
src = compressed()
|
||||
size = -1
|
||||
|
||||
response = await start_streaming(req, "application/octet-stream", size, name + suffix)
|
||||
resp = await start_streaming(req, "application/octet-stream", size, name + suffix)
|
||||
async for chunk in src:
|
||||
await response.write(chunk)
|
||||
return response
|
||||
await resp.write(chunk)
|
||||
return resp
|
||||
|
||||
# =====
|
||||
|
||||
@ -166,11 +166,11 @@ class MsdApi:
|
||||
|
||||
name = ""
|
||||
size = written = 0
|
||||
response: (StreamResponse | None) = None
|
||||
resp: (StreamResponse | None) = None
|
||||
|
||||
async def stream_write_info() -> None:
|
||||
assert response is not None
|
||||
await stream_json(response, self.__make_write_info(name, size, written))
|
||||
assert resp is not None
|
||||
await stream_json(resp, self.__make_write_info(name, size, written))
|
||||
|
||||
try:
|
||||
async with htclient.download(
|
||||
@ -190,7 +190,7 @@ class MsdApi:
|
||||
get_logger(0).info("Downloading image %r as %r to MSD ...", url, name)
|
||||
async with self.__msd.write_image(name, size, remove_incomplete) as writer:
|
||||
chunk_size = writer.get_chunk_size()
|
||||
response = await start_streaming(req, "application/x-ndjson")
|
||||
resp = await start_streaming(req, "application/x-ndjson")
|
||||
await stream_write_info()
|
||||
last_report_ts = 0
|
||||
async for chunk in remote.content.iter_chunked(chunk_size):
|
||||
@@ -201,12 +201,12 @@ class MsdApi:
last_report_ts = now

await stream_write_info()
return response
return resp

except Exception as ex:
if response is not None:
if resp is not None:
await stream_write_info()
await stream_json_exception(response, ex)
await stream_json_exception(resp, ex)
elif isinstance(ex, aiohttp.ClientError):
return make_json_exception(ex, 400)
raise
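
This handler streams its progress as application/x-ndjson: one JSON object per line, ending with either a final info record or a serialized exception. A hypothetical consumer is sketched below; the endpoint path, query parameter, and field names are assumptions based on the handler shown in this hunk, not a documented client API:

import json
import asyncio
import aiohttp


async def watch_msd_download(base_url: str, image_url: str) -> None:
    async with aiohttp.ClientSession() as session:
        async with session.post(
            f"{base_url}/api/msd/write_remote",  # Assumed route, check your deployment
            params={"url": image_url},
        ) as resp:
            async for line in resp.content:
                if line.strip():
                    print(json.loads(line))  # Progress records: name, size, written


# asyncio.run(watch_msd_download("http://localhost", "https://example.com/image.iso"))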
|
||||
|
||||
@@ -102,14 +102,26 @@ class RedfishApi:
"Actions": {
"#ComputerSystem.Reset": {
"ResetType@Redfish.AllowableValues": list(self.__actions),
"target": "/redfish/v1/Systems/0/Actions/ComputerSystem.Reset"
"target": "/redfish/v1/Systems/0/Actions/ComputerSystem.Reset",
},
"#ComputerSystem.SetDefaultBootOrder": { # https://github.com/pikvm/pikvm/issues/1525
"target": "/redfish/v1/Systems/0/Actions/ComputerSystem.SetDefaultBootOrder",
},
},
"Id": "0",
"HostName": host,
"PowerState": ("On" if atx_state["leds"]["power"] else "Off"), # type: ignore
"Boot": {
"BootSourceOverrideEnabled": "Disabled",
"BootSourceOverrideTarget": None,
},
}, wrap_result=False)

@exposed_http("PATCH", "/redfish/v1/Systems/0")
async def __patch_handler(self, _: Request) -> Response:
# https://github.com/pikvm/pikvm/issues/1525
return Response(body=None, status=204)
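
On the client side, a Redfish power action is just a POST of a ResetType drawn from the advertised AllowableValues. A minimal sketch under stated assumptions: the target path is the one advertised above, but host, credentials, and the chosen ResetType are placeholders, and the allowed values depend on the configured actions:

import asyncio
import aiohttp


async def redfish_reset(host: str, user: str, passwd: str, reset_type: str = "ForceRestart") -> None:
    auth = aiohttp.BasicAuth(user, passwd)
    async with aiohttp.ClientSession(auth=auth) as session:
        async with session.post(
            f"https://{host}/redfish/v1/Systems/0/Actions/ComputerSystem.Reset",
            json={"ResetType": reset_type},
            ssl=False,  # Self-signed certificates are common on these devices
        ) as resp:
            resp.raise_for_status()


# asyncio.run(redfish_reset("kvm.local", "admin", "admin"))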
|
||||
|
||||
@exposed_http("POST", "/redfish/v1/Systems/0/Actions/ComputerSystem.Reset")
|
||||
async def __power_handler(self, req: Request) -> Response:
|
||||
try:
|
||||
|
||||
@ -28,6 +28,7 @@ from ....htserver import make_json_response
|
||||
|
||||
from ....validators.basic import valid_bool
|
||||
from ....validators.basic import valid_int_f0
|
||||
from ....validators.basic import valid_float_f0
|
||||
from ....validators.basic import valid_stripped_string_not_empty
|
||||
from ....validators.kvm import valid_atx_power_action
|
||||
from ....validators.kvm import valid_atx_button
|
||||
@ -52,9 +53,19 @@ class SwitchApi:
|
||||
async def __state_handler(self, _: Request) -> Response:
|
||||
return make_json_response(await self.__switch.get_state())
|
||||
|
||||
@exposed_http("POST", "/switch/set_active_prev")
|
||||
async def __set_active_prev_handler(self, _: Request) -> Response:
|
||||
await self.__switch.set_active_prev()
|
||||
return make_json_response()
|
||||
|
||||
@exposed_http("POST", "/switch/set_active_next")
|
||||
async def __set_active_next_handler(self, _: Request) -> Response:
|
||||
await self.__switch.set_active_next()
|
||||
return make_json_response()
|
||||
|
||||
@exposed_http("POST", "/switch/set_active")
|
||||
async def __set_active_port_handler(self, req: Request) -> Response:
|
||||
port = valid_int_f0(req.query.get("port"))
|
||||
port = valid_float_f0(req.query.get("port"))
|
||||
await self.__switch.set_active_port(port)
|
||||
return make_json_response()
|
||||
|
||||
@ -62,7 +73,7 @@ class SwitchApi:
|
||||
async def __set_beacon_handler(self, req: Request) -> Response:
|
||||
on = valid_bool(req.query.get("state"))
|
||||
if "port" in req.query:
|
||||
port = valid_int_f0(req.query.get("port"))
|
||||
port = valid_float_f0(req.query.get("port"))
|
||||
await self.__switch.set_port_beacon(port, on)
|
||||
elif "uplink" in req.query:
|
||||
unit = valid_int_f0(req.query.get("uplink"))
|
||||
@ -74,11 +85,12 @@ class SwitchApi:
|
||||
|
||||
@exposed_http("POST", "/switch/set_port_params")
|
||||
async def __set_port_params(self, req: Request) -> Response:
|
||||
port = valid_int_f0(req.query.get("port"))
|
||||
port = valid_float_f0(req.query.get("port"))
|
||||
params = {
|
||||
param: validator(req.query.get(param))
|
||||
for (param, validator) in [
|
||||
("edid_id", (lambda arg: valid_switch_edid_id(arg, allow_default=True))),
|
||||
("dummy", valid_bool),
|
||||
("name", valid_switch_port_name),
|
||||
("atx_click_power_delay", valid_switch_atx_click_delay),
|
||||
("atx_click_power_long_delay", valid_switch_atx_click_delay),
|
||||
@ -142,7 +154,7 @@ class SwitchApi:
|
||||
|
||||
@exposed_http("POST", "/switch/atx/power")
|
||||
async def __power_handler(self, req: Request) -> Response:
|
||||
port = valid_int_f0(req.query.get("port"))
|
||||
port = valid_float_f0(req.query.get("port"))
|
||||
action = valid_atx_power_action(req.query.get("action"))
|
||||
await ({
|
||||
"on": self.__switch.atx_power_on,
|
||||
@ -154,7 +166,7 @@ class SwitchApi:
|
||||
|
||||
@exposed_http("POST", "/switch/atx/click")
|
||||
async def __click_handler(self, req: Request) -> Response:
|
||||
port = valid_int_f0(req.query.get("port"))
|
||||
port = valid_float_f0(req.query.get("port"))
|
||||
button = valid_atx_button(req.query.get("button"))
|
||||
await ({
|
||||
"power": self.__switch.atx_click_power,
|
||||
|
||||
@ -20,6 +20,12 @@
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
import pwd
|
||||
import grp
|
||||
import dataclasses
|
||||
import time
|
||||
import datetime
|
||||
|
||||
import secrets
|
||||
import pyotp
|
||||
|
||||
@ -31,48 +37,79 @@ from ...plugins.auth import BaseAuthService
|
||||
from ...plugins.auth import get_auth_service_class
|
||||
|
||||
from ...htserver import HttpExposed
|
||||
from ...htserver import RequestUnixCredentials
|
||||
|
||||
|
||||
# =====
|
||||
class AuthManager:
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class _Session:
|
||||
user: str
|
||||
expire_ts: int
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
assert self.user == self.user.strip()
|
||||
assert self.user
|
||||
assert self.expire_ts >= 0
|
||||
|
||||
|
||||
class AuthManager: # pylint: disable=too-many-arguments,too-many-instance-attributes
|
||||
def __init__(
|
||||
self,
|
||||
enabled: bool,
|
||||
expire: int,
|
||||
usc_users: list[str],
|
||||
usc_groups: list[str],
|
||||
unauth_paths: list[str],
|
||||
|
||||
internal_type: str,
|
||||
internal_kwargs: dict,
|
||||
force_internal_users: list[str],
|
||||
int_type: str,
|
||||
int_kwargs: dict,
|
||||
force_int_users: list[str],
|
||||
|
||||
external_type: str,
|
||||
external_kwargs: dict,
|
||||
ext_type: str,
|
||||
ext_kwargs: dict,
|
||||
|
||||
totp_secret_path: str,
|
||||
) -> None:
|
||||
|
||||
logger = get_logger(0)
|
||||
|
||||
self.__enabled = enabled
|
||||
if not enabled:
|
||||
get_logger().warning("AUTHORIZATION IS DISABLED")
|
||||
logger.warning("AUTHORIZATION IS DISABLED")
|
||||
|
||||
assert expire >= 0
|
||||
self.__expire = expire
|
||||
if expire > 0:
|
||||
logger.info("Maximum user session time is limited: %s",
|
||||
self.__format_seconds(expire))
|
||||
|
||||
self.__usc_uids = self.__load_usc_uids(usc_users, usc_groups)
|
||||
if self.__usc_uids:
|
||||
logger.info("Selfauth UNIX socket access is allowed for users: %s",
|
||||
list(self.__usc_uids.values()))
|
||||
|
||||
self.__unauth_paths = frozenset(unauth_paths) # To speed up
|
||||
for path in self.__unauth_paths:
|
||||
get_logger().warning("Authorization is disabled for API %r", path)
|
||||
if self.__unauth_paths:
|
||||
logger.info("Authorization is disabled for APIs: %s",
|
||||
list(self.__unauth_paths))
|
||||
|
||||
self.__internal_service: (BaseAuthService | None) = None
|
||||
self.__int_service: (BaseAuthService | None) = None
|
||||
if enabled:
|
||||
self.__internal_service = get_auth_service_class(internal_type)(**internal_kwargs)
|
||||
get_logger().info("Using internal auth service %r", self.__internal_service.get_plugin_name())
|
||||
self.__int_service = get_auth_service_class(int_type)(**int_kwargs)
|
||||
logger.info("Using internal auth service %r",
|
||||
self.__int_service.get_plugin_name())
|
||||
|
||||
self.__force_internal_users = force_internal_users
|
||||
self.__force_int_users = force_int_users
|
||||
|
||||
self.__external_service: (BaseAuthService | None) = None
|
||||
if enabled and external_type:
|
||||
self.__external_service = get_auth_service_class(external_type)(**external_kwargs)
|
||||
get_logger().info("Using external auth service %r", self.__external_service.get_plugin_name())
|
||||
self.__ext_service: (BaseAuthService | None) = None
|
||||
if enabled and ext_type:
|
||||
self.__ext_service = get_auth_service_class(ext_type)(**ext_kwargs)
|
||||
logger.info("Using external auth service %r",
|
||||
self.__ext_service.get_plugin_name())
|
||||
|
||||
self.__totp_secret_path = totp_secret_path
|
||||
|
||||
self.__tokens: dict[str, str] = {} # {token: user}
|
||||
self.__sessions: dict[str, _Session] = {} # {token: session}
|
||||
|
||||
def is_auth_enabled(self) -> bool:
|
||||
return self.__enabled
|
||||
@ -88,7 +125,8 @@ class AuthManager:
|
||||
assert user == user.strip()
|
||||
assert user
|
||||
assert self.__enabled
|
||||
assert self.__internal_service
|
||||
assert self.__int_service
|
||||
logger = get_logger(0)
|
||||
|
||||
if self.__totp_secret_path:
|
||||
with open(self.__totp_secret_path) as file:
|
||||
@ -96,60 +134,150 @@ class AuthManager:
|
||||
if secret:
|
||||
code = passwd[-6:]
|
||||
if not pyotp.TOTP(secret).verify(code, valid_window=1):
|
||||
get_logger().error("Got access denied for user %r by TOTP", user)
|
||||
logger.error("Got access denied for user %r by TOTP", user)
|
||||
return False
|
||||
passwd = passwd[:-6]
|
||||
|
||||
if user not in self.__force_internal_users and self.__external_service:
|
||||
service = self.__external_service
|
||||
if user not in self.__force_int_users and self.__ext_service:
|
||||
service = self.__ext_service
|
||||
else:
|
||||
service = self.__internal_service
|
||||
service = self.__int_service
|
||||
|
||||
pname = service.get_plugin_name()
|
||||
ok = (await service.authorize(user, passwd))
|
||||
if ok:
|
||||
get_logger().info("Authorized user %r via auth service %r", user, service.get_plugin_name())
|
||||
logger.info("Authorized user %r via auth service %r", user, pname)
|
||||
else:
|
||||
get_logger().error("Got access denied for user %r from auth service %r", user, service.get_plugin_name())
|
||||
logger.error("Got access denied for user %r from auth service %r", user, pname)
|
||||
return ok
|
||||
|
||||
async def login(self, user: str, passwd: str) -> (str | None):
|
||||
async def login(self, user: str, passwd: str, expire: int) -> (str | None):
|
||||
assert user == user.strip()
|
||||
assert user
|
||||
assert expire >= 0
|
||||
assert self.__enabled
|
||||
|
||||
if (await self.authorize(user, passwd)):
|
||||
token = self.__make_new_token()
|
||||
self.__tokens[token] = user
|
||||
get_logger().info("Logged in user %r", user)
|
||||
session = _Session(
|
||||
user=user,
|
||||
expire_ts=self.__make_expire_ts(expire),
|
||||
)
|
||||
self.__sessions[token] = session
|
||||
get_logger(0).info("Logged in user %r; expire=%s, sessions_now=%d",
|
||||
session.user,
|
||||
self.__format_expire_ts(session.expire_ts),
|
||||
self.__get_sessions_number(session.user))
|
||||
return token
|
||||
else:
|
||||
return None
|
||||
|
||||
return None
|
||||
|
||||
def __make_new_token(self) -> str:
|
||||
for _ in range(10):
|
||||
token = secrets.token_hex(32)
|
||||
if token not in self.__tokens:
|
||||
if token not in self.__sessions:
|
||||
return token
|
||||
raise AssertionError("Can't generate new unique token")
|
||||
raise RuntimeError("Can't generate new unique token")
|
||||
|
||||
def __make_expire_ts(self, expire: int) -> int:
assert expire >= 0
assert self.__expire >= 0

if expire == 0:
# The user requested infinite session: apply global expire.
# It will allow this (0) or set a limit.
expire = self.__expire
else:
# The user wants a limited session
if self.__expire > 0:
# If we have a global limit, override the user limit
assert expire > 0
expire = min(expire, self.__expire)

if expire > 0:
return (self.__get_now_ts() + expire)

assert expire == 0
return 0
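
The effective session lifetime is the stricter of the per-login request and the global limit, with 0 standing for "infinite"; the method above then converts a non-zero result into a monotonic deadline. Pulled out as a pure function (a sketch, not the class method itself), the rule is:

def resolve_expire(requested: int, global_limit: int) -> int:
    # 0 means "no limit" on both sides.
    assert requested >= 0 and global_limit >= 0
    if requested == 0:
        return global_limit                   # Infinite request: the global limit (if any) wins
    if global_limit > 0:
        return min(requested, global_limit)   # Both limited: take the stricter one
    return requested                          # Only the request is limited


assert resolve_expire(0, 0) == 0         # Infinite session allowed
assert resolve_expire(0, 3600) == 3600   # Global limit caps an infinite request
assert resolve_expire(600, 3600) == 600
assert resolve_expire(7200, 3600) == 3600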
|
||||
|
||||
def __get_now_ts(self) -> int:
|
||||
return int(time.monotonic())
|
||||
|
||||
def __format_expire_ts(self, expire_ts: int) -> str:
|
||||
if expire_ts > 0:
|
||||
seconds = expire_ts - self.__get_now_ts()
|
||||
return f"[{self.__format_seconds(seconds)}]"
|
||||
return "INF"
|
||||
|
||||
def __format_seconds(self, seconds: int) -> str:
|
||||
return str(datetime.timedelta(seconds=seconds))
|
||||
|
||||
def __get_sessions_number(self, user: str) -> int:
|
||||
return sum(
|
||||
1
|
||||
for session in self.__sessions.values()
|
||||
if session.user == user
|
||||
)
|
||||
|
||||
def logout(self, token: str) -> None:
|
||||
assert self.__enabled
|
||||
if token in self.__tokens:
|
||||
user = self.__tokens[token]
|
||||
if token in self.__sessions:
|
||||
user = self.__sessions[token].user
|
||||
count = 0
|
||||
for (r_token, r_user) in list(self.__tokens.items()):
|
||||
if r_user == user:
|
||||
for (key_t, session) in list(self.__sessions.items()):
|
||||
if session.user == user:
|
||||
count += 1
|
||||
del self.__tokens[r_token]
|
||||
get_logger().info("Logged out user %r (%d)", user, count)
|
||||
del self.__sessions[key_t]
|
||||
get_logger(0).info("Logged out user %r; sessions_closed=%d", user, count)
|
||||
|
||||
def check(self, token: str) -> (str | None):
|
||||
assert self.__enabled
|
||||
return self.__tokens.get(token)
|
||||
session = self.__sessions.get(token)
|
||||
if session is not None:
|
||||
if session.expire_ts <= 0:
|
||||
# Infinite session
|
||||
return session.user
|
||||
else:
|
||||
# Limited session
|
||||
if self.__get_now_ts() < session.expire_ts:
|
||||
return session.user
|
||||
else:
|
||||
del self.__sessions[token]
|
||||
get_logger(0).info("The session of user %r is expired; sessions_left=%d",
|
||||
session.user,
|
||||
self.__get_sessions_number(session.user))
|
||||
return None
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def cleanup(self) -> None:
|
||||
if self.__enabled:
|
||||
assert self.__internal_service
|
||||
await self.__internal_service.cleanup()
|
||||
if self.__external_service:
|
||||
await self.__external_service.cleanup()
|
||||
assert self.__int_service
|
||||
await self.__int_service.cleanup()
|
||||
if self.__ext_service:
|
||||
await self.__ext_service.cleanup()
|
||||
|
||||
# =====
|
||||
|
||||
def __load_usc_uids(self, users: list[str], groups: list[str]) -> dict[int, str]:
|
||||
uids: dict[int, str] = {}
|
||||
|
||||
pwds: dict[str, int] = {}
|
||||
for pw in pwd.getpwall():
|
||||
assert pw.pw_name == pw.pw_name.strip()
|
||||
assert pw.pw_name
|
||||
pwds[pw.pw_name] = pw.pw_uid
|
||||
if pw.pw_name in users:
|
||||
uids[pw.pw_uid] = pw.pw_name
|
||||
|
||||
for gr in grp.getgrall():
|
||||
if gr.gr_name in groups:
|
||||
for member in gr.gr_mem:
|
||||
if member in pwds:
|
||||
uid = pwds[member]
|
||||
uids[uid] = member
|
||||
|
||||
return uids
|
||||
|
||||
def check_unix_credentials(self, creds: RequestUnixCredentials) -> (str | None):
|
||||
assert self.__enabled
|
||||
return self.__usc_uids.get(creds.uid)
|
||||
|
||||
@ -31,7 +31,7 @@ from .auth import AuthInfoSubmanager
|
||||
from .system import SystemInfoSubmanager
|
||||
from .meta import MetaInfoSubmanager
|
||||
from .extras import ExtrasInfoSubmanager
|
||||
from .hw import HwInfoSubmanager
|
||||
from .health import HealthInfoSubmanager
|
||||
from .fan import FanInfoSubmanager
|
||||
|
||||
|
||||
@ -39,11 +39,11 @@ from .fan import FanInfoSubmanager
|
||||
class InfoManager:
|
||||
def __init__(self, config: Section) -> None:
|
||||
self.__subs: dict[str, BaseInfoSubmanager] = {
|
||||
"system": SystemInfoSubmanager(config.kvmd.streamer.cmd),
|
||||
"system": SystemInfoSubmanager(config.kvmd.info.hw.platform, config.kvmd.streamer.cmd),
|
||||
"auth": AuthInfoSubmanager(config.kvmd.auth.enabled),
|
||||
"meta": MetaInfoSubmanager(config.kvmd.info.meta),
|
||||
"extras": ExtrasInfoSubmanager(config),
|
||||
"hw": HwInfoSubmanager(**config.kvmd.info.hw._unpack()),
|
||||
"health": HealthInfoSubmanager(**config.kvmd.info.hw._unpack(ignore="platform")),
|
||||
"fan": FanInfoSubmanager(**config.kvmd.info.fan._unpack()),
|
||||
}
|
||||
self.__queue: "asyncio.Queue[tuple[str, (dict | None)]]" = asyncio.Queue()
|
||||
@ -52,12 +52,29 @@ class InfoManager:
|
||||
return set(self.__subs)
|
||||
|
||||
async def get_state(self, fields: (list[str] | None)=None) -> dict:
fields = (fields or list(self.__subs))
return dict(zip(fields, await asyncio.gather(*[
fields_set = set(fields or list(self.__subs))

hw = ("hw" in fields_set) # Old field, kept for compatibility
system = ("system" in fields_set)
if hw:
fields_set.remove("hw")
fields_set.add("health")
fields_set.add("system")

state = dict(zip(fields_set, await asyncio.gather(*[
self.__subs[field].get_state()
for field in fields
for field in fields_set
])))

if hw:
state["hw"] = {
"health": state.pop("health"),
"platform": (state["system"] or {}).pop("platform"), # {} makes mypy happy
}
if not system:
state.pop("system")
return state
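
The compatibility shim keeps the old "hw" field working: a request for "hw" is internally expanded to "health" plus "system", and the result is folded back so old clients still see hw.health and hw.platform. A rough sketch of the same reshaping on a plain dict (field names as in the code above, the sample values are invented):

def fold_hw_compat(state: dict, requested: set[str]) -> dict:
    # 'state' holds the new-style submanager results ("health", "system", ...).
    if "hw" in requested:
        state["hw"] = {
            "health": state.pop("health"),
            "platform": (state.get("system") or {}).pop("platform", None),
        }
        if "system" not in requested:
            state.pop("system", None)
    return state


example = {
    "health": {"temp": {"cpu": 48.2}},
    "system": {"kvmd": {"version": "0.0"}, "platform": {"type": "rpi"}},
}
print(fold_hw_compat(example, {"hw"}))
# -> {'hw': {'health': {...}, 'platform': {'type': 'rpi'}}}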
|
||||
|
||||
async def trigger_state(self) -> None:
|
||||
await asyncio.gather(*[
|
||||
sub.trigger_state()
|
||||
@ -70,7 +87,7 @@ class InfoManager:
|
||||
# - auth -- Partial
|
||||
# - meta -- Partial, nullable
|
||||
# - extras -- Partial, nullable
|
||||
# - hw -- Partial
|
||||
# - health -- Partial
|
||||
# - fan -- Partial
|
||||
# ===========================
|
||||
|
||||
|
||||
@ -34,7 +34,6 @@ from ....yamlconf.loader import load_yaml_file
|
||||
|
||||
from .... import tools
|
||||
from .... import aiotools
|
||||
from .... import env
|
||||
|
||||
from .. import sysunit
|
||||
|
||||
|
||||
@ -99,9 +99,9 @@ class FanInfoSubmanager(BaseInfoSubmanager):
|
||||
async def __get_fan_state(self) -> (dict | None):
|
||||
try:
|
||||
async with self.__make_http_session() as session:
|
||||
async with session.get("http://localhost/state") as response:
|
||||
htclient.raise_not_200(response)
|
||||
return (await response.json())["result"]
|
||||
async with session.get("http://localhost/state") as resp:
|
||||
htclient.raise_not_200(resp)
|
||||
return (await resp.json())["result"]
|
||||
except Exception as ex:
|
||||
get_logger(0).error("Can't read fan state: %s", ex)
|
||||
return None
|
||||
|
||||
@ -20,7 +20,6 @@
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
import os
|
||||
import asyncio
|
||||
import copy
|
||||
|
||||
@ -45,59 +44,41 @@ _RetvalT = TypeVar("_RetvalT")
|
||||
|
||||
|
||||
# =====
|
||||
class HwInfoSubmanager(BaseInfoSubmanager):
|
||||
class HealthInfoSubmanager(BaseInfoSubmanager):
|
||||
def __init__(
|
||||
self,
|
||||
platform_path: str,
|
||||
vcgencmd_cmd: list[str],
|
||||
ignore_past: bool,
|
||||
state_poll: float,
|
||||
) -> None:
|
||||
|
||||
self.__platform_path = platform_path
|
||||
self.__vcgencmd_cmd = vcgencmd_cmd
|
||||
self.__ignore_past = ignore_past
|
||||
self.__state_poll = state_poll
|
||||
|
||||
self.__dt_cache: dict[str, str] = {}
|
||||
|
||||
self.__notifier = aiotools.AioNotifier()
|
||||
|
||||
async def get_state(self) -> dict:
|
||||
(
|
||||
base,
|
||||
serial,
|
||||
platform,
|
||||
throttling,
|
||||
cpu_percent,
|
||||
cpu_temp,
|
||||
mem,
|
||||
) = await asyncio.gather(
|
||||
self.__read_dt_file("model", upper=False),
|
||||
self.__read_dt_file("serial-number", upper=True),
|
||||
self.__read_platform_file(),
|
||||
self.__get_throttling(),
|
||||
self.__get_cpu_percent(),
|
||||
self.__get_cpu_temp(),
|
||||
self.__get_mem(),
|
||||
)
|
||||
return {
|
||||
"platform": {
|
||||
"type": "rpi",
|
||||
"base": base,
|
||||
"serial": serial,
|
||||
**platform, # type: ignore
|
||||
"temp": {
|
||||
"cpu": cpu_temp,
|
||||
},
|
||||
"health": {
|
||||
"temp": {
|
||||
"cpu": cpu_temp,
|
||||
},
|
||||
"cpu": {
|
||||
"percent": cpu_percent,
|
||||
},
|
||||
"mem": mem,
|
||||
"throttling": throttling,
|
||||
"cpu": {
|
||||
"percent": cpu_percent,
|
||||
},
|
||||
"mem": mem,
|
||||
"throttling": throttling,
|
||||
}
|
||||
|
||||
async def trigger_state(self) -> None:
|
||||
@ -115,42 +96,12 @@ class HwInfoSubmanager(BaseInfoSubmanager):
|
||||
|
||||
# =====
|
||||
|
||||
async def __read_dt_file(self, name: str, upper: bool) -> (str | None):
|
||||
if name not in self.__dt_cache:
|
||||
path = os.path.join(f"{env.PROCFS_PREFIX}/proc/device-tree", name)
|
||||
if not os.path.exists(path):
|
||||
path = os.path.join(f"{env.PROCFS_PREFIX}/etc/kvmd/hw_info/", name)
|
||||
try:
|
||||
self.__dt_cache[name] = (await aiotools.read_file(path)).strip(" \t\r\n\0")
|
||||
except Exception as err:
|
||||
#get_logger(0).warn("Can't read DT %s from %s: %s", name, path, err)
|
||||
return None
|
||||
return self.__dt_cache[name]
|
||||
|
||||
async def __read_platform_file(self) -> dict:
|
||||
try:
|
||||
text = await aiotools.read_file(self.__platform_path)
|
||||
parsed: dict[str, str] = {}
|
||||
for row in text.split("\n"):
|
||||
row = row.strip()
|
||||
if row:
|
||||
(key, value) = row.split("=", 1)
|
||||
parsed[key.strip()] = value.strip()
|
||||
return {
|
||||
"model": parsed["PIKVM_MODEL"],
|
||||
"video": parsed["PIKVM_VIDEO"],
|
||||
"board": parsed["PIKVM_BOARD"],
|
||||
}
|
||||
except Exception:
|
||||
get_logger(0).exception("Can't read device model")
|
||||
return {"model": None, "video": None, "board": None}
|
||||
|
||||
async def __get_cpu_temp(self) -> (float | None):
|
||||
temp_path = f"{env.SYSFS_PREFIX}/sys/class/thermal/thermal_zone0/temp"
|
||||
try:
|
||||
return int((await aiotools.read_file(temp_path)).strip()) / 1000
|
||||
except Exception as err:
|
||||
#get_logger(0).warn("Can't read CPU temp from %s: %s", temp_path, err)
|
||||
except Exception:
|
||||
# get_logger(0).warn("Can't read CPU temp from %s: %s", temp_path, err)
|
||||
return None
|
||||
|
||||
async def __get_cpu_percent(self) -> (float | None):
|
||||
@ -20,6 +20,8 @@
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
import socket
|
||||
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from ....logging import get_logger
|
||||
@ -39,7 +41,10 @@ class MetaInfoSubmanager(BaseInfoSubmanager):
|
||||
|
||||
async def get_state(self) -> (dict | None):
|
||||
try:
|
||||
return ((await aiotools.run_async(load_yaml_file, self.__meta_path)) or {})
|
||||
meta = ((await aiotools.run_async(load_yaml_file, self.__meta_path)) or {})
|
||||
if meta["server"]["host"] == "@auto":
|
||||
meta["server"]["host"] = socket.getfqdn()
|
||||
return meta
|
||||
except Exception:
|
||||
get_logger(0).exception("Can't parse meta")
|
||||
return None
|
||||
|
||||
@ -28,6 +28,7 @@ from typing import AsyncGenerator
|
||||
|
||||
from ....logging import get_logger
|
||||
|
||||
from .... import env
|
||||
from .... import aiotools
|
||||
from .... import aioproc
|
||||
|
||||
@ -38,12 +39,30 @@ from .base import BaseInfoSubmanager
|
||||
|
||||
# =====
|
||||
class SystemInfoSubmanager(BaseInfoSubmanager):
|
||||
def __init__(self, streamer_cmd: list[str]) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
platform_path: str,
|
||||
streamer_cmd: list[str],
|
||||
) -> None:
|
||||
|
||||
self.__platform_path = platform_path
|
||||
self.__streamer_cmd = streamer_cmd
|
||||
|
||||
self.__dt_cache: dict[str, str] = {}
|
||||
self.__notifier = aiotools.AioNotifier()
|
||||
|
||||
async def get_state(self) -> dict:
|
||||
streamer_info = await self.__get_streamer_info()
|
||||
(
|
||||
base,
|
||||
serial,
|
||||
pl,
|
||||
streamer_info,
|
||||
) = await asyncio.gather(
|
||||
self.__read_dt_file("model", upper=False),
|
||||
self.__read_dt_file("serial-number", upper=True),
|
||||
self.__read_platform_file(),
|
||||
self.__get_streamer_info(),
|
||||
)
|
||||
uname_info = platform.uname() # Uname using the internal cache
|
||||
return {
|
||||
"kvmd": {"version": __version__},
|
||||
@ -52,6 +71,12 @@ class SystemInfoSubmanager(BaseInfoSubmanager):
|
||||
field: getattr(uname_info, field)
|
||||
for field in ["system", "release", "version", "machine"]
|
||||
},
|
||||
"platform": {
|
||||
"type": "rpi",
|
||||
"base": base,
|
||||
"serial": serial,
|
||||
**pl, # type: ignore
|
||||
},
|
||||
}
|
||||
|
||||
async def trigger_state(self) -> None:
|
||||
@ -64,6 +89,35 @@ class SystemInfoSubmanager(BaseInfoSubmanager):
|
||||
|
||||
# =====
|
||||
|
||||
async def __read_dt_file(self, name: str, upper: bool) -> (str | None):
|
||||
if name not in self.__dt_cache:
|
||||
path = os.path.join(f"{env.PROCFS_PREFIX}/proc/device-tree", name)
|
||||
try:
|
||||
value = (await aiotools.read_file(path)).strip(" \t\r\n\0")
|
||||
self.__dt_cache[name] = (value.upper() if upper else value)
|
||||
except Exception as ex:
|
||||
get_logger(0).error("Can't read DT %s from %s: %s", name, path, ex)
|
||||
return None
|
||||
return self.__dt_cache[name]
|
||||
|
||||
async def __read_platform_file(self) -> dict:
try:
text = await aiotools.read_file(self.__platform_path)
parsed: dict[str, str] = {}
for row in text.split("\n"):
row = row.strip()
if row:
(key, value) = row.split("=", 1)
parsed[key.strip()] = value.strip()
return {
"model": parsed["PIKVM_MODEL"],
"video": parsed["PIKVM_VIDEO"],
"board": parsed["PIKVM_BOARD"],
}
except Exception:
get_logger(0).exception("Can't read device model")
return {"model": None, "video": None, "board": None}
|
||||
|
||||
async def __get_streamer_info(self) -> dict:
|
||||
version = ""
|
||||
features: dict[str, bool] = {}
|
||||
|
||||
@ -29,13 +29,11 @@ import time
|
||||
from typing import AsyncGenerator
|
||||
from xmlrpc.client import ServerProxy
|
||||
|
||||
from ...logging import get_logger
|
||||
|
||||
us_systemd_journal = True
|
||||
try:
|
||||
import systemd.journal
|
||||
except ImportError:
|
||||
import supervisor.xmlrpc
|
||||
us_systemd_journal = False
|
||||
|
||||
|
||||
@ -43,14 +41,14 @@ except ImportError:
|
||||
class LogReader:
|
||||
async def poll_log(self, seek: int, follow: bool) -> AsyncGenerator[dict, None]:
|
||||
if us_systemd_journal:
|
||||
reader = systemd.journal.Reader() # type: ignore
|
||||
reader = systemd.journal.Reader() # type: ignore
|
||||
reader.this_boot()
|
||||
# XXX: Due to the machine ID change in bootconfig, this does not work on the first boot.
|
||||
# reader.this_machine()
|
||||
reader.log_level(systemd.journal.LOG_DEBUG) # type: ignore
|
||||
reader.log_level(systemd.journal.LOG_DEBUG) # type: ignore
|
||||
services = set(
|
||||
service
|
||||
for service in systemd.journal.Reader().query_unique("_SYSTEMD_UNIT") # type: ignore
|
||||
for service in systemd.journal.Reader().query_unique("_SYSTEMD_UNIT") # type: ignore
|
||||
if re.match(r"kvmd(-\w+)*\.service", service)
|
||||
).union(["kvmd.service"])
|
||||
|
||||
@@ -69,10 +67,15 @@ class LogReader:
else:
await asyncio.sleep(1)
else:
server = ServerProxy('http://127.0.0.1',transport=supervisor.xmlrpc.SupervisorTransport(None, None, serverurl='unix:///tmp/supervisor.sock'))
log_entries = server.supervisor.readLog(0,0)
yield log_entries

import supervisor.xmlrpc # pylint: disable=import-outside-toplevel
server_transport = supervisor.xmlrpc.SupervisorTransport(None, None, serverurl="unix:///tmp/supervisor.sock")
server = ServerProxy("http://127.0.0.1", transport=server_transport)
log_entries = server.supervisor.readLog(0, 0)
yield {
"dt": int(time.time()),
"service": "kvmd.service",
"msg": str(log_entries).rstrip()
}
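
Note that the journal branch yields records whose "dt" is a datetime (the LogApi handler formats it with strftime), while this fallback yields an int timestamp. If the two branches should stay interchangeable, a datetime-shaped record would do it; this is only a hedged sketch, not what the code above does:

import time
import datetime


def make_fallback_record(msg: str) -> dict:
    # Mirrors the journal record shape: "dt" is a datetime, so
    # record["dt"].strftime(...) works the same way in both branches.
    return {
        "dt": datetime.datetime.fromtimestamp(time.time()),
        "service": "kvmd.service",
        "msg": msg.rstrip(),
    }


print(make_fallback_record("supervisord log tail goes here"))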
|
||||
|
||||
def __entry_to_record(self, entry: dict) -> dict[str, dict]:
|
||||
return {
|
||||
|
||||
@ -254,6 +254,10 @@ class KvmdServer(HttpServer): # pylint: disable=too-many-arguments,too-many-ins
|
||||
async def __ws_ping_handler(self, ws: WsSession, _: dict) -> None:
|
||||
await ws.send_event("pong", {})
|
||||
|
||||
@exposed_ws(0)
|
||||
async def __ws_bin_ping_handler(self, ws: WsSession, _: bytes) -> None:
|
||||
await ws.send_bin(255, b"") # Ping-pong
|
||||
|
||||
# ===== SYSTEM STUFF
|
||||
|
||||
def run(self, **kwargs: Any) -> None: # type: ignore # pylint: disable=arguments-differ
|
||||
@ -318,18 +322,17 @@ class KvmdServer(HttpServer): # pylint: disable=too-many-arguments,too-many-ins
|
||||
while True:
|
||||
cur = (self.__has_stream_clients() or self.__snapshoter.snapshoting() or self.__stream_forever)
|
||||
if not prev and cur:
|
||||
await self.__streamer.ensure_start(reset=False)
|
||||
await self.__streamer.ensure_start()
|
||||
elif prev and not cur:
|
||||
await self.__streamer.ensure_stop(immediately=False)
|
||||
await self.__streamer.ensure_stop()
|
||||
|
||||
if self.__reset_streamer or self.__new_streamer_params:
|
||||
start = self.__streamer.is_working()
|
||||
await self.__streamer.ensure_stop(immediately=True)
|
||||
if self.__new_streamer_params:
|
||||
self.__streamer.set_params(self.__new_streamer_params)
|
||||
self.__new_streamer_params = {}
|
||||
if start:
|
||||
await self.__streamer.ensure_start(reset=self.__reset_streamer)
|
||||
if self.__new_streamer_params:
|
||||
self.__streamer.set_params(self.__new_streamer_params)
|
||||
self.__new_streamer_params = {}
|
||||
self.__reset_streamer = True
|
||||
|
||||
if self.__reset_streamer:
|
||||
await self.__streamer.ensure_restart()
|
||||
self.__reset_streamer = False
|
||||
|
||||
prev = cur
|
||||
|
||||
@ -31,6 +31,8 @@ from ... import aiotools
|
||||
|
||||
from ...plugins.hid import BaseHid
|
||||
|
||||
from ...keyboard.mappings import WEB_TO_EVDEV
|
||||
|
||||
from .streamer import Streamer
|
||||
|
||||
|
||||
@ -63,7 +65,7 @@ class Snapshoter: # pylint: disable=too-many-instance-attributes
|
||||
else:
|
||||
self.__idle_interval = self.__live_interval = 0.0
|
||||
|
||||
self.__wakeup_key = wakeup_key
|
||||
self.__wakeup_key = WEB_TO_EVDEV.get(wakeup_key, 0)
|
||||
self.__wakeup_move = wakeup_move
|
||||
|
||||
self.__online_delay = online_delay
|
||||
@ -121,8 +123,8 @@ class Snapshoter: # pylint: disable=too-many-instance-attributes
|
||||
async def __wakeup(self) -> None:
|
||||
logger = get_logger(0)
|
||||
|
||||
if self.__wakeup_key:
|
||||
logger.info("Waking up using key %r ...", self.__wakeup_key)
|
||||
if self.__wakeup_key > 0:
|
||||
logger.info("Waking up using keyboard ...")
|
||||
await self.__hid.send_key_events(
|
||||
keys=[(self.__wakeup_key, True), (self.__wakeup_key, False)],
|
||||
no_ignore_keys=True,
|
||||
|
||||
@ -1,456 +0,0 @@
|
||||
# ========================================================================== #
|
||||
# #
|
||||
# KVMD - The main PiKVM daemon. #
|
||||
# #
|
||||
# Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com> #
|
||||
# #
|
||||
# This program is free software: you can redistribute it and/or modify #
|
||||
# it under the terms of the GNU General Public License as published by #
|
||||
# the Free Software Foundation, either version 3 of the License, or #
|
||||
# (at your option) any later version. #
|
||||
# #
|
||||
# This program is distributed in the hope that it will be useful, #
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
|
||||
# GNU General Public License for more details. #
|
||||
# #
|
||||
# You should have received a copy of the GNU General Public License #
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
|
||||
# #
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
import signal
|
||||
import asyncio
|
||||
import asyncio.subprocess
|
||||
import dataclasses
|
||||
import copy
|
||||
|
||||
from typing import AsyncGenerator
|
||||
from typing import Any
|
||||
|
||||
import aiohttp
|
||||
|
||||
from ...logging import get_logger
|
||||
|
||||
from ...clients.streamer import StreamerSnapshot
|
||||
from ...clients.streamer import HttpStreamerClient
|
||||
from ...clients.streamer import HttpStreamerClientSession
|
||||
|
||||
from ... import tools
|
||||
from ... import aiotools
|
||||
from ... import aioproc
|
||||
from ... import htclient
|
||||
|
||||
|
||||
# =====
|
||||
class _StreamerParams:
|
||||
__DESIRED_FPS = "desired_fps"
|
||||
|
||||
__QUALITY = "quality"
|
||||
|
||||
__RESOLUTION = "resolution"
|
||||
__AVAILABLE_RESOLUTIONS = "available_resolutions"
|
||||
|
||||
__H264_BITRATE = "h264_bitrate"
|
||||
__H264_GOP = "h264_gop"
|
||||
|
||||
def __init__( # pylint: disable=too-many-arguments
|
||||
self,
|
||||
quality: int,
|
||||
|
||||
resolution: str,
|
||||
available_resolutions: list[str],
|
||||
|
||||
desired_fps: int,
|
||||
desired_fps_min: int,
|
||||
desired_fps_max: int,
|
||||
|
||||
h264_bitrate: int,
|
||||
h264_bitrate_min: int,
|
||||
h264_bitrate_max: int,
|
||||
|
||||
h264_gop: int,
|
||||
h264_gop_min: int,
|
||||
h264_gop_max: int,
|
||||
) -> None:
|
||||
|
||||
self.__has_quality = bool(quality)
|
||||
self.__has_resolution = bool(resolution)
|
||||
self.__has_h264 = bool(h264_bitrate)
|
||||
|
||||
self.__params: dict = {self.__DESIRED_FPS: min(max(desired_fps, desired_fps_min), desired_fps_max)}
|
||||
self.__limits: dict = {self.__DESIRED_FPS: {"min": desired_fps_min, "max": desired_fps_max}}
|
||||
|
||||
if self.__has_quality:
|
||||
self.__params[self.__QUALITY] = quality
|
||||
|
||||
if self.__has_resolution:
|
||||
self.__params[self.__RESOLUTION] = resolution
|
||||
self.__limits[self.__AVAILABLE_RESOLUTIONS] = available_resolutions
|
||||
|
||||
if self.__has_h264:
|
||||
self.__params[self.__H264_BITRATE] = min(max(h264_bitrate, h264_bitrate_min), h264_bitrate_max)
|
||||
self.__limits[self.__H264_BITRATE] = {"min": h264_bitrate_min, "max": h264_bitrate_max}
|
||||
self.__params[self.__H264_GOP] = min(max(h264_gop, h264_gop_min), h264_gop_max)
|
||||
self.__limits[self.__H264_GOP] = {"min": h264_gop_min, "max": h264_gop_max}
|
||||
|
||||
def get_features(self) -> dict:
|
||||
return {
|
||||
self.__QUALITY: self.__has_quality,
|
||||
self.__RESOLUTION: self.__has_resolution,
|
||||
"h264": self.__has_h264,
|
||||
}
|
||||
|
||||
def get_limits(self) -> dict:
|
||||
limits = copy.deepcopy(self.__limits)
|
||||
if self.__has_resolution:
|
||||
limits[self.__AVAILABLE_RESOLUTIONS] = list(limits[self.__AVAILABLE_RESOLUTIONS])
|
||||
return limits
|
||||
|
||||
def get_params(self) -> dict:
|
||||
return dict(self.__params)
|
||||
|
||||
def set_params(self, params: dict) -> None:
|
||||
new_params = dict(self.__params)
|
||||
|
||||
if self.__QUALITY in params and self.__has_quality:
|
||||
new_params[self.__QUALITY] = min(max(params[self.__QUALITY], 1), 100)
|
||||
|
||||
if self.__RESOLUTION in params and self.__has_resolution:
|
||||
if params[self.__RESOLUTION] in self.__limits[self.__AVAILABLE_RESOLUTIONS]:
|
||||
new_params[self.__RESOLUTION] = params[self.__RESOLUTION]
|
||||
|
||||
for (key, enabled) in [
|
||||
(self.__DESIRED_FPS, True),
|
||||
(self.__H264_BITRATE, self.__has_h264),
|
||||
(self.__H264_GOP, self.__has_h264),
|
||||
]:
|
||||
if key in params and enabled:
|
||||
if self.__check_limits_min_max(key, params[key]):
|
||||
new_params[key] = params[key]
|
||||
|
||||
self.__params = new_params
|
||||
|
||||
def __check_limits_min_max(self, key: str, value: int) -> bool:
|
||||
return (self.__limits[key]["min"] <= value <= self.__limits[key]["max"])
|
||||
|
||||
|
||||
class Streamer: # pylint: disable=too-many-instance-attributes
|
||||
__ST_FULL = 0xFF
|
||||
__ST_PARAMS = 0x01
|
||||
__ST_STREAMER = 0x02
|
||||
__ST_SNAPSHOT = 0x04
|
||||
|
||||
def __init__( # pylint: disable=too-many-arguments,too-many-locals
|
||||
self,
|
||||
|
||||
reset_delay: float,
|
||||
shutdown_delay: float,
|
||||
state_poll: float,
|
||||
|
||||
unix_path: str,
|
||||
timeout: float,
|
||||
snapshot_timeout: float,
|
||||
|
||||
process_name_prefix: str,
|
||||
|
||||
pre_start_cmd: list[str],
|
||||
pre_start_cmd_remove: list[str],
|
||||
pre_start_cmd_append: list[str],
|
||||
|
||||
cmd: list[str],
|
||||
cmd_remove: list[str],
|
||||
cmd_append: list[str],
|
||||
|
||||
post_stop_cmd: list[str],
|
||||
post_stop_cmd_remove: list[str],
|
||||
post_stop_cmd_append: list[str],
|
||||
|
||||
**params_kwargs: Any,
|
||||
) -> None:
|
||||
|
||||
self.__reset_delay = reset_delay
|
||||
self.__shutdown_delay = shutdown_delay
|
||||
self.__state_poll = state_poll
|
||||
|
||||
self.__unix_path = unix_path
|
||||
self.__snapshot_timeout = snapshot_timeout
|
||||
|
||||
self.__process_name_prefix = process_name_prefix
|
||||
|
||||
self.__pre_start_cmd = tools.build_cmd(pre_start_cmd, pre_start_cmd_remove, pre_start_cmd_append)
|
||||
self.__cmd = tools.build_cmd(cmd, cmd_remove, cmd_append)
|
||||
self.__post_stop_cmd = tools.build_cmd(post_stop_cmd, post_stop_cmd_remove, post_stop_cmd_append)
|
||||
|
||||
self.__params = _StreamerParams(**params_kwargs)
|
||||
|
||||
self.__stop_task: (asyncio.Task | None) = None
|
||||
self.__stop_wip = False
|
||||
|
||||
self.__streamer_task: (asyncio.Task | None) = None
|
||||
self.__streamer_proc: (asyncio.subprocess.Process | None) = None # pylint: disable=no-member
|
||||
|
||||
self.__client = HttpStreamerClient(
|
||||
name="jpeg",
|
||||
unix_path=self.__unix_path,
|
||||
timeout=timeout,
|
||||
user_agent=htclient.make_user_agent("KVMD"),
|
||||
)
|
||||
self.__client_session: (HttpStreamerClientSession | None) = None
|
||||
|
||||
self.__snapshot: (StreamerSnapshot | None) = None
|
||||
|
||||
self.__notifier = aiotools.AioNotifier()
|
||||
|
||||
# =====
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def ensure_start(self, reset: bool) -> None:
|
||||
if not self.__streamer_task or self.__stop_task:
|
||||
logger = get_logger(0)
|
||||
|
||||
if self.__stop_task:
|
||||
if not self.__stop_wip:
|
||||
self.__stop_task.cancel()
|
||||
await asyncio.gather(self.__stop_task, return_exceptions=True)
|
||||
logger.info("Streamer stop cancelled")
|
||||
return
|
||||
else:
|
||||
await asyncio.gather(self.__stop_task, return_exceptions=True)
|
||||
|
||||
if reset and self.__reset_delay > 0:
|
||||
logger.info("Waiting %.2f seconds for reset delay ...", self.__reset_delay)
|
||||
await asyncio.sleep(self.__reset_delay)
|
||||
logger.info("Starting streamer ...")
|
||||
await self.__inner_start()
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def ensure_stop(self, immediately: bool) -> None:
|
||||
if self.__streamer_task:
|
||||
logger = get_logger(0)
|
||||
|
||||
if immediately:
|
||||
if self.__stop_task:
|
||||
if not self.__stop_wip:
|
||||
self.__stop_task.cancel()
|
||||
await asyncio.gather(self.__stop_task, return_exceptions=True)
|
||||
logger.info("Stopping streamer immediately ...")
|
||||
await self.__inner_stop()
|
||||
else:
|
||||
await asyncio.gather(self.__stop_task, return_exceptions=True)
|
||||
else:
|
||||
logger.info("Stopping streamer immediately ...")
|
||||
await self.__inner_stop()
|
||||
|
||||
elif not self.__stop_task:
|
||||
|
||||
async def delayed_stop() -> None:
|
||||
try:
|
||||
await asyncio.sleep(self.__shutdown_delay)
|
||||
self.__stop_wip = True
|
||||
logger.info("Stopping streamer after delay ...")
|
||||
await self.__inner_stop()
|
||||
finally:
|
||||
self.__stop_task = None
|
||||
self.__stop_wip = False
|
||||
|
||||
logger.info("Planning to stop streamer in %.2f seconds ...", self.__shutdown_delay)
|
||||
self.__stop_task = asyncio.create_task(delayed_stop())
|
||||
|
||||
def is_working(self) -> bool:
|
||||
# Running and not scheduled to stop
|
||||
return bool(self.__streamer_task and not self.__stop_task)
|
||||
|
||||
# =====
|
||||
|
||||
def set_params(self, params: dict) -> None:
|
||||
assert not self.__streamer_task
|
||||
self.__notifier.notify(self.__ST_PARAMS)
|
||||
return self.__params.set_params(params)
|
||||
|
||||
def get_params(self) -> dict:
|
||||
return self.__params.get_params()
|
||||
|
||||
# =====
|
||||
|
||||
async def get_state(self) -> dict:
|
||||
return {
|
||||
"features": self.__params.get_features(),
|
||||
"limits": self.__params.get_limits(),
|
||||
"params": self.__params.get_params(),
|
||||
"streamer": (await self.__get_streamer_state()),
|
||||
"snapshot": self.__get_snapshot_state(),
|
||||
}
|
||||
|
||||
async def trigger_state(self) -> None:
|
||||
self.__notifier.notify(self.__ST_FULL)
|
||||
|
||||
async def poll_state(self) -> AsyncGenerator[dict, None]:
|
||||
# ==== Granularity table ====
|
||||
# - features -- Full
|
||||
# - limits -- Partial, paired with params
|
||||
# - params -- Partial, paired with limits
|
||||
# - streamer -- Partial, nullable
|
||||
# - snapshot -- Partial
|
||||
# ===========================
|
||||
|
||||
def signal_handler(*_: Any) -> None:
|
||||
get_logger(0).info("Got SIGUSR2, checking the stream state ...")
|
||||
self.__notifier.notify(self.__ST_STREAMER)
|
||||
|
||||
get_logger(0).info("Installing SIGUSR2 streamer handler ...")
|
||||
asyncio.get_event_loop().add_signal_handler(signal.SIGUSR2, signal_handler)
|
||||
|
||||
prev: dict = {}
|
||||
while True:
|
||||
new: dict = {}
|
||||
|
||||
mask = await self.__notifier.wait(timeout=self.__state_poll)
|
||||
if mask == self.__ST_FULL:
|
||||
new = await self.get_state()
|
||||
prev = copy.deepcopy(new)
|
||||
yield new
|
||||
continue
|
||||
|
||||
if mask < 0:
|
||||
mask = self.__ST_STREAMER
|
||||
|
||||
def check_update(key: str, value: (dict | None)) -> None:
|
||||
if prev.get(key) != value:
|
||||
new[key] = value
|
||||
|
||||
if mask & self.__ST_PARAMS:
|
||||
check_update("params", self.__params.get_params())
|
||||
if mask & self.__ST_STREAMER:
|
||||
check_update("streamer", await self.__get_streamer_state())
|
||||
if mask & self.__ST_SNAPSHOT:
|
||||
check_update("snapshot", self.__get_snapshot_state())
|
||||
|
||||
if new and prev != new:
|
||||
prev.update(copy.deepcopy(new))
|
||||
yield new
|
||||
|
||||
async def __get_streamer_state(self) -> (dict | None):
|
||||
if self.__streamer_task:
|
||||
session = self.__ensure_client_session()
|
||||
try:
|
||||
return (await session.get_state())
|
||||
except (aiohttp.ClientConnectionError, aiohttp.ServerConnectionError):
|
||||
pass
|
||||
except Exception:
|
||||
get_logger().exception("Invalid streamer response from /state")
|
||||
return None
|
||||
|
||||
def __get_snapshot_state(self) -> dict:
|
||||
if self.__snapshot:
|
||||
snapshot = dataclasses.asdict(self.__snapshot)
|
||||
del snapshot["headers"]
|
||||
del snapshot["data"]
|
||||
return {"saved": snapshot}
|
||||
return {"saved": None}
|
||||
|
||||
# =====
|
||||
|
||||
async def take_snapshot(self, save: bool, load: bool, allow_offline: bool) -> (StreamerSnapshot | None):
|
||||
if load:
|
||||
return self.__snapshot
|
||||
logger = get_logger()
|
||||
session = self.__ensure_client_session()
|
||||
try:
|
||||
snapshot = await session.take_snapshot(self.__snapshot_timeout)
|
||||
if snapshot.online or allow_offline:
|
||||
if save:
|
||||
self.__snapshot = snapshot
|
||||
self.__notifier.notify(self.__ST_SNAPSHOT)
|
||||
return snapshot
|
||||
logger.error("Stream is offline, no signal or so")
|
||||
except (aiohttp.ClientConnectionError, aiohttp.ServerConnectionError) as ex:
|
||||
logger.error("Can't connect to streamer: %s", tools.efmt(ex))
|
||||
except Exception:
|
||||
logger.exception("Invalid streamer response from /snapshot")
|
||||
return None
|
||||
|
||||
def remove_snapshot(self) -> None:
|
||||
self.__snapshot = None
|
||||
|
||||
# =====
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def cleanup(self) -> None:
|
||||
await self.ensure_stop(immediately=True)
|
||||
if self.__client_session:
|
||||
await self.__client_session.close()
|
||||
self.__client_session = None
|
||||
|
||||
def __ensure_client_session(self) -> HttpStreamerClientSession:
|
||||
if not self.__client_session:
|
||||
self.__client_session = self.__client.make_session()
|
||||
return self.__client_session
|
||||
|
||||
# =====
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def __inner_start(self) -> None:
|
||||
assert not self.__streamer_task
|
||||
await self.__run_hook("PRE-START-CMD", self.__pre_start_cmd)
|
||||
self.__streamer_task = asyncio.create_task(self.__streamer_task_loop())
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def __inner_stop(self) -> None:
|
||||
assert self.__streamer_task
|
||||
self.__streamer_task.cancel()
|
||||
await asyncio.gather(self.__streamer_task, return_exceptions=True)
|
||||
await self.__kill_streamer_proc()
|
||||
await self.__run_hook("POST-STOP-CMD", self.__post_stop_cmd)
|
||||
self.__streamer_task = None
|
||||
|
||||
# =====
|
||||
|
||||
async def __streamer_task_loop(self) -> None: # pylint: disable=too-many-branches
|
||||
logger = get_logger(0)
|
||||
while True: # pylint: disable=too-many-nested-blocks
|
||||
try:
|
||||
await self.__start_streamer_proc()
|
||||
assert self.__streamer_proc is not None
|
||||
await aioproc.log_stdout_infinite(self.__streamer_proc, logger)
|
||||
raise RuntimeError("Streamer unexpectedly died")
|
||||
except asyncio.CancelledError:
|
||||
break
|
||||
except Exception:
|
||||
if self.__streamer_proc:
|
||||
logger.exception("Unexpected streamer error: pid=%d", self.__streamer_proc.pid)
|
||||
else:
|
||||
logger.exception("Can't start streamer")
|
||||
await self.__kill_streamer_proc()
|
||||
await asyncio.sleep(1)
|
||||
|
||||
def __make_cmd(self, cmd: list[str]) -> list[str]:
|
||||
return [
|
||||
part.format(
|
||||
unix=self.__unix_path,
|
||||
process_name_prefix=self.__process_name_prefix,
|
||||
**self.__params.get_params(),
|
||||
)
|
||||
for part in cmd
|
||||
]
|
||||
|
||||
async def __run_hook(self, name: str, cmd: list[str]) -> None:
|
||||
logger = get_logger()
|
||||
cmd = self.__make_cmd(cmd)
|
||||
logger.info("%s: %s", name, tools.cmdfmt(cmd))
|
||||
try:
|
||||
await aioproc.log_process(cmd, logger, prefix=name)
|
||||
except Exception as ex:
|
||||
logger.exception("Can't execute command: %s", ex)
|
||||
|
||||
async def __start_streamer_proc(self) -> None:
|
||||
assert self.__streamer_proc is None
|
||||
cmd = self.__make_cmd(self.__cmd)
|
||||
self.__streamer_proc = await aioproc.run_process(cmd)
|
||||
get_logger(0).info("Started streamer pid=%d: %s", self.__streamer_proc.pid, tools.cmdfmt(cmd))
|
||||
|
||||
async def __kill_streamer_proc(self) -> None:
|
||||
if self.__streamer_proc:
|
||||
await aioproc.kill_process(self.__streamer_proc, 1, get_logger(0))
|
||||
self.__streamer_proc = None
|
||||
kvmd/apps/kvmd/streamer/__init__.py (new file, 254 lines)
@@ -0,0 +1,254 @@
|
||||
# ========================================================================== #
|
||||
# #
|
||||
# KVMD - The main PiKVM daemon. #
|
||||
# #
|
||||
# Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com> #
|
||||
# #
|
||||
# This program is free software: you can redistribute it and/or modify #
|
||||
# it under the terms of the GNU General Public License as published by #
|
||||
# the Free Software Foundation, either version 3 of the License, or #
|
||||
# (at your option) any later version. #
|
||||
# #
|
||||
# This program is distributed in the hope that it will be useful, #
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
|
||||
# GNU General Public License for more details. #
|
||||
# #
|
||||
# You should have received a copy of the GNU General Public License #
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
|
||||
# #
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
import signal
|
||||
import asyncio
|
||||
import dataclasses
|
||||
import copy
|
||||
|
||||
from typing import AsyncGenerator
|
||||
from typing import Any
|
||||
|
||||
import aiohttp
|
||||
|
||||
from ....logging import get_logger
|
||||
|
||||
from ....clients.streamer import StreamerSnapshot
|
||||
from ....clients.streamer import HttpStreamerClient
|
||||
from ....clients.streamer import HttpStreamerClientSession
|
||||
|
||||
from .... import tools
|
||||
from .... import aiotools
|
||||
from .... import htclient
|
||||
|
||||
from .params import Params
|
||||
from .runner import Runner
|
||||
|
||||
|
||||
# =====
|
||||
class Streamer: # pylint: disable=too-many-instance-attributes
|
||||
__ST_FULL = 0xFF
|
||||
__ST_PARAMS = 0x01
|
||||
__ST_STREAMER = 0x02
|
||||
__ST_SNAPSHOT = 0x04
|
||||
|
||||
def __init__( # pylint: disable=too-many-arguments,too-many-locals
|
||||
self,
|
||||
|
||||
reset_delay: float,
|
||||
shutdown_delay: float,
|
||||
state_poll: float,
|
||||
|
||||
unix_path: str,
|
||||
timeout: float,
|
||||
snapshot_timeout: float,
|
||||
|
||||
process_name_prefix: str,
|
||||
|
||||
pre_start_cmd: list[str],
|
||||
pre_start_cmd_remove: list[str],
|
||||
pre_start_cmd_append: list[str],
|
||||
|
||||
cmd: list[str],
|
||||
cmd_remove: list[str],
|
||||
cmd_append: list[str],
|
||||
|
||||
post_stop_cmd: list[str],
|
||||
post_stop_cmd_remove: list[str],
|
||||
post_stop_cmd_append: list[str],
|
||||
|
||||
**params_kwargs: Any,
|
||||
) -> None:
|
||||
|
||||
self.__state_poll = state_poll
|
||||
|
||||
self.__unix_path = unix_path
|
||||
self.__snapshot_timeout = snapshot_timeout
|
||||
self.__process_name_prefix = process_name_prefix
|
||||
|
||||
self.__params = Params(**params_kwargs)
|
||||
|
||||
self.__runner = Runner(
|
||||
reset_delay=reset_delay,
|
||||
shutdown_delay=shutdown_delay,
|
||||
pre_start_cmd=tools.build_cmd(pre_start_cmd, pre_start_cmd_remove, pre_start_cmd_append),
|
||||
cmd=tools.build_cmd(cmd, cmd_remove, cmd_append),
|
||||
post_stop_cmd=tools.build_cmd(post_stop_cmd, post_stop_cmd_remove, post_stop_cmd_append),
|
||||
)
|
||||
|
||||
self.__client = HttpStreamerClient(
|
||||
name="jpeg",
|
||||
unix_path=self.__unix_path,
|
||||
timeout=timeout,
|
||||
user_agent=htclient.make_user_agent("KVMD"),
|
||||
)
|
||||
self.__client_session: (HttpStreamerClientSession | None) = None
|
||||
|
||||
self.__snapshot: (StreamerSnapshot | None) = None
|
||||
|
||||
self.__notifier = aiotools.AioNotifier()
|
||||
|
||||
# =====
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def ensure_start(self) -> None:
|
||||
await self.__runner.ensure_start(self.__make_params())
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def ensure_restart(self) -> None:
|
||||
await self.__runner.ensure_restart(self.__make_params())
|
||||
|
||||
def __make_params(self) -> dict:
|
||||
return {
|
||||
"unix": self.__unix_path,
|
||||
"process_name_prefix": self.__process_name_prefix,
|
||||
**self.__params.get_params(),
|
||||
}
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def ensure_stop(self) -> None:
|
||||
await self.__runner.ensure_stop(immediately=False)
|
||||
|
||||
# =====
|
||||
|
||||
def set_params(self, params: dict) -> None:
|
||||
self.__notifier.notify(self.__ST_PARAMS)
|
||||
return self.__params.set_params(params)
|
||||
|
||||
def get_params(self) -> dict:
|
||||
return self.__params.get_params()
|
||||
|
||||
# =====
|
||||
|
||||
async def get_state(self) -> dict:
|
||||
return {
|
||||
"features": self.__params.get_features(),
|
||||
"limits": self.__params.get_limits(),
|
||||
"params": self.__params.get_params(),
|
||||
"streamer": (await self.__get_streamer_state()),
|
||||
"snapshot": self.__get_snapshot_state(),
|
||||
}
|
||||
|
||||
async def trigger_state(self) -> None:
|
||||
self.__notifier.notify(self.__ST_FULL)
|
||||
|
||||
async def poll_state(self) -> AsyncGenerator[dict, None]:
|
||||
# ==== Granularity table ====
|
||||
# - features -- Full
|
||||
# - limits -- Partial, paired with params
|
||||
# - params -- Partial, paired with limits
|
||||
# - streamer -- Partial, nullable
|
||||
# - snapshot -- Partial
|
||||
# ===========================
|
||||
|
||||
def signal_handler(*_: Any) -> None:
|
||||
get_logger(0).info("Got SIGUSR2, checking the stream state ...")
|
||||
self.__notifier.notify(self.__ST_STREAMER)
|
||||
|
||||
get_logger(0).info("Installing SIGUSR2 streamer handler ...")
|
||||
asyncio.get_event_loop().add_signal_handler(signal.SIGUSR2, signal_handler)
|
||||
|
||||
prev: dict = {}
|
||||
while True:
|
||||
new: dict = {}
|
||||
|
||||
mask = await self.__notifier.wait(timeout=self.__state_poll)
|
||||
if mask == self.__ST_FULL:
|
||||
new = await self.get_state()
|
||||
prev = copy.deepcopy(new)
|
||||
yield new
|
||||
continue
|
||||
|
||||
if mask < 0:
|
||||
mask = self.__ST_STREAMER
|
||||
|
||||
def check_update(key: str, value: (dict | None)) -> None:
|
||||
if prev.get(key) != value:
|
||||
new[key] = value
|
||||
|
||||
if mask & self.__ST_PARAMS:
|
||||
check_update("params", self.__params.get_params())
|
||||
if mask & self.__ST_STREAMER:
|
||||
check_update("streamer", await self.__get_streamer_state())
|
||||
if mask & self.__ST_SNAPSHOT:
|
||||
check_update("snapshot", self.__get_snapshot_state())
|
||||
|
||||
if new and prev != new:
|
||||
prev.update(copy.deepcopy(new))
|
||||
yield new
|
||||
|
||||
async def __get_streamer_state(self) -> (dict | None):
|
||||
if self.__runner.is_running():
|
||||
session = self.__ensure_client_session()
|
||||
try:
|
||||
return (await session.get_state())
|
||||
except (aiohttp.ClientConnectionError, aiohttp.ServerConnectionError):
|
||||
pass
|
||||
except Exception:
|
||||
get_logger().exception("Invalid streamer response from /state")
|
||||
return None
|
||||
|
||||
def __get_snapshot_state(self) -> dict:
|
||||
if self.__snapshot:
|
||||
snapshot = dataclasses.asdict(self.__snapshot)
|
||||
del snapshot["headers"]
|
||||
del snapshot["data"]
|
||||
return {"saved": snapshot}
|
||||
return {"saved": None}
|
||||
|
||||
# =====
|
||||
|
||||
async def take_snapshot(self, save: bool, load: bool, allow_offline: bool) -> (StreamerSnapshot | None):
|
||||
if load:
|
||||
return self.__snapshot
|
||||
logger = get_logger()
|
||||
session = self.__ensure_client_session()
|
||||
try:
|
||||
snapshot = await session.take_snapshot(self.__snapshot_timeout)
|
||||
if snapshot.online or allow_offline:
|
||||
if save:
|
||||
self.__snapshot = snapshot
|
||||
self.__notifier.notify(self.__ST_SNAPSHOT)
|
||||
return snapshot
|
||||
logger.error("Stream is offline, no signal or so")
|
||||
except (aiohttp.ClientConnectionError, aiohttp.ServerConnectionError) as ex:
|
||||
logger.error("Can't connect to streamer: %s", tools.efmt(ex))
|
||||
except Exception:
|
||||
logger.exception("Invalid streamer response from /snapshot")
|
||||
return None
|
||||
|
||||
def remove_snapshot(self) -> None:
|
||||
self.__snapshot = None
|
||||
|
||||
# =====
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def cleanup(self) -> None:
|
||||
await self.__runner.ensure_stop(immediately=True)
|
||||
if self.__client_session:
|
||||
await self.__client_session.close()
|
||||
self.__client_session = None
|
||||
|
||||
def __ensure_client_session(self) -> HttpStreamerClientSession:
|
||||
if not self.__client_session:
|
||||
self.__client_session = self.__client.make_session()
|
||||
return self.__client_session
|
||||
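The poll_state() loop above yields only the keys whose values changed since the previous report, keeping a deep-copied prev snapshot for comparison. Below is a minimal self-contained sketch of that delta-polling pattern; the function and parameter names are illustrative and not part of KVMD:

import asyncio
import copy
from typing import AsyncGenerator, Awaitable, Callable

async def poll_deltas(read_state: Callable[[], Awaitable[dict]], interval: float = 1.0) -> AsyncGenerator[dict, None]:
    prev: dict = {}
    while True:
        full = await read_state()
        # Report only the keys that differ from the previous snapshot
        new = {key: value for (key, value) in full.items() if prev.get(key) != value}
        if new:
            prev.update(copy.deepcopy(new))
            yield new
        await asyncio.sleep(interval)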
117
kvmd/apps/kvmd/streamer/params.py
Normal file
@ -0,0 +1,117 @@
|
||||
# ========================================================================== #
|
||||
# #
|
||||
# KVMD - The main PiKVM daemon. #
|
||||
# #
|
||||
# Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com> #
|
||||
# #
|
||||
# This program is free software: you can redistribute it and/or modify #
|
||||
# it under the terms of the GNU General Public License as published by #
|
||||
# the Free Software Foundation, either version 3 of the License, or #
|
||||
# (at your option) any later version. #
|
||||
# #
|
||||
# This program is distributed in the hope that it will be useful, #
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
|
||||
# GNU General Public License for more details. #
|
||||
# #
|
||||
# You should have received a copy of the GNU General Public License #
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
|
||||
# #
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
import copy
|
||||
|
||||
|
||||
# =====
|
||||
class Params:
|
||||
__DESIRED_FPS = "desired_fps"
|
||||
|
||||
__QUALITY = "quality"
|
||||
|
||||
__RESOLUTION = "resolution"
|
||||
__AVAILABLE_RESOLUTIONS = "available_resolutions"
|
||||
|
||||
__H264 = "h264"
|
||||
__H264_BITRATE = "h264_bitrate"
|
||||
__H264_GOP = "h264_gop"
|
||||
|
||||
def __init__( # pylint: disable=too-many-arguments
|
||||
self,
|
||||
quality: int,
|
||||
|
||||
resolution: str,
|
||||
available_resolutions: list[str],
|
||||
|
||||
desired_fps: int,
|
||||
desired_fps_min: int,
|
||||
desired_fps_max: int,
|
||||
|
||||
h264_bitrate: int,
|
||||
h264_bitrate_min: int,
|
||||
h264_bitrate_max: int,
|
||||
|
||||
h264_gop: int,
|
||||
h264_gop_min: int,
|
||||
h264_gop_max: int,
|
||||
) -> None:
|
||||
|
||||
self.__has_quality = bool(quality)
|
||||
self.__has_resolution = bool(resolution)
|
||||
self.__has_h264 = bool(h264_bitrate)
|
||||
|
||||
self.__params: dict = {self.__DESIRED_FPS: min(max(desired_fps, desired_fps_min), desired_fps_max)}
|
||||
self.__limits: dict = {self.__DESIRED_FPS: {"min": desired_fps_min, "max": desired_fps_max}}
|
||||
|
||||
if self.__has_quality:
|
||||
self.__params[self.__QUALITY] = quality
|
||||
|
||||
if self.__has_resolution:
|
||||
self.__params[self.__RESOLUTION] = resolution
|
||||
self.__limits[self.__AVAILABLE_RESOLUTIONS] = available_resolutions
|
||||
|
||||
if self.__has_h264:
|
||||
self.__params[self.__H264_BITRATE] = min(max(h264_bitrate, h264_bitrate_min), h264_bitrate_max)
|
||||
self.__limits[self.__H264_BITRATE] = {"min": h264_bitrate_min, "max": h264_bitrate_max}
|
||||
self.__params[self.__H264_GOP] = min(max(h264_gop, h264_gop_min), h264_gop_max)
|
||||
self.__limits[self.__H264_GOP] = {"min": h264_gop_min, "max": h264_gop_max}
|
||||
|
||||
def get_features(self) -> dict:
|
||||
return {
|
||||
self.__QUALITY: self.__has_quality,
|
||||
self.__RESOLUTION: self.__has_resolution,
|
||||
self.__H264: self.__has_h264,
|
||||
}
|
||||
|
||||
def get_limits(self) -> dict:
|
||||
limits = copy.deepcopy(self.__limits)
|
||||
if self.__has_resolution:
|
||||
limits[self.__AVAILABLE_RESOLUTIONS] = list(limits[self.__AVAILABLE_RESOLUTIONS])
|
||||
return limits
|
||||
|
||||
def get_params(self) -> dict:
|
||||
return dict(self.__params)
|
||||
|
||||
def set_params(self, params: dict) -> None:
|
||||
new = dict(self.__params)
|
||||
|
||||
if self.__QUALITY in params and self.__has_quality:
|
||||
new[self.__QUALITY] = min(max(params[self.__QUALITY], 1), 100)
|
||||
|
||||
if self.__RESOLUTION in params and self.__has_resolution:
|
||||
if params[self.__RESOLUTION] in self.__limits[self.__AVAILABLE_RESOLUTIONS]:
|
||||
new[self.__RESOLUTION] = params[self.__RESOLUTION]
|
||||
|
||||
for (key, enabled) in [
|
||||
(self.__DESIRED_FPS, True),
|
||||
(self.__H264_BITRATE, self.__has_h264),
|
||||
(self.__H264_GOP, self.__has_h264),
|
||||
]:
|
||||
if key in params and enabled:
|
||||
if self.__check_limits_min_max(key, params[key]):
|
||||
new[key] = params[key]
|
||||
|
||||
self.__params = new
|
||||
|
||||
def __check_limits_min_max(self, key: str, value: int) -> bool:
|
||||
return (self.__limits[key]["min"] <= value <= self.__limits[key]["max"])
|
||||
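Params above folds every numeric setting into its allowed window with the min(max(value, lo), hi) idiom before storing it. A standalone sketch of that clamp, with bounds chosen purely for illustration:

def clamp(value: int, lo: int, hi: int) -> int:
    # Same idiom as Params: never below lo, never above hi
    return min(max(value, lo), hi)

assert clamp(150, 1, 100) == 100  # Too high -> upper bound
assert clamp(-5, 1, 100) == 1     # Too low -> lower bound
assert clamp(42, 1, 100) == 42    # In range -> unchanged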
182
kvmd/apps/kvmd/streamer/runner.py
Normal file
@ -0,0 +1,182 @@
|
||||
# ========================================================================== #
|
||||
# #
|
||||
# KVMD - The main PiKVM daemon. #
|
||||
# #
|
||||
# Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com> #
|
||||
# #
|
||||
# This program is free software: you can redistribute it and/or modify #
|
||||
# it under the terms of the GNU General Public License as published by #
|
||||
# the Free Software Foundation, either version 3 of the License, or #
|
||||
# (at your option) any later version. #
|
||||
# #
|
||||
# This program is distributed in the hope that it will be useful, #
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
|
||||
# GNU General Public License for more details. #
|
||||
# #
|
||||
# You should have received a copy of the GNU General Public License #
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
|
||||
# #
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
import asyncio
|
||||
import asyncio.subprocess
|
||||
|
||||
from ....logging import get_logger
|
||||
|
||||
from .... import tools
|
||||
from .... import aiotools
|
||||
from .... import aioproc
|
||||
|
||||
|
||||
# =====
|
||||
class Runner: # pylint: disable=too-many-instance-attributes
|
||||
def __init__(
|
||||
self,
|
||||
reset_delay: float,
|
||||
shutdown_delay: float,
|
||||
|
||||
pre_start_cmd: list[str],
|
||||
cmd: list[str],
|
||||
post_stop_cmd: list[str],
|
||||
) -> None:
|
||||
|
||||
self.__reset_delay = reset_delay
|
||||
self.__shutdown_delay = shutdown_delay
|
||||
|
||||
self.__pre_start_cmd: list[str] = pre_start_cmd
|
||||
self.__cmd: list[str] = cmd
|
||||
self.__post_stop_cmd: list[str] = post_stop_cmd
|
||||
|
||||
self.__proc_params: dict = {}
|
||||
self.__proc_task: (asyncio.Task | None) = None
|
||||
self.__proc: (asyncio.subprocess.Process | None) = None # pylint: disable=no-member
|
||||
|
||||
self.__stopper_task: (asyncio.Task | None) = None
|
||||
self.__stopper_wip = False
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def ensure_start(self, params: dict) -> None:
|
||||
if not self.__proc_task or self.__stopper_task:
|
||||
logger = get_logger(0)
|
||||
|
||||
if self.__stopper_task:
|
||||
if not self.__stopper_wip:
|
||||
self.__stopper_task.cancel()
|
||||
await asyncio.gather(self.__stopper_task, return_exceptions=True)
|
||||
logger.info("Streamer stop cancelled")
|
||||
return
|
||||
else:
|
||||
await asyncio.gather(self.__stopper_task, return_exceptions=True)
|
||||
|
||||
logger.info("Starting streamer ...")
|
||||
await self.__inner_start(params)
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def ensure_restart(self, params: dict) -> None:
|
||||
logger = get_logger(0)
|
||||
start = bool(self.__proc_task and not self.__stopper_task)  # Running and not scheduled to stop
|
||||
await self.ensure_stop(immediately=True)
|
||||
if self.__reset_delay > 0:
|
||||
logger.info("Waiting %.2f seconds for reset delay ...", self.__reset_delay)
|
||||
await asyncio.sleep(self.__reset_delay)
|
||||
if start:
|
||||
await self.ensure_start(params)
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def ensure_stop(self, immediately: bool) -> None:
|
||||
if self.__proc_task:
|
||||
logger = get_logger(0)
|
||||
|
||||
if immediately:
|
||||
if self.__stopper_task:
|
||||
if not self.__stopper_wip:
|
||||
self.__stopper_task.cancel()
|
||||
await asyncio.gather(self.__stopper_task, return_exceptions=True)
|
||||
logger.info("Stopping streamer immediately ...")
|
||||
await self.__inner_stop()
|
||||
else:
|
||||
await asyncio.gather(self.__stopper_task, return_exceptions=True)
|
||||
else:
|
||||
logger.info("Stopping streamer immediately ...")
|
||||
await self.__inner_stop()
|
||||
|
||||
elif not self.__stopper_task:
|
||||
|
||||
async def delayed_stop() -> None:
|
||||
try:
|
||||
await asyncio.sleep(self.__shutdown_delay)
|
||||
self.__stopper_wip = True
|
||||
logger.info("Stopping streamer after delay ...")
|
||||
await self.__inner_stop()
|
||||
finally:
|
||||
self.__stopper_task = None
|
||||
self.__stopper_wip = False
|
||||
|
||||
logger.info("Planning to stop streamer in %.2f seconds ...", self.__shutdown_delay)
|
||||
self.__stopper_task = asyncio.create_task(delayed_stop())
|
||||
|
||||
def is_running(self) -> bool:
|
||||
return bool(self.__proc_task)
|
||||
|
||||
# =====
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def __inner_start(self, params: dict) -> None:
|
||||
assert not self.__proc_task
|
||||
self.__proc_params = params
|
||||
await self.__run_hook("PRE-START-CMD", self.__pre_start_cmd)
|
||||
self.__proc_task = asyncio.create_task(self.__process_task_loop())
|
||||
|
||||
@aiotools.atomic_fg
|
||||
async def __inner_stop(self) -> None:
|
||||
assert self.__proc_task
|
||||
self.__proc_task.cancel()
|
||||
await asyncio.gather(self.__proc_task, return_exceptions=True)
|
||||
await self.__kill_process()
|
||||
await self.__run_hook("POST-STOP-CMD", self.__post_stop_cmd)
|
||||
self.__proc_task = None
|
||||
|
||||
# =====
|
||||
|
||||
async def __process_task_loop(self) -> None: # pylint: disable=too-many-branches
|
||||
logger = get_logger(0)
|
||||
while True: # pylint: disable=too-many-nested-blocks
|
||||
try:
|
||||
await self.__start_process()
|
||||
assert self.__proc is not None
|
||||
await aioproc.log_stdout_infinite(self.__proc, logger)
|
||||
raise RuntimeError("Streamer unexpectedly died")
|
||||
except asyncio.CancelledError:
|
||||
break
|
||||
except Exception:
|
||||
if self.__proc:
|
||||
logger.exception("Unexpected streamer error: pid=%d", self.__proc.pid)
|
||||
else:
|
||||
logger.exception("Can't start streamer")
|
||||
await self.__kill_process()
|
||||
await asyncio.sleep(1)
|
||||
|
||||
def __make_cmd(self, cmd: list[str]) -> list[str]:
|
||||
return [part.format(**self.__proc_params) for part in cmd]
|
||||
|
||||
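# Note: __make_cmd() above expands str.format() placeholders in every argument from
# self.__proc_params, the dict built by the caller (e.g. {"unix": ..., "process_name_prefix": ...}).
# A hypothetical command entry such as "--unix={unix}" would therefore become
# "--unix=/run/kvmd/ustreamer.sock" when the params carry that socket path.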
async def __run_hook(self, name: str, cmd: list[str]) -> None:
|
||||
logger = get_logger()
|
||||
cmd = self.__make_cmd(cmd)
|
||||
logger.info("%s: %s", name, tools.cmdfmt(cmd))
|
||||
try:
|
||||
await aioproc.log_process(cmd, logger, prefix=name)
|
||||
except Exception:
|
||||
logger.exception("Can't execute %s hook: %s", name, tools.cmdfmt(cmd))
|
||||
|
||||
async def __start_process(self) -> None:
|
||||
assert self.__proc is None
|
||||
cmd = self.__make_cmd(self.__cmd)
|
||||
self.__proc = await aioproc.run_process(cmd)
|
||||
get_logger(0).info("Started streamer pid=%d: %s", self.__proc.pid, tools.cmdfmt(cmd))
|
||||
|
||||
async def __kill_process(self) -> None:
|
||||
if self.__proc:
|
||||
await aioproc.kill_process(self.__proc, 1, get_logger(0))
|
||||
self.__proc = None
|
||||
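ensure_stop() above implements a cancellable delayed shutdown: a helper task sleeps for shutdown_delay and performs the real stop only if no start request cancels it first. A minimal sketch of that pattern; the class and callback names here are illustrative:

import asyncio
from typing import Awaitable, Callable

class DelayedStopper:
    def __init__(self, delay: float, stop: Callable[[], Awaitable[None]]) -> None:
        self._delay = delay
        self._stop = stop  # Async callable that performs the real stop
        self._task: (asyncio.Task | None) = None

    def schedule(self) -> None:
        if self._task is None:
            self._task = asyncio.create_task(self._stop_later())

    async def cancel(self) -> None:
        # The "a start arrived before the delay expired" branch
        if self._task is not None:
            self._task.cancel()
            await asyncio.gather(self._task, return_exceptions=True)
            self._task = None

    async def _stop_later(self) -> None:
        try:
            await asyncio.sleep(self._delay)
            await self._stop()
        finally:
            self._task = None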
@ -32,6 +32,7 @@ from .lib import Inotify
|
||||
|
||||
from .types import Edid
|
||||
from .types import Edids
|
||||
from .types import Dummies
|
||||
from .types import Color
|
||||
from .types import Colors
|
||||
from .types import PortNames
|
||||
@ -68,6 +69,7 @@ class SwitchUnknownEdidError(SwitchOperationError):
|
||||
# =====
|
||||
class Switch: # pylint: disable=too-many-public-methods
|
||||
__X_EDIDS = "edids"
|
||||
__X_DUMMIES = "dummies"
|
||||
__X_COLORS = "colors"
|
||||
__X_PORT_NAMES = "port_names"
|
||||
__X_ATX_CP_DELAYS = "atx_cp_delays"
|
||||
@ -75,7 +77,7 @@ class Switch: # pylint: disable=too-many-public-methods
|
||||
__X_ATX_CR_DELAYS = "atx_cr_delays"
|
||||
|
||||
__X_ALL = frozenset([
|
||||
__X_EDIDS, __X_COLORS, __X_PORT_NAMES,
|
||||
__X_EDIDS, __X_DUMMIES, __X_COLORS, __X_PORT_NAMES,
|
||||
__X_ATX_CP_DELAYS, __X_ATX_CPL_DELAYS, __X_ATX_CR_DELAYS,
|
||||
])
|
||||
|
||||
@ -84,11 +86,12 @@ class Switch: # pylint: disable=too-many-public-methods
|
||||
device_path: str,
|
||||
default_edid_path: str,
|
||||
pst_unix_path: str,
|
||||
ignore_hpd_on_top: bool,
|
||||
) -> None:
|
||||
|
||||
self.__default_edid_path = default_edid_path
|
||||
|
||||
self.__chain = Chain(device_path)
|
||||
self.__chain = Chain(device_path, ignore_hpd_on_top)
|
||||
self.__cache = StateCache()
|
||||
self.__storage = Storage(pst_unix_path)
|
||||
|
||||
@ -104,6 +107,12 @@ class Switch: # pylint: disable=too-many-public-methods
|
||||
if save:
|
||||
self.__save_notifier.notify()
|
||||
|
||||
def __x_set_dummies(self, dummies: Dummies, save: bool=True) -> None:
|
||||
self.__chain.set_dummies(dummies)
|
||||
self.__cache.set_dummies(dummies)
|
||||
if save:
|
||||
self.__save_notifier.notify()
|
||||
|
||||
def __x_set_colors(self, colors: Colors, save: bool=True) -> None:
|
||||
self.__chain.set_colors(colors)
|
||||
self.__cache.set_colors(colors)
|
||||
@ -132,13 +141,19 @@ class Switch: # pylint: disable=too-many-public-methods
|
||||
|
||||
# =====
|
||||
|
||||
async def set_active_port(self, port: int) -> None:
|
||||
self.__chain.set_active_port(port)
|
||||
async def set_active_prev(self) -> None:
|
||||
self.__chain.set_active_prev()
|
||||
|
||||
async def set_active_next(self) -> None:
|
||||
self.__chain.set_active_next()
|
||||
|
||||
async def set_active_port(self, port: float) -> None:
|
||||
self.__chain.set_active_port(self.__chain.translate_port(port))
|
||||
|
||||
# =====
|
||||
|
||||
async def set_port_beacon(self, port: int, on: bool) -> None:
|
||||
self.__chain.set_port_beacon(port, on)
|
||||
async def set_port_beacon(self, port: float, on: bool) -> None:
|
||||
self.__chain.set_port_beacon(self.__chain.translate_port(port), on)
|
||||
|
||||
async def set_uplink_beacon(self, unit: int, on: bool) -> None:
|
||||
self.__chain.set_uplink_beacon(unit, on)
|
||||
@ -148,33 +163,35 @@ class Switch: # pylint: disable=too-many-public-methods
|
||||
|
||||
# =====
|
||||
|
||||
async def atx_power_on(self, port: int) -> None:
|
||||
async def atx_power_on(self, port: float) -> None:
|
||||
self.__inner_atx_cp(port, False, self.__X_ATX_CP_DELAYS)
|
||||
|
||||
async def atx_power_off(self, port: int) -> None:
|
||||
async def atx_power_off(self, port: float) -> None:
|
||||
self.__inner_atx_cp(port, True, self.__X_ATX_CP_DELAYS)
|
||||
|
||||
async def atx_power_off_hard(self, port: int) -> None:
|
||||
async def atx_power_off_hard(self, port: float) -> None:
|
||||
self.__inner_atx_cp(port, True, self.__X_ATX_CPL_DELAYS)
|
||||
|
||||
async def atx_power_reset_hard(self, port: int) -> None:
|
||||
async def atx_power_reset_hard(self, port: float) -> None:
|
||||
self.__inner_atx_cr(port, True)
|
||||
|
||||
async def atx_click_power(self, port: int) -> None:
|
||||
async def atx_click_power(self, port: float) -> None:
|
||||
self.__inner_atx_cp(port, None, self.__X_ATX_CP_DELAYS)
|
||||
|
||||
async def atx_click_power_long(self, port: int) -> None:
|
||||
async def atx_click_power_long(self, port: float) -> None:
|
||||
self.__inner_atx_cp(port, None, self.__X_ATX_CPL_DELAYS)
|
||||
|
||||
async def atx_click_reset(self, port: int) -> None:
|
||||
async def atx_click_reset(self, port: float) -> None:
|
||||
self.__inner_atx_cr(port, None)
|
||||
|
||||
def __inner_atx_cp(self, port: int, if_powered: (bool | None), x_delay: str) -> None:
|
||||
def __inner_atx_cp(self, port: float, if_powered: (bool | None), x_delay: str) -> None:
|
||||
assert x_delay in [self.__X_ATX_CP_DELAYS, self.__X_ATX_CPL_DELAYS]
|
||||
port = self.__chain.translate_port(port)
|
||||
delay = getattr(self.__cache, f"get_{x_delay}")()[port]
|
||||
self.__chain.click_power(port, delay, if_powered)
|
||||
|
||||
def __inner_atx_cr(self, port: int, if_powered: (bool | None)) -> None:
|
||||
def __inner_atx_cr(self, port: float, if_powered: (bool | None)) -> None:
|
||||
port = self.__chain.translate_port(port)
|
||||
delay = self.__cache.get_atx_cr_delays()[port]
|
||||
self.__chain.click_reset(port, delay, if_powered)
|
||||
|
||||
@ -235,12 +252,14 @@ class Switch: # pylint: disable=too-many-public-methods
|
||||
self,
|
||||
port: int,
|
||||
edid_id: (str | None)=None,
|
||||
dummy: (bool | None)=None,
|
||||
name: (str | None)=None,
|
||||
atx_click_power_delay: (float | None)=None,
|
||||
atx_click_power_long_delay: (float | None)=None,
|
||||
atx_click_reset_delay: (float | None)=None,
|
||||
) -> None:
|
||||
|
||||
port = self.__chain.translate_port(port)
|
||||
async with self.__lock:
|
||||
if edid_id is not None:
|
||||
edids = self.__cache.get_edids()
|
||||
@ -249,15 +268,16 @@ class Switch: # pylint: disable=too-many-public-methods
|
||||
edids.assign(port, edid_id)
|
||||
self.__x_set_edids(edids)
|
||||
|
||||
for (key, value) in [
|
||||
(self.__X_PORT_NAMES, name),
|
||||
(self.__X_ATX_CP_DELAYS, atx_click_power_delay),
|
||||
(self.__X_ATX_CPL_DELAYS, atx_click_power_long_delay),
|
||||
(self.__X_ATX_CR_DELAYS, atx_click_reset_delay),
|
||||
for (reset, key, value) in [
|
||||
(None, self.__X_DUMMIES, dummy), # None can't be used now
|
||||
("", self.__X_PORT_NAMES, name),
|
||||
(0, self.__X_ATX_CP_DELAYS, atx_click_power_delay),
|
||||
(0, self.__X_ATX_CPL_DELAYS, atx_click_power_long_delay),
|
||||
(0, self.__X_ATX_CR_DELAYS, atx_click_reset_delay),
|
||||
]:
|
||||
if value is not None:
|
||||
new = getattr(self.__cache, f"get_{key}")()
|
||||
new[port] = (value or None) # None == reset to default
|
||||
new[port] = (None if value == reset else value) # Value or reset default
|
||||
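# The name-mangled lookup below resolves the matching private setter __x_set_<key>,
# e.g. key "dummies" dispatches to Switch.__x_set_dummies(new).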
getattr(self, f"_Switch__x_set_{key}")(new)
|
||||
|
||||
# =====
|
||||
@ -374,7 +394,7 @@ class Switch: # pylint: disable=too-many-public-methods
|
||||
prevs = dict.fromkeys(self.__X_ALL)
|
||||
while True:
|
||||
await self.__save_notifier.wait()
|
||||
while (await self.__save_notifier.wait(5)):
|
||||
while not (await self.__save_notifier.wait(5)):
|
||||
pass
|
||||
while True:
|
||||
try:
|
||||
|
||||
@ -34,6 +34,7 @@ from .lib import aiotools
|
||||
from .lib import aioproc
|
||||
|
||||
from .types import Edids
|
||||
from .types import Dummies
|
||||
from .types import Colors
|
||||
|
||||
from .proto import Response
|
||||
@ -54,6 +55,14 @@ class _CmdSetActual(_BaseCmd):
|
||||
actual: bool
|
||||
|
||||
|
||||
class _CmdSetActivePrev(_BaseCmd):
|
||||
pass
|
||||
|
||||
|
||||
class _CmdSetActiveNext(_BaseCmd):
|
||||
pass
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class _CmdSetActivePort(_BaseCmd):
|
||||
port: int
|
||||
@ -80,6 +89,11 @@ class _CmdSetEdids(_BaseCmd):
|
||||
edids: Edids
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class _CmdSetDummies(_BaseCmd):
|
||||
dummies: Dummies
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class _CmdSetColors(_BaseCmd):
|
||||
colors: Colors
|
||||
@ -177,13 +191,19 @@ class UnitAtxLedsEvent(BaseEvent):
|
||||
|
||||
# =====
|
||||
class Chain: # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, device_path: str) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
device_path: str,
|
||||
ignore_hpd_on_top: bool,
|
||||
) -> None:
|
||||
|
||||
self.__device = Device(device_path)
|
||||
self.__ignore_hpd_on_top = ignore_hpd_on_top
|
||||
|
||||
self.__actual = False
|
||||
|
||||
self.__edids = Edids()
|
||||
|
||||
self.__dummies = Dummies({})
|
||||
self.__colors = Colors()
|
||||
|
||||
self.__units: list[_UnitContext] = []
|
||||
@ -200,6 +220,24 @@ class Chain: # pylint: disable=too-many-instance-attributes
|
||||
|
||||
# =====
|
||||
|
||||
def translate_port(self, port: float) -> int:
|
||||
assert port >= 0
|
||||
if int(port) == port:
|
||||
return int(port)
|
||||
(unit, ch) = map(int, str(port).split("."))
|
||||
unit = min(max(unit, 1), 5)
|
||||
ch = min(max(ch, 1), 4)
|
||||
port = min((unit - 1) * 4 + (ch - 1), 19)
|
||||
return port
|
||||
|
||||
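# translate_port() examples: an integer value such as 6 is returned unchanged, while a
# dotted "unit.channel" value such as 2.3 maps to (2 - 1) * 4 + (3 - 1) = 6, i.e. channel 3
# on unit 2 of a chain of 4-port units, capped at port 19 (five units).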
# =====
|
||||
|
||||
def set_active_prev(self) -> None:
|
||||
self.__queue_cmd(_CmdSetActivePrev())
|
||||
|
||||
def set_active_next(self) -> None:
|
||||
self.__queue_cmd(_CmdSetActiveNext())
|
||||
|
||||
def set_active_port(self, port: int) -> None:
|
||||
self.__queue_cmd(_CmdSetActivePort(port))
|
||||
|
||||
@ -219,6 +257,9 @@ class Chain: # pylint: disable=too-many-instance-attributes
|
||||
def set_edids(self, edids: Edids) -> None:
|
||||
self.__queue_cmd(_CmdSetEdids(edids)) # Will be copied because of multiprocessing.Queue()
|
||||
|
||||
def set_dummies(self, dummies: Dummies) -> None:
|
||||
self.__queue_cmd(_CmdSetDummies(dummies))
|
||||
|
||||
def set_colors(self, colors: Colors) -> None:
|
||||
self.__queue_cmd(_CmdSetColors(colors))
|
||||
|
||||
@ -290,12 +331,21 @@ class Chain: # pylint: disable=too-many-instance-attributes
|
||||
self.__device.request_state()
|
||||
self.__device.request_atx_leds()
|
||||
while not self.__stop_event.is_set():
|
||||
count = 0
|
||||
if self.__select():
|
||||
count = 0
|
||||
for resp in self.__device.read_all():
|
||||
self.__update_units(resp)
|
||||
self.__adjust_quirks()
|
||||
self.__adjust_start_port()
|
||||
self.__finish_changing_request(resp)
|
||||
self.__consume_commands()
|
||||
else:
|
||||
count += 1
|
||||
if count >= 5:
|
||||
# Heartbeat
|
||||
self.__device.request_state()
|
||||
count = 0
|
||||
self.__ensure_config()
|
||||
|
||||
def __select(self) -> bool:
|
||||
@ -314,10 +364,29 @@ class Chain: # pylint: disable=too-many-instance-attributes
|
||||
case _CmdSetActual():
|
||||
self.__actual = cmd.actual
|
||||
|
||||
case _CmdSetActivePrev():
|
||||
if len(self.__units) > 0:
|
||||
port = self.__active_port
|
||||
port -= 1
|
||||
if port >= 0:
|
||||
self.__active_port = port
|
||||
self.__queue_event(PortActivatedEvent(self.__active_port))
|
||||
|
||||
case _CmdSetActiveNext():
|
||||
port = self.__active_port
|
||||
if port < 0:
|
||||
port = 0
|
||||
else:
|
||||
port += 1
|
||||
if port < len(self.__units) * 4:
|
||||
self.__active_port = port
|
||||
self.__queue_event(PortActivatedEvent(self.__active_port))
|
||||
|
||||
case _CmdSetActivePort():
|
||||
# May also be triggered internally during synchronization
|
||||
self.__active_port = cmd.port
|
||||
self.__queue_event(PortActivatedEvent(self.__active_port))
|
||||
if cmd.port < len(self.__units) * 4:
|
||||
self.__active_port = cmd.port
|
||||
self.__queue_event(PortActivatedEvent(self.__active_port))
|
||||
|
||||
case _CmdSetPortBeacon():
|
||||
(unit, ch) = self.get_real_unit_channel(cmd.port)
|
||||
@ -341,6 +410,9 @@ class Chain: # pylint: disable=too-many-instance-attributes
|
||||
case _CmdSetEdids():
|
||||
self.__edids = cmd.edids
|
||||
|
||||
case _CmdSetDummies():
|
||||
self.__dummies = cmd.dummies
|
||||
|
||||
case _CmdSetColors():
|
||||
self.__colors = cmd.colors
|
||||
|
||||
@ -364,6 +436,15 @@ class Chain: # pylint: disable=too-many-instance-attributes
|
||||
self.__units[resp.header.unit].atx_leds = resp.body
|
||||
self.__queue_event(UnitAtxLedsEvent(resp.header.unit, resp.body))
|
||||
|
||||
def __adjust_quirks(self) -> None:
|
||||
for (unit, ctx) in enumerate(self.__units):
|
||||
if ctx.state is not None and ctx.state.version.is_fresh(7):
|
||||
ignore_hpd = (unit == 0 and self.__ignore_hpd_on_top)
|
||||
if ctx.state.quirks.ignore_hpd != ignore_hpd:
|
||||
get_logger().info("Applying quirk ignore_hpd=%s to [%d] ...",
|
||||
ignore_hpd, unit)
|
||||
self.__device.request_set_quirks(unit, ignore_hpd)
|
||||
|
||||
def __adjust_start_port(self) -> None:
|
||||
if self.__active_port < 0:
|
||||
for (unit, ctx) in enumerate(self.__units):
|
||||
@ -387,6 +468,7 @@ class Chain: # pylint: disable=too-many-instance-attributes
|
||||
self.__ensure_config_port(unit, ctx)
|
||||
if self.__actual:
|
||||
self.__ensure_config_edids(unit, ctx)
|
||||
self.__ensure_config_dummies(unit, ctx)
|
||||
self.__ensure_config_colors(unit, ctx)
|
||||
|
||||
def __ensure_config_port(self, unit: int, ctx: _UnitContext) -> None:
|
||||
@ -413,6 +495,19 @@ class Chain: # pylint: disable=too-many-instance-attributes
|
||||
ctx.changing_rid = self.__device.request_set_edid(unit, ch, edid)
|
||||
break # Busy globally
|
||||
|
||||
def __ensure_config_dummies(self, unit: int, ctx: _UnitContext) -> None:
|
||||
assert ctx.state is not None
|
||||
if ctx.state.version.is_fresh(8) and ctx.can_be_changed():
|
||||
for ch in range(4):
|
||||
port = self.get_virtual_port(unit, ch)
|
||||
dummy = self.__dummies[port]
|
||||
if ctx.state.video_dummies[ch] != dummy:
|
||||
get_logger().info("Changing dummy flag on port %d on [%d:%d]: %d -> %d ...",
|
||||
port, unit, ch,
|
||||
ctx.state.video_dummies[ch], dummy)
|
||||
ctx.changing_rid = self.__device.request_set_dummy(unit, ch, dummy)
|
||||
break # Busy globally (actually not but it can be changed in the firmware)
|
||||
|
||||
def __ensure_config_colors(self, unit: int, ctx: _UnitContext) -> None:
|
||||
assert self.__actual
|
||||
assert ctx.state is not None
|
||||
|
||||
@ -41,7 +41,9 @@ from .proto import BodySetBeacon
|
||||
from .proto import BodyAtxClick
|
||||
from .proto import BodySetEdid
|
||||
from .proto import BodyClearEdid
|
||||
from .proto import BodySetDummy
|
||||
from .proto import BodySetColors
|
||||
from .proto import BodySetQuirks
|
||||
|
||||
|
||||
# =====
|
||||
@ -163,9 +165,15 @@ class Device:
|
||||
return self.__send_request(Header.SET_EDID, unit, BodySetEdid(ch, edid))
|
||||
return self.__send_request(Header.CLEAR_EDID, unit, BodyClearEdid(ch))
|
||||
|
||||
def request_set_dummy(self, unit: int, ch: int, on: bool) -> int:
|
||||
return self.__send_request(Header.SET_DUMMY, unit, BodySetDummy(ch, on))
|
||||
|
||||
def request_set_colors(self, unit: int, ch: int, colors: Colors) -> int:
|
||||
return self.__send_request(Header.SET_COLORS, unit, BodySetColors(ch, colors))
|
||||
|
||||
def request_set_quirks(self, unit: int, ignore_hpd: bool) -> int:
|
||||
return self.__send_request(Header.SET_QUIRKS, unit, BodySetQuirks(ignore_hpd))
|
||||
|
||||
def __send_request(self, op: int, unit: int, body: (Packable | None)) -> int:
|
||||
assert self.__tty is not None
|
||||
req = Request(Header(
|
||||
|
||||
@ -60,6 +60,8 @@ class Header(Packable, Unpackable):
|
||||
SET_EDID = 9
|
||||
CLEAR_EDID = 10
|
||||
SET_COLORS = 12
|
||||
SET_QUIRKS = 13
|
||||
SET_DUMMY = 14
|
||||
|
||||
__struct = struct.Struct("<BHBB")
|
||||
|
||||
@ -89,17 +91,32 @@ class Nak(Unpackable):
|
||||
return Nak(*cls.__struct.unpack_from(data, offset=offset))
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class UnitVersion:
|
||||
hw: int
|
||||
sw: int
|
||||
sw_dev: bool
|
||||
|
||||
def is_fresh(self, version: int) -> bool:
|
||||
return (self.sw_dev or (self.sw >= version))
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class UnitFlags:
|
||||
changing_busy: bool
|
||||
flashing_busy: bool
|
||||
has_downlink: bool
|
||||
has_hpd: bool
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class UnitQuirks:
|
||||
ignore_hpd: bool
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class UnitState(Unpackable): # pylint: disable=too-many-instance-attributes
|
||||
sw_version: int
|
||||
hw_version: int
|
||||
version: UnitVersion
|
||||
flags: UnitFlags
|
||||
ch: int
|
||||
beacons: tuple[bool, bool, bool, bool, bool, bool]
|
||||
@ -108,10 +125,12 @@ class UnitState(Unpackable): # pylint: disable=too-many-instance-attributes
|
||||
video_hpd: tuple[bool, bool, bool, bool, bool]
|
||||
video_edid: tuple[bool, bool, bool, bool]
|
||||
video_crc: tuple[int, int, int, int]
|
||||
video_dummies: tuple[bool, bool, bool, bool]
|
||||
usb_5v_sens: tuple[bool, bool, bool, bool]
|
||||
atx_busy: tuple[bool, bool, bool, bool]
|
||||
quirks: UnitQuirks
|
||||
|
||||
__struct = struct.Struct("<HHHBBHHHHHHBBBHHHHBxB30x")
|
||||
__struct = struct.Struct("<HHHBBHHHHHHBBBHHHHBxBBB28x")
|
||||
|
||||
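# Layout note: the old "<HHHBBHHHHHHBBBHHHHBxB30x" and the new "<HHHBBHHHHHHBBBHHHHBxBBB28x"
# both pack to 64 bytes: the two added single-byte fields (quirks and video_dummies)
# are compensated by shrinking the tail padding from 30 to 28 bytes, so the wire size
# of UnitState is unchanged.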
def compare_edid(self, ch: int, edid: Optional["Edid"]) -> bool:
|
||||
if edid is None:
|
||||
@ -128,15 +147,19 @@ class UnitState(Unpackable): # pylint: disable=too-many-instance-attributes
|
||||
sw_version, hw_version, flags, ch,
|
||||
beacons, nc0, nc1, nc2, nc3, nc4, nc5,
|
||||
video_5v_sens, video_hpd, video_edid, vc0, vc1, vc2, vc3,
|
||||
usb_5v_sens, atx_busy,
|
||||
usb_5v_sens, atx_busy, quirks, video_dummies,
|
||||
) = cls.__struct.unpack_from(data, offset=offset)
|
||||
return UnitState(
|
||||
sw_version,
|
||||
hw_version,
|
||||
version=UnitVersion(
|
||||
hw=hw_version,
|
||||
sw=(sw_version & 0x7FFF),
|
||||
sw_dev=bool(sw_version & 0x8000),
|
||||
),
|
||||
flags=UnitFlags(
|
||||
changing_busy=bool(flags & 0x80),
|
||||
flashing_busy=bool(flags & 0x40),
|
||||
has_downlink=bool(flags & 0x02),
|
||||
has_hpd=bool(flags & 0x04),
|
||||
),
|
||||
ch=ch,
|
||||
beacons=cls.__make_flags6(beacons),
|
||||
@ -145,8 +168,10 @@ class UnitState(Unpackable): # pylint: disable=too-many-instance-attributes
|
||||
video_hpd=cls.__make_flags5(video_hpd),
|
||||
video_edid=cls.__make_flags4(video_edid),
|
||||
video_crc=(vc0, vc1, vc2, vc3),
|
||||
video_dummies=cls.__make_flags4(video_dummies),
|
||||
usb_5v_sens=cls.__make_flags4(usb_5v_sens),
|
||||
atx_busy=cls.__make_flags4(atx_busy),
|
||||
quirks=UnitQuirks(ignore_hpd=bool(quirks & 0x01)),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@ -251,6 +276,18 @@ class BodyClearEdid(Packable):
|
||||
return self.ch.to_bytes()
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class BodySetDummy(Packable):
|
||||
ch: int
|
||||
on: bool
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
assert 0 <= self.ch <= 3
|
||||
|
||||
def pack(self) -> bytes:
|
||||
return self.ch.to_bytes() + self.on.to_bytes()
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class BodySetColors(Packable):
|
||||
ch: int
|
||||
@ -263,6 +300,14 @@ class BodySetColors(Packable):
|
||||
return self.ch.to_bytes() + self.colors.pack()
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class BodySetQuirks(Packable):
|
||||
ignore_hpd: bool
|
||||
|
||||
def pack(self) -> bytes:
|
||||
return self.ignore_hpd.to_bytes()
|
||||
|
||||
|
||||
# =====
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Request:
|
||||
|
||||
@ -27,6 +27,7 @@ import time
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from .types import Edids
|
||||
from .types import Dummies
|
||||
from .types import Color
|
||||
from .types import Colors
|
||||
from .types import PortNames
|
||||
@ -48,8 +49,8 @@ class _UnitInfo:
|
||||
|
||||
|
||||
# =====
|
||||
class StateCache: # pylint: disable=too-many-instance-attributes
|
||||
__FW_VERSION = 5
|
||||
class StateCache: # pylint: disable=too-many-instance-attributes,too-many-public-methods
|
||||
__FW_VERSION = 8
|
||||
|
||||
__FULL = 0xFFFF
|
||||
__SUMMARY = 0x01
|
||||
@ -62,6 +63,7 @@ class StateCache: # pylint: disable=too-many-instance-attributes
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.__edids = Edids()
|
||||
self.__dummies = Dummies({})
|
||||
self.__colors = Colors()
|
||||
self.__port_names = PortNames({})
|
||||
self.__atx_cp_delays = AtxClickPowerDelays({})
|
||||
@ -77,6 +79,9 @@ class StateCache: # pylint: disable=too-many-instance-attributes
|
||||
def get_edids(self) -> Edids:
|
||||
return self.__edids.copy()
|
||||
|
||||
def get_dummies(self) -> Dummies:
|
||||
return self.__dummies.copy()
|
||||
|
||||
def get_colors(self) -> Colors:
|
||||
return self.__colors
|
||||
|
||||
@ -158,7 +163,17 @@ class StateCache: # pylint: disable=too-many-instance-attributes
|
||||
},
|
||||
}
|
||||
if x_summary:
|
||||
state["summary"] = {"active_port": self.__active_port, "synced": self.__synced}
|
||||
state["summary"] = {
|
||||
"active_port": self.__active_port,
|
||||
"active_id": (
|
||||
"" if self.__active_port < 0 else (
|
||||
f"{self.__active_port // 4 + 1}.{self.__active_port % 4 + 1}"
|
||||
if len(self.__units) > 1 else
|
||||
f"{self.__active_port + 1}"
|
||||
)
|
||||
),
|
||||
"synced": self.__synced,
|
||||
}
|
||||
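# Example: with several units, active_port 2 renders as active_id "1.3" (unit 1, channel 3);
# with a single unit the same port renders simply as "3". This is the inverse of
# Chain.translate_port(), which maps 1.3 back to port 2.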
if x_edids:
|
||||
state["edids"] = {
|
||||
"all": {
|
||||
@ -195,7 +210,10 @@ class StateCache: # pylint: disable=too-many-instance-attributes
|
||||
assert ui.state is not None
|
||||
assert ui.atx_leds is not None
|
||||
if x_model:
|
||||
state["model"]["units"].append({"firmware": {"version": ui.state.sw_version}})
|
||||
state["model"]["units"].append({"firmware": {
|
||||
"version": ui.state.version.sw,
|
||||
"devbuild": ui.state.version.sw_dev,
|
||||
}})
|
||||
if x_video:
|
||||
state["video"]["links"].extend(ui.state.video_5v_sens[:4])
|
||||
if x_usb:
|
||||
@ -216,6 +234,7 @@ class StateCache: # pylint: disable=too-many-instance-attributes
|
||||
"unit": unit,
|
||||
"channel": ch,
|
||||
"name": self.__port_names[port],
|
||||
"id": (f"{unit + 1}.{ch + 1}" if len(self.__units) > 1 else f"{ch + 1}"),
|
||||
"atx": {
|
||||
"click_delays": {
|
||||
"power": self.__atx_cp_delays[port],
|
||||
@ -223,6 +242,9 @@ class StateCache: # pylint: disable=too-many-instance-attributes
|
||||
"reset": self.__atx_cr_delays[port],
|
||||
},
|
||||
},
|
||||
"video": {
|
||||
"dummy": self.__dummies[port],
|
||||
},
|
||||
})
|
||||
if x_edids:
|
||||
state["edids"]["used"].append(self.__edids.get_id_for_port(port))
|
||||
@ -324,6 +346,12 @@ class StateCache: # pylint: disable=too-many-instance-attributes
|
||||
if changed:
|
||||
self.__bump_state(self.__EDIDS)
|
||||
|
||||
def set_dummies(self, dummies: Dummies) -> None:
|
||||
changed = (not self.__dummies.compare_on_ports(dummies, self.__get_ports()))
|
||||
self.__dummies = dummies.copy()
|
||||
if changed:
|
||||
self.__bump_state(self.__FULL)
|
||||
|
||||
def set_colors(self, colors: Colors) -> None:
|
||||
changed = (self.__colors != colors)
|
||||
self.__colors = colors
|
||||
|
||||
@ -39,6 +39,7 @@ from .lib import get_logger
|
||||
|
||||
from .types import Edid
|
||||
from .types import Edids
|
||||
from .types import Dummies
|
||||
from .types import Color
|
||||
from .types import Colors
|
||||
from .types import PortNames
|
||||
@ -52,6 +53,8 @@ class StorageContext:
|
||||
__F_EDIDS_ALL = "edids_all.json"
|
||||
__F_EDIDS_PORT = "edids_port.json"
|
||||
|
||||
__F_DUMMIES = "dummies.json"
|
||||
|
||||
__F_COLORS = "colors.json"
|
||||
|
||||
__F_PORT_NAMES = "port_names.json"
|
||||
@ -74,6 +77,9 @@ class StorageContext:
|
||||
})
|
||||
await self.__write_json_keyvals(self.__F_EDIDS_PORT, edids.port)
|
||||
|
||||
async def write_dummies(self, dummies: Dummies) -> None:
|
||||
await self.__write_json_keyvals(self.__F_DUMMIES, dummies.kvs)
|
||||
|
||||
async def write_colors(self, colors: Colors) -> None:
|
||||
await self.__write_json_keyvals(self.__F_COLORS, {
|
||||
role: {
|
||||
@ -116,6 +122,10 @@ class StorageContext:
|
||||
port_edids = await self.__read_json_keyvals_int(self.__F_EDIDS_PORT)
|
||||
return Edids(all_edids, port_edids)
|
||||
|
||||
async def read_dummies(self) -> Dummies:
|
||||
kvs = await self.__read_json_keyvals_int(self.__F_DUMMIES)
|
||||
return Dummies({key: bool(value) for (key, value) in kvs.items()})
|
||||
|
||||
async def read_colors(self) -> Colors:
|
||||
raw = await self.__read_json_keyvals(self.__F_COLORS)
|
||||
return Colors(**{ # type: ignore
|
||||
|
||||
@ -59,31 +59,37 @@ class EdidInfo:
|
||||
except ParsedEdidNoBlockError:
|
||||
pass
|
||||
|
||||
audio: bool = False
|
||||
try:
|
||||
audio = parsed.get_audio()
|
||||
except ParsedEdidNoBlockError:
|
||||
pass
|
||||
|
||||
return EdidInfo(
|
||||
mfc_id=parsed.get_mfc_id(),
|
||||
product_id=parsed.get_product_id(),
|
||||
serial=parsed.get_serial(),
|
||||
monitor_name=monitor_name,
|
||||
monitor_serial=monitor_serial,
|
||||
audio=parsed.get_audio(),
|
||||
audio=audio,
|
||||
)
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Edid:
|
||||
name: str
|
||||
data: bytes
|
||||
crc: int = dataclasses.field(default=0)
|
||||
valid: bool = dataclasses.field(default=False)
|
||||
info: (EdidInfo | None) = dataclasses.field(default=None)
|
||||
|
||||
__HEADER = b"\x00\xFF\xFF\xFF\xFF\xFF\xFF\x00"
|
||||
name: str
|
||||
data: bytes
|
||||
crc: int = dataclasses.field(default=0)
|
||||
valid: bool = dataclasses.field(default=False)
|
||||
info: (EdidInfo | None) = dataclasses.field(default=None)
|
||||
_packed: bytes = dataclasses.field(default=b"")
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
assert len(self.name) > 0
|
||||
assert len(self.data) == 256
|
||||
object.__setattr__(self, "crc", bitbang.make_crc16(self.data))
|
||||
object.__setattr__(self, "valid", self.data.startswith(self.__HEADER))
|
||||
assert len(self.data) in [128, 256]
|
||||
object.__setattr__(self, "_packed", (self.data + (b"\x00" * 128))[:256])
|
||||
object.__setattr__(self, "crc", bitbang.make_crc16(self._packed)) # Calculate CRC for filled data
|
||||
object.__setattr__(self, "valid", ParsedEdid.is_header_valid(self.data))
|
||||
try:
|
||||
object.__setattr__(self, "info", EdidInfo.from_data(self.data))
|
||||
except Exception:
|
||||
@ -93,7 +99,7 @@ class Edid:
|
||||
return "".join(f"{item:0{2}X}" for item in self.data)
|
||||
|
||||
def pack(self) -> bytes:
|
||||
return self.data
|
||||
return self._packed
|
||||
|
||||
@classmethod
|
||||
def from_data(cls, name: str, data: (str | bytes | None)) -> "Edid":
|
||||
@ -101,14 +107,14 @@ class Edid:
|
||||
return Edid(name, b"\x00" * 256)
|
||||
|
||||
if isinstance(data, bytes):
|
||||
if data.startswith(cls.__HEADER):
|
||||
if ParsedEdid.is_header_valid(data):
return Edid(name, data) # Binary EDID
data_hex = data.decode() # Text EDID that was read as binary from a file
else: # isinstance(data, str)
data_hex = str(data) # Text EDID
|
||||
|
||||
data_hex = re.sub(r"\s", "", data_hex)
|
||||
assert len(data_hex) == 512
|
||||
assert len(data_hex) in [256, 512]
|
||||
data = bytes([
|
||||
int(data_hex[index:index + 2], 16)
|
||||
for index in range(0, len(data_hex), 2)
|
||||
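The hex branch above turns a 256- or 512-character hex string into raw EDID bytes two characters at a time; the standard-library bytes.fromhex() produces the same result, as this standalone check shows (the sample string is an illustrative 128-byte EDID consisting of the header plus zero padding):

hex_edid = "00FFFFFFFFFFFF00" + "00" * 120

manual = bytes(int(hex_edid[index:index + 2], 16) for index in range(0, len(hex_edid), 2))
assert manual == bytes.fromhex(hex_edid)
assert len(manual) == 128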
@ -275,6 +281,19 @@ class _PortsDict(Generic[_T]):
|
||||
else:
|
||||
self.kvs[port] = value
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not isinstance(other, self.__class__):
|
||||
return False
|
||||
return (self.kvs == other.kvs)
|
||||
|
||||
|
||||
class Dummies(_PortsDict[bool]):
|
||||
def __init__(self, kvs: dict[int, bool]) -> None:
|
||||
super().__init__(True, kvs)
|
||||
|
||||
def copy(self) -> "Dummies":
|
||||
return Dummies(self.kvs)
|
||||
|
||||
|
||||
class PortNames(_PortsDict[str]):
|
||||
def __init__(self, kvs: dict[int, str]) -> None:
|
||||
|
||||
45
kvmd/apps/localhid/__init__.py
Normal file
@ -0,0 +1,45 @@
|
||||
# ========================================================================== #
|
||||
# #
|
||||
# KVMD - The main PiKVM daemon. #
|
||||
# #
|
||||
# Copyright (C) 2020 Maxim Devaev <mdevaev@gmail.com> #
|
||||
# #
|
||||
# This program is free software: you can redistribute it and/or modify #
|
||||
# it under the terms of the GNU General Public License as published by #
|
||||
# the Free Software Foundation, either version 3 of the License, or #
|
||||
# (at your option) any later version. #
|
||||
# #
|
||||
# This program is distributed in the hope that it will be useful, #
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
|
||||
# GNU General Public License for more details. #
|
||||
# #
|
||||
# You should have received a copy of the GNU General Public License #
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
|
||||
# #
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
from ...clients.kvmd import KvmdClient
|
||||
|
||||
from ... import htclient
|
||||
|
||||
from .. import init
|
||||
|
||||
from .server import LocalHidServer
|
||||
|
||||
|
||||
# =====
|
||||
def main(argv: (list[str] | None)=None) -> None:
|
||||
config = init(
|
||||
prog="kvmd-localhid",
|
||||
description="Local HID to KVMD proxy",
|
||||
check_run=True,
|
||||
argv=argv,
|
||||
)[2].localhid
|
||||
|
||||
user_agent = htclient.make_user_agent("KVMD-LocalHID")
|
||||
|
||||
LocalHidServer(
|
||||
kvmd=KvmdClient(user_agent=user_agent, **config.kvmd._unpack()),
|
||||
).run()
|
||||
24
kvmd/apps/localhid/__main__.py
Normal file
@ -0,0 +1,24 @@
|
||||
# ========================================================================== #
|
||||
# #
|
||||
# KVMD - The main PiKVM daemon. #
|
||||
# #
|
||||
# Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com> #
|
||||
# #
|
||||
# This program is free software: you can redistribute it and/or modify #
|
||||
# it under the terms of the GNU General Public License as published by #
|
||||
# the Free Software Foundation, either version 3 of the License, or #
|
||||
# (at your option) any later version. #
|
||||
# #
|
||||
# This program is distributed in the hope that it will be useful, #
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
|
||||
# GNU General Public License for more details. #
|
||||
# #
|
||||
# You should have received a copy of the GNU General Public License #
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
|
||||
# #
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
from . import main
|
||||
main()
|
||||
152
kvmd/apps/localhid/hid.py
Normal file
@ -0,0 +1,152 @@
|
||||
# ========================================================================== #
|
||||
# #
|
||||
# KVMD - The main PiKVM daemon. #
|
||||
# #
|
||||
# Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com> #
|
||||
# #
|
||||
# This program is free software: you can redistribute it and/or modify #
|
||||
# it under the terms of the GNU General Public License as published by #
|
||||
# the Free Software Foundation, either version 3 of the License, or #
|
||||
# (at your option) any later version. #
|
||||
# #
|
||||
# This program is distributed in the hope that it will be useful, #
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
|
||||
# GNU General Public License for more details. #
|
||||
# #
|
||||
# You should have received a copy of the GNU General Public License #
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
|
||||
# #
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
import asyncio
|
||||
|
||||
from typing import Final
|
||||
from typing import Generator
|
||||
|
||||
import evdev
|
||||
from evdev import ecodes
|
||||
|
||||
|
||||
# =====
|
||||
class Hid: # pylint: disable=too-many-instance-attributes
|
||||
KEY: Final[int] = 0
|
||||
MOUSE_BUTTON: Final[int] = 1
|
||||
MOUSE_REL: Final[int] = 2
|
||||
MOUSE_WHEEL: Final[int] = 3
|
||||
|
||||
def __init__(self, path: str) -> None:
|
||||
self.__device = evdev.InputDevice(path)
|
||||
|
||||
caps = self.__device.capabilities(absinfo=False)
|
||||
|
||||
syns = caps.get(ecodes.EV_SYN, [])
|
||||
self.__has_syn = (ecodes.SYN_REPORT in syns)
|
||||
|
||||
leds = caps.get(ecodes.EV_LED, [])
|
||||
self.__has_caps = (ecodes.LED_CAPSL in leds)
|
||||
self.__has_scroll = (ecodes.LED_SCROLLL in leds)
|
||||
self.__has_num = (ecodes.LED_NUML in leds)
|
||||
|
||||
keys = caps.get(ecodes.EV_KEY, [])
|
||||
self.__has_keyboard = (
|
||||
ecodes.KEY_LEFTCTRL in keys
|
||||
or ecodes.KEY_RIGHTCTRL in keys
|
||||
or ecodes.KEY_LEFTSHIFT in keys
|
||||
or ecodes.KEY_RIGHTSHIFT in keys
|
||||
)
|
||||
|
||||
rels = caps.get(ecodes.EV_REL, [])
|
||||
self.__has_mouse_rel = (
|
||||
ecodes.BTN_LEFT in keys
|
||||
and ecodes.REL_X in rels
|
||||
)
|
||||
|
||||
self.__grabbed = False
|
||||
|
||||
def is_suitable(self) -> bool:
|
||||
return (self.__has_keyboard or self.__has_mouse_rel)
|
||||
|
||||
def set_leds(self, caps: bool, scroll: bool, num: bool) -> None:
|
||||
if self.__grabbed:
|
||||
if self.__has_caps:
|
||||
self.__device.set_led(ecodes.LED_CAPSL, caps)
|
||||
if self.__has_scroll:
|
||||
self.__device.set_led(ecodes.LED_SCROLLL, scroll)
|
||||
if self.__has_num:
|
||||
self.__device.set_led(ecodes.LED_NUML, num)
|
||||
|
||||
def set_grabbed(self, grabbed: bool) -> None:
|
||||
if self.__grabbed != grabbed:
|
||||
getattr(self.__device, ("grab" if grabbed else "ungrab"))()
|
||||
self.__grabbed = grabbed
|
||||
|
||||
def close(self) -> None:
|
||||
try:
|
||||
self.__device.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
async def poll_to_queue(self, queue: asyncio.Queue[tuple[int, tuple]]) -> None:
|
||||
def put(event: int, args: tuple) -> None:
|
||||
queue.put_nowait((event, args))
|
||||
|
||||
move_x = move_y = 0
|
||||
wheel_x = wheel_y = 0
|
||||
async for event in self.__device.async_read_loop():
|
||||
if not self.__grabbed:
|
||||
# Keys are always intercepted so that hotkeys can be handled;
# everything else is skipped to save resources.
|
||||
if event.type == ecodes.EV_KEY and event.value != 2 and (event.code in ecodes.KEY):
|
||||
put(self.KEY, (event.code, bool(event.value)))
|
||||
continue
|
||||
|
||||
if event.type == ecodes.EV_REL:
|
||||
match event.code:
|
||||
case ecodes.REL_X:
|
||||
move_x += event.value
|
||||
case ecodes.REL_Y:
|
||||
move_y += event.value
|
||||
case ecodes.REL_HWHEEL:
|
||||
wheel_x += event.value
|
||||
case ecodes.REL_WHEEL:
|
||||
wheel_y += event.value
|
||||
|
||||
if not self.__has_syn or event.type == ecodes.SYN_REPORT:
|
||||
if move_x or move_y:
|
||||
for xy in self.__splitted_deltas(move_x, move_y):
|
||||
put(self.MOUSE_REL, xy)
|
||||
move_x = move_y = 0
|
||||
if wheel_x or wheel_y:
|
||||
for xy in self.__splitted_deltas(wheel_x, wheel_y):
|
||||
put(self.MOUSE_WHEEL, xy)
|
||||
wheel_x = wheel_y = 0
|
||||
|
||||
elif event.type == ecodes.EV_KEY and event.value != 2:
|
||||
if event.code in ecodes.KEY:
|
||||
put(self.KEY, (event.code, bool(event.value)))
|
||||
elif event.code in ecodes.BTN:
|
||||
put(self.MOUSE_BUTTON, (event.code, bool(event.value)))
|
||||
|
||||
def __splitted_deltas(self, delta_x: int, delta_y: int) -> Generator[tuple[int, int], None, None]:
|
||||
sign_x = (-1 if delta_x < 0 else 1)
|
||||
sign_y = (-1 if delta_y < 0 else 1)
|
||||
delta_x = abs(delta_x)
|
||||
delta_y = abs(delta_y)
|
||||
while delta_x > 0 or delta_y > 0:
|
||||
dx = sign_x * max(min(delta_x, 127), 0)
|
||||
dy = sign_y * max(min(delta_y, 127), 0)
|
||||
yield (dx, dy)
|
||||
delta_x -= 127
|
||||
delta_y -= 127
|
||||
|
||||
def __str__(self) -> str:
|
||||
info: list[str] = []
|
||||
if self.__has_syn:
|
||||
info.append("syn")
|
||||
if self.__has_keyboard:
|
||||
info.append("keyboard")
|
||||
if self.__has_mouse_rel:
|
||||
info.append("mouse_rel")
|
||||
return f"Hid({self.__device.path!r}, {self.__device.name!r}, {self.__device.phys!r}, {', '.join(info)})"
|
||||
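Hid.__splitted_deltas() above chops accumulated relative motion into chunks that fit a signed byte (at most 127 per axis per report). A self-contained sketch of the same splitting, with a worked example:

from typing import Generator

def split_deltas(dx: int, dy: int) -> Generator[tuple[int, int], None, None]:
    # Emit (dx, dy) chunks of at most +/-127 per axis, preserving the sign
    (sx, sy) = ((-1 if dx < 0 else 1), (-1 if dy < 0 else 1))
    (dx, dy) = (abs(dx), abs(dy))
    while dx > 0 or dy > 0:
        yield (sx * min(dx, 127), sy * min(dy, 127))
        (dx, dy) = (max(dx - 127, 0), max(dy - 127, 0))

assert list(split_deltas(300, -5)) == [(127, -5), (127, 0), (46, 0)]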
178
kvmd/apps/localhid/multi.py
Normal file
@ -0,0 +1,178 @@
|
||||
# ========================================================================== #
|
||||
# #
|
||||
# KVMD - The main PiKVM daemon. #
|
||||
# #
|
||||
# Copyright (C) 2020 Maxim Devaev <mdevaev@gmail.com> #
|
||||
# #
|
||||
# This program is free software: you can redistribute it and/or modify #
|
||||
# it under the terms of the GNU General Public License as published by #
|
||||
# the Free Software Foundation, either version 3 of the License, or #
|
||||
# (at your option) any later version. #
|
||||
# #
|
||||
# This program is distributed in the hope that it will be useful, #
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
|
||||
# GNU General Public License for more details. #
|
||||
# #
|
||||
# You should have received a copy of the GNU General Public License #
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
|
||||
# #
|
||||
# ========================================================================== #
|
||||
|
||||
|
||||
import asyncio
|
||||
import dataclasses
|
||||
import errno
|
||||
|
||||
from typing import AsyncGenerator
|
||||
|
||||
import pyudev
|
||||
|
||||
from ...logging import get_logger
|
||||
|
||||
from ... import aiotools
|
||||
|
||||
from .hid import Hid
|
||||
|
||||
|
||||
# =====
|
||||
def _udev_check(device: pyudev.Device) -> str:
|
||||
props = device.properties
|
||||
if props.get("ID_INPUT") == "1":
|
||||
path = props.get("DEVNAME")
|
||||
if isinstance(path, str) and path.startswith("/dev/input/event"):
|
||||
return path
|
||||
return ""
|
||||
|
||||
|
||||
async def _follow_udev_hids() -> AsyncGenerator[tuple[bool, str], None]:
|
||||
ctx = pyudev.Context()
|
||||
|
||||
monitor = pyudev.Monitor.from_netlink(pyudev.Context())
|
||||
monitor.filter_by(subsystem="input")
|
||||
monitor.start()
|
||||
fd = monitor.fileno()
|
||||
|
||||
read_event = asyncio.Event()
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.add_reader(fd, read_event.set)
|
||||
|
||||
try:
|
||||
for device in ctx.list_devices(subsystem="input"):
|
||||
path = _udev_check(device)
|
||||
if path:
|
||||
yield (True, path)
|
||||
|
||||
while True:
|
||||
await read_event.wait()
|
||||
while True:
|
||||
device = monitor.poll(0)
|
||||
if device is None:
|
||||
read_event.clear()
|
||||
break
|
||||
path = _udev_check(device)
|
||||
if path:
|
||||
if device.action == "add":
|
||||
yield (True, path)
|
||||
elif device.action == "remove":
|
||||
yield (False, path)
|
||||
finally:
|
||||
loop.remove_reader(fd)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class _Worker:
|
||||
task: asyncio.Task
|
||||
hid: (Hid | None)
|
||||
|
||||
|
||||
class MultiHid:
|
||||
def __init__(self, queue: asyncio.Queue[tuple[int, tuple]]) -> None:
|
||||
self.__queue = queue
|
||||
self.__workers: dict[str, _Worker] = {}
|
||||
self.__grabbed = True
|
||||
self.__leds = (False, False, False)
|
||||
|
||||
async def run(self) -> None:
|
||||
logger = get_logger(0)
|
||||
logger.info("Starting UDEV loop ...")
|
||||
try:
|
||||
async for (added, path) in _follow_udev_hids():
|
||||
if added:
|
||||
await self.__add_worker(path)
|
||||
else:
|
||||
await self.__remove_worker(path)
|
||||
finally:
|
||||
logger.info("Cleanup ...")
|
||||
await aiotools.shield_fg(self.__cleanup())
|
||||
|
||||
async def __cleanup(self) -> None:
|
||||
for path in list(self.__workers):
|
||||
await self.__remove_worker(path)
|
||||
|
||||
async def __add_worker(self, path: str) -> None:
|
||||
if path in self.__workers:
|
||||
await self.__remove_worker(path)
|
||||
self.__workers[path] = _Worker(asyncio.create_task(self.__worker_task_loop(path)), None)
|
||||
|
||||
async def __remove_worker(self, path: str) -> None:
|
||||
if path not in self.__workers:
|
||||
return
|
||||
try:
|
||||
worker = self.__workers[path]
|
||||
worker.task.cancel()
|
||||
await asyncio.gather(worker.task, return_exceptions=True)
|
||||
except Exception:
|
||||
pass
|
||||
finally:
|
||||
self.__workers.pop(path, None)
|
||||
|
||||
async def __worker_task_loop(self, path: str) -> None:
|
||||
logger = get_logger(0)
|
||||
while True:
|
||||
hid: (Hid | None) = None
|
||||
try:
|
||||
hid = Hid(path)
|
||||
if not hid.is_suitable():
|
||||
break
|
||||
logger.info("Opened: %s", hid)
|
||||
if self.__grabbed:
|
||||
hid.set_grabbed(True)
|
||||
hid.set_leds(*self.__leds)
|
||||
self.__workers[path].hid = hid
|
||||
await hid.poll_to_queue(self.__queue)
|
||||
except Exception as ex:
|
||||
if isinstance(ex, OSError) and ex.errno == errno.ENODEV: # pylint: disable=no-member
|
||||
logger.info("Closed: %s", hid)
|
||||
break
|
||||
logger.exception("Unhandled exception while polling %s", hid)
|
||||
await asyncio.sleep(5)
|
||||
finally:
|
||||
self.__workers[path].hid = None
|
||||
if hid:
|
||||
hid.close()
|
||||
|
||||
def is_grabbed(self) -> bool:
|
||||
return self.__grabbed
|
||||
|
||||
async def set_grabbed(self, grabbed: bool) -> None:
|
||||
await aiotools.run_async(self.__inner_set_grabbed, grabbed)
|
||||
|
||||
def __inner_set_grabbed(self, grabbed: bool) -> None:
|
||||
if self.__grabbed != grabbed:
|
||||
get_logger(0).info("Grabbing ..." if grabbed else "Ungrabbing ...")
|
||||
self.__grabbed = grabbed
|
||||
for worker in self.__workers.values():
|
||||
if worker.hid:
|
||||
worker.hid.set_grabbed(grabbed)
|
||||
self.__inner_set_leds(*self.__leds)
|
||||
|
||||
async def set_leds(self, caps: bool, scroll: bool, num: bool) -> None:
|
||||
await aiotools.run_async(self.__inner_set_leds, caps, scroll, num)
|
||||
|
||||
def __inner_set_leds(self, caps: bool, scroll: bool, num: bool) -> None:
|
||||
self.__leds = (caps, scroll, num)
|
||||
if self.__grabbed:
|
||||
for worker in self.__workers.values():
|
||||
if worker.hid:
|
||||
worker.hid.set_leds(*self.__leds)
|
||||
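_follow_udev_hids() above bridges pyudev's netlink monitor into asyncio by registering the monitor's file descriptor with loop.add_reader() and draining pending events with monitor.poll(0). A trimmed-down sketch of that bridge, without the ID_INPUT/DEVNAME filtering:

import asyncio
from typing import AsyncGenerator

import pyudev

async def follow_input_devices() -> AsyncGenerator[pyudev.Device, None]:
    monitor = pyudev.Monitor.from_netlink(pyudev.Context())
    monitor.filter_by(subsystem="input")
    monitor.start()

    ready = asyncio.Event()
    loop = asyncio.get_event_loop()
    loop.add_reader(monitor.fileno(), ready.set)
    try:
        while True:
            await ready.wait()
            while (device := monitor.poll(0)) is not None:  # Drain without blocking
                yield device
            ready.clear()
    finally:
        loop.remove_reader(monitor.fileno())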
192
kvmd/apps/localhid/server.py
Normal file
@ -0,0 +1,192 @@
# ========================================================================== #
#                                                                            #
#    KVMD - The main PiKVM daemon.                                           #
#                                                                            #
#    Copyright (C) 2020 Maxim Devaev <mdevaev@gmail.com>                     #
#                                                                            #
#    This program is free software: you can redistribute it and/or modify    #
#    it under the terms of the GNU General Public License as published by    #
#    the Free Software Foundation, either version 3 of the License, or       #
#    (at your option) any later version.                                     #
#                                                                            #
#    This program is distributed in the hope that it will be useful,         #
#    but WITHOUT ANY WARRANTY; without even the implied warranty of          #
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the            #
#    GNU General Public License for more details.                            #
#                                                                            #
#    You should have received a copy of the GNU General Public License       #
#    along with this program. If not, see <https://www.gnu.org/licenses/>.   #
#                                                                            #
# ========================================================================== #


import asyncio
import errno

from typing import Callable
from typing import Coroutine

import aiohttp
import async_lru

from evdev import ecodes

from ...logging import get_logger

from ... import tools
from ... import aiotools

from ...keyboard.magic import MagicHandler

from ...clients.kvmd import KvmdClient
from ...clients.kvmd import KvmdClientSession
from ...clients.kvmd import KvmdClientWs

from .hid import Hid
from .multi import MultiHid


# =====
class LocalHidServer:  # pylint: disable=too-many-instance-attributes
    def __init__(self, kvmd: KvmdClient) -> None:
        self.__kvmd = kvmd

        self.__kvmd_session: (KvmdClientSession | None) = None
        self.__kvmd_ws: (KvmdClientWs | None) = None

        self.__queue: asyncio.Queue[tuple[int, tuple]] = asyncio.Queue()
        self.__hid = MultiHid(self.__queue)

        self.__info_switch_units = 0
        self.__info_switch_active = ""
        self.__info_mouse_absolute = True
        self.__info_mouse_outputs: list[str] = []

        self.__magic = MagicHandler(
            proxy_handler=self.__on_magic_key_proxy,
            key_handlers={
                ecodes.KEY_H: self.__on_magic_grab,
                ecodes.KEY_K: self.__on_magic_ungrab,
                ecodes.KEY_UP: self.__on_magic_switch_prev,
                ecodes.KEY_LEFT: self.__on_magic_switch_prev,
                ecodes.KEY_DOWN: self.__on_magic_switch_next,
                ecodes.KEY_RIGHT: self.__on_magic_switch_next,
            },
            numeric_handler=self.__on_magic_switch_port,
        )

    def run(self) -> None:
        try:
            aiotools.run(self.__inner_run())
        finally:
            get_logger(0).info("Bye-bye")

    async def __inner_run(self) -> None:
        await aiotools.spawn_and_follow(
            self.__create_loop(self.__hid.run),
            self.__create_loop(self.__queue_worker),
            self.__create_loop(self.__api_worker),
        )

    async def __create_loop(self, func: Callable[[], Coroutine]) -> None:
        while True:
            try:
                await func()
            except Exception as ex:
                if isinstance(ex, OSError) and ex.errno == errno.ENODEV:  # pylint: disable=no-member
                    pass  # Device disconnected
                elif isinstance(ex, aiohttp.ClientError):
                    get_logger(0).error("KVMD client error: %s", tools.efmt(ex))
                else:
                    get_logger(0).exception("Unhandled exception in the loop: %s", func)
            await asyncio.sleep(5)

    async def __queue_worker(self) -> None:
        while True:
            (event, args) = await self.__queue.get()
            if event == Hid.KEY:
                await self.__magic.handle_key(*args)
                continue
            elif self.__hid.is_grabbed() and self.__kvmd_session and self.__kvmd_ws:
                match event:
                    case Hid.MOUSE_BUTTON:
                        await self.__kvmd_ws.send_mouse_button_event(*args)
                    case Hid.MOUSE_REL:
                        await self.__ensure_mouse_relative()
                        await self.__kvmd_ws.send_mouse_relative_event(*args)
                    case Hid.MOUSE_WHEEL:
                        await self.__kvmd_ws.send_mouse_wheel_event(*args)

    async def __api_worker(self) -> None:
        logger = get_logger(0)
        async with self.__kvmd.make_session() as session:
            async with session.ws(stream=False) as ws:
                logger.info("KVMD session opened")
                self.__kvmd_session = session
                self.__kvmd_ws = ws
                try:
                    async for (event_type, event) in ws.communicate():
                        if event_type == "hid":
                            if "leds" in event.get("keyboard", {}):
                                await self.__hid.set_leds(**event["keyboard"]["leds"])
                            if "absolute" in event.get("mouse", {}):
                                self.__info_mouse_outputs = event["mouse"]["outputs"]["available"]
                                self.__info_mouse_absolute = event["mouse"]["absolute"]
                        elif event_type == "switch":
                            if "model" in event:
                                self.__info_switch_units = len(event["model"]["units"])
                            if "summary" in event:
                                self.__info_switch_active = event["summary"]["active_id"]
                finally:
                    logger.info("KVMD session closed")
                    self.__kvmd_session = None
                    self.__kvmd_ws = None

    # =====

    async def __ensure_mouse_relative(self) -> None:
        if self.__info_mouse_absolute:
            # Avoid unnecessary LRU checks, just to speed up a bit
            await self.__inner_ensure_mouse_relative()

    @async_lru.alru_cache(maxsize=1, ttl=1)
    async def __inner_ensure_mouse_relative(self) -> None:
        if self.__kvmd_session and self.__info_mouse_absolute:
            for output in ["usb_rel", "ps2"]:
                if output in self.__info_mouse_outputs:
                    await self.__kvmd_session.hid.set_params(mouse_output=output)

    async def __on_magic_key_proxy(self, key: int, state: bool) -> None:
        if self.__hid.is_grabbed() and self.__kvmd_ws:
            await self.__kvmd_ws.send_key_event(key, state)

    async def __on_magic_grab(self) -> None:
        await self.__hid.set_grabbed(True)

    async def __on_magic_ungrab(self) -> None:
        await self.__hid.set_grabbed(False)

    async def __on_magic_switch_prev(self) -> None:
        if self.__kvmd_session and self.__info_switch_units > 0:
            get_logger(0).info("Switching port to the previous one ...")
            await self.__kvmd_session.switch.set_active_prev()

    async def __on_magic_switch_next(self) -> None:
        if self.__kvmd_session and self.__info_switch_units > 0:
            get_logger(0).info("Switching port to the next one ...")
            await self.__kvmd_session.switch.set_active_next()

    async def __on_magic_switch_port(self, codes: list[int]) -> bool:
        assert len(codes) > 0
        if self.__info_switch_units <= 0:
            return True
        elif 1 <= self.__info_switch_units <= 2:
            port = float(codes[0])
        else:  # self.__info_switch_units > 2:
            if len(codes) == 1:
                return False  # Wait for the second key
            port = (codes[0] + 1) + (codes[1] + 1) / 10
        if self.__kvmd_session:
            get_logger(0).info("Switching port to %s ...", port)
            await self.__kvmd_session.switch.set_active(port)
        return True
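The magic-key bindings above give the grabbed local keyboard a small control surface: H grabs, K ungrabs, the arrow keys cycle the switch ports, and bare digits select a port directly. The digit handling in __on_magic_switch_port maps key codes to a port number: with one or two switch units a single digit is the port, and with more units two digits are combined into a "unit.port" float. A standalone sketch of that mapping (the helper name is ours, not part of the diff):

    def codes_to_port(codes: list[int], units: int) -> (float | None):
        # Mirrors __on_magic_switch_port: a single digit for small setups,
        # a "unit.port" value built from two digits for chains of more than two units.
        assert codes
        if units <= 0:
            return None  # No switch attached, nothing to do
        if units <= 2:
            return float(codes[0])
        if len(codes) == 1:
            return None  # Wait for the second digit
        return (codes[0] + 1) + (codes[1] + 1) / 10

    # With three units, the digits (0, 1) select port 1.2:
    assert codes_to_port([0, 1], units=3) == 1.2
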
@ -52,6 +52,9 @@ class _Source:
    clients: dict[WsSession, "_Client"] = dataclasses.field(default_factory=dict)
    key_required: bool = dataclasses.field(default=False)

    def is_diff(self) -> bool:
        return StreamerFormats.is_diff(self.streamer.get_format())


@dataclasses.dataclass
class _Client:
@ -98,6 +101,14 @@ class MediaServer(HttpServer):
    async def __ws_bin_ping_handler(self, ws: WsSession, _: bytes) -> None:
        await ws.send_bin(255, b"")  # Ping-pong

    @exposed_ws(1)
    async def __ws_bin_key_handler(self, ws: WsSession, _: bytes) -> None:
        for src in self.__srcs:
            if ws in src.clients:
                if src.is_diff():
                    src.key_required = True
                break

    @exposed_ws("start")
    async def __ws_start_handler(self, ws: WsSession, event: dict) -> None:
        try:
@ -145,7 +156,7 @@ class MediaServer(HttpServer):
    # =====

    async def __sender(self, client: _Client) -> None:
        need_key = StreamerFormats.is_diff(client.src.streamer.get_format())
        need_key = client.src.is_diff()
        if need_key:
            client.src.key_required = True
        has_key = False

@ -50,8 +50,12 @@ def main(argv: (list[str] | None)=None) -> None:
        template = in_file.read()

    rendered = mako.template.Template(template).render(
        http_ipv4=config.nginx.http.ipv4,
        http_ipv6=config.nginx.http.ipv6,
        http_port=config.nginx.http.port,
        https_enabled=config.nginx.https.enabled,
        https_ipv4=config.nginx.https.ipv4,
        https_ipv6=config.nginx.https.ipv6,
        https_port=config.nginx.https.port,
        ipv6_enabled=network.is_ipv6_enabled(),
    )

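The render call above feeds the listener addresses and ports into the nginx config template; the added http_ipv4/http_ipv6 and https_ipv4/https_ipv6 values let the template bind to explicit addresses instead of a bare port. A toy illustration of the same mako call with made-up values (the real nginx template ships with kvmd and is not shown here):

    import mako.template

    # Hypothetical one-line template; the actual template uses these
    # variables to build its listen directives.
    template = "listen ${http_ipv4}:${http_port};"
    print(mako.template.Template(template).render(http_ipv4="0.0.0.0", http_port=80))
    # -> listen 0.0.0.0:80;
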
@ -78,6 +78,7 @@ def main() -> None: # pylint: disable=too-many-locals,too-many-branches,too-man
    parser.add_argument("--image", default="", type=(lambda arg: _get_data_path("pics", arg)), help="Display some image, wait a single interval and exit")
    parser.add_argument("--text", default="", help="Display some text, wait a single interval and exit")
    parser.add_argument("--pipe", action="store_true", help="Read and display lines from stdin until EOF, wait a single interval and exit")
    parser.add_argument("--fill", action="store_true", help="Fill the display with 0xFF")
    parser.add_argument("--clear-on-exit", action="store_true", help="Clear display on exit")
    parser.add_argument("--contrast", default=64, type=int, help="Set OLED contrast, values from 0 to 255")
    parser.add_argument("--fahrenheit", action="store_true", help="Display temperature in Fahrenheit instead of Celsius")
@ -121,6 +122,9 @@ def main() -> None: # pylint: disable=too-many-locals,too-many-branches,too-man
            text = ""
        time.sleep(options.interval)

    elif options.fill:
        screen.draw_white()

    else:
        stop_reason: (str | None) = None

@ -52,3 +52,7 @@ class Screen:
    def draw_image(self, image_path: str) -> None:
        with luma_canvas(self.__device) as draw:
            draw.bitmap(self.__offset, Image.open(image_path).convert("1"), fill="white")

    def draw_white(self) -> None:
        with luma_canvas(self.__device) as draw:
            draw.rectangle((0, 0, self.__device.width, self.__device.height), fill="white")

@ -201,8 +201,8 @@ class _GadgetConfig:
        rw: bool,
        removable: bool,
        fua: bool,
        inquiry_string_cdrom: str,
        inquiry_string_flash: str,
        _inquiry_string_cdrom: str,
        _inquiry_string_flash: str,
    ) -> None:

        # Endpoints number depends on transport_type but we can consider that this is 2
@ -216,8 +216,8 @@ class _GadgetConfig:
        _write(join(func_path, "lun.0/ro"), int(not rw))
        _write(join(func_path, "lun.0/removable"), int(removable))
        _write(join(func_path, "lun.0/nofua"), int(not fua))
        #_write(join(func_path, "lun.0/inquiry_string_cdrom"), inquiry_string_cdrom)
        #_write(join(func_path, "lun.0/inquiry_string"), inquiry_string_flash)
        # _write(join(func_path, "lun.0/inquiry_string_cdrom"), inquiry_string_cdrom)
        # _write(join(func_path, "lun.0/inquiry_string"), inquiry_string_flash)
        if user != "root":
            _chown(join(func_path, "lun.0/cdrom"), user)
            _chown(join(func_path, "lun.0/ro"), user)
@ -291,8 +291,9 @@ def _cmd_start(config: Section) -> None: # pylint: disable=too-many-statements,

    profile_path = join(gadget_path, usb.G_PROFILE)
    _mkdir(profile_path)
    _mkdir(join(profile_path, "strings/0x409"))
    _write(join(profile_path, "strings/0x409/configuration"), f"Config 1: {config.otg.config}")
    if config.otg.config:
        _mkdir(join(profile_path, "strings/0x409"))
        _write(join(profile_path, "strings/0x409/configuration"), config.otg.config)
    _write(join(profile_path, "MaxPower"), config.otg.max_power)
    if config.otg.remote_wakeup:
        # XXX: Should we use MaxPower=100 with Remote Wakeup?
@ -316,8 +317,8 @@ def _cmd_start(config: Section) -> None: # pylint: disable=too-many-statements,
        gc.add_msd(
            start=cod.msd.start,
            user=config.otg.user,
            inquiry_string_cdrom=usb.make_inquiry_string(**cod.msd.default.inquiry_string.cdrom._unpack()),
            inquiry_string_flash=usb.make_inquiry_string(**cod.msd.default.inquiry_string.flash._unpack()),
            _inquiry_string_cdrom=usb.make_inquiry_string(**cod.msd.default.inquiry_string.cdrom._unpack()),
            _inquiry_string_flash=usb.make_inquiry_string(**cod.msd.default.inquiry_string.flash._unpack()),
            **cod.msd.default._unpack(ignore="inquiry_string"),
        )
    if cod.drives.enabled:
@ -326,8 +327,8 @@ def _cmd_start(config: Section) -> None: # pylint: disable=too-many-statements,
        gc.add_msd(
            start=cod.drives.start,
            user="root",
            inquiry_string_cdrom=usb.make_inquiry_string(**cod.drives.default.inquiry_string.cdrom._unpack()),
            inquiry_string_flash=usb.make_inquiry_string(**cod.drives.default.inquiry_string.flash._unpack()),
            _inquiry_string_cdrom=usb.make_inquiry_string(**cod.drives.default.inquiry_string.cdrom._unpack()),
            _inquiry_string_flash=usb.make_inquiry_string(**cod.drives.default.inquiry_string.flash._unpack()),
            **cod.drives.default._unpack(ignore="inquiry_string"),
        )


@ -23,6 +23,7 @@

import errno
import argparse
import os

from ...validators.basic import valid_bool
from ...validators.basic import valid_int_f0
@ -37,6 +38,7 @@ from .. import init
def _has_param(gadget: str, instance: int, param: str) -> bool:
    return os.access(_get_param_path(gadget, instance, param), os.F_OK)


def _get_param_path(gadget: str, instance: int, param: str) -> str:
    return usb.get_gadget_path(gadget, usb.G_FUNCTIONS, f"mass_storage.usb{instance}/lun.0", param)


@ -45,6 +45,7 @@ from .netctl import IptablesAllowIcmpCtl
from .netctl import IptablesAllowPortCtl
from .netctl import IptablesForwardOut
from .netctl import IptablesForwardIn
from .netctl import SysctlIpv4ForwardCtl
from .netctl import CustomCtl


@ -63,14 +64,16 @@ class _Netcfg: # pylint: disable=too-many-instance-attributes

class _Service:  # pylint: disable=too-many-instance-attributes
    def __init__(self, config: Section) -> None:
        self.__ip_cmd: list[str] = config.otgnet.commands.ip_cmd
        self.__iptables_cmd: list[str] = config.otgnet.commands.iptables_cmd
        self.__sysctl_cmd: list[str] = config.otgnet.commands.sysctl_cmd

        self.__iface_net: str = config.otgnet.iface.net
        self.__ip_cmd: list[str] = config.otgnet.iface.ip_cmd

        self.__allow_icmp: bool = config.otgnet.firewall.allow_icmp
        self.__allow_tcp: list[int] = sorted(set(config.otgnet.firewall.allow_tcp))
        self.__allow_udp: list[int] = sorted(set(config.otgnet.firewall.allow_udp))
        self.__forward_iface: str = config.otgnet.firewall.forward_iface
        self.__iptables_cmd: list[str] = config.otgnet.firewall.iptables_cmd

        def build_cmd(key: str) -> list[str]:
            return tools.build_cmd(
@ -115,6 +118,7 @@ class _Service: # pylint: disable=too-many-instance-attributes
            *([IptablesForwardIn(self.__iptables_cmd, netcfg.iface)] if self.__forward_iface else []),
            IptablesDropAllCtl(self.__iptables_cmd, netcfg.iface),
            IfaceAddIpCtl(self.__ip_cmd, netcfg.iface, f"{netcfg.iface_ip}/{netcfg.net_prefix}"),
            *([SysctlIpv4ForwardCtl(self.__sysctl_cmd)] if self.__forward_iface else []),
            CustomCtl(self.__post_start_cmd, self.__pre_stop_cmd, placeholders),
        ]
        if direct:
@ -130,6 +134,8 @@ class _Service: # pylint: disable=too-many-instance-attributes
    async def __run_ctl(self, ctl: BaseCtl, direct: bool) -> bool:
        logger = get_logger()
        cmd = ctl.get_command(direct)
        if not cmd:
            return True
        logger.info("CMD: %s", tools.cmdfmt(cmd))
        try:
            return (not (await aioproc.log_process(cmd, logger)).returncode)

@ -121,6 +121,16 @@ class IptablesForwardIn(BaseCtl):
        ]


class SysctlIpv4ForwardCtl(BaseCtl):
    def __init__(self, base_cmd: list[str]) -> None:
        self.__base_cmd = base_cmd

    def get_command(self, direct: bool) -> list[str]:
        if direct:
            return [*self.__base_cmd, "net.ipv4.ip_forward=1"]
        return []  # Don't revert the command because some services can require it too


class CustomCtl(BaseCtl):
    def __init__(
        self,

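SysctlIpv4ForwardCtl is only additive: on start it appends net.ipv4.ip_forward=1 to the configured sysctl command, and on stop it deliberately returns an empty command so forwarding is left alone (and __run_ctl above now treats an empty command as an immediate success). A quick illustration with an example base command (the actual sysctl_cmd default comes from the kvmd config, not from this diff):

    ctl = SysctlIpv4ForwardCtl(["/usr/sbin/sysctl", "-w"])  # example base_cmd only
    print(ctl.get_command(direct=True))   # ['/usr/sbin/sysctl', '-w', 'net.ipv4.ip_forward=1']
    print(ctl.get_command(direct=False))  # [] -- nothing to revert on teardown
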
@ -66,22 +66,22 @@ async def _run_process(cmd: list[str], data_path: str) -> asyncio.subprocess.Pro

async def _run_cmd_ws(cmd: list[str], ws: aiohttp.ClientWebSocketResponse) -> int:  # pylint: disable=too-many-branches
    logger = get_logger(0)
    receive_task: (asyncio.Task | None) = None
    recv_task: (asyncio.Task | None) = None
    proc_task: (asyncio.Task | None) = None
    proc: (asyncio.subprocess.Process | None) = None  # pylint: disable=no-member

    try:  # pylint: disable=too-many-nested-blocks
        while True:
            if receive_task is None:
                receive_task = asyncio.create_task(ws.receive())
            if recv_task is None:
                recv_task = asyncio.create_task(ws.receive())
            if proc_task is None and proc is not None:
                proc_task = asyncio.create_task(proc.wait())

            tasks = list(filter(None, [receive_task, proc_task]))
            tasks = list(filter(None, [recv_task, proc_task]))
            done = (await aiotools.wait_first(*tasks))[0]

            if receive_task in done:
                msg = receive_task.result()
            if recv_task in done:
                msg = recv_task.result()
                if msg.type == aiohttp.WSMsgType.TEXT:
                    (event_type, event) = htserver.parse_ws_event(msg.data)
                    if event_type == "storage":
@ -98,15 +98,15 @@ async def _run_cmd_ws(cmd: list[str], ws: aiohttp.ClientWebSocketResponse) -> in
                else:
                    logger.error("Unknown PST message type: %r", msg)
                    break
                receive_task = None
                recv_task = None

            if proc_task in done:
                break
    except Exception:
        logger.exception("Unhandled exception")

    if receive_task is not None:
        receive_task.cancel()
    if recv_task is not None:
        recv_task.cancel()
    if proc_task is not None:
        proc_task.cancel()
    if proc is not None:

Some files were not shown because too many files have changed in this diff