Merge branch 'main' into db

# Conflicts:
#	app/orm.py
Oscar Krause 2025-04-16 14:55:27 +02:00
commit 522b0a123e
5 changed files with 97 additions and 109 deletions


@@ -16,12 +16,12 @@ build:docker:
interruptible: true
stage: build
rules:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
# deployment is in "deploy:docker:"
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
- app/**/*
- Dockerfile
- requirements.txt
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
tags: [ docker ]
before_script:
- docker buildx inspect
@@ -44,16 +44,13 @@ build:apt:
- if: $CI_COMMIT_TAG
variables:
VERSION: $CI_COMMIT_REF_NAME
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
- if: ($CI_PIPELINE_SOURCE == 'merge_request_event') || ($CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH)
changes:
- app/**/*
- .DEBIAN/**/*
- .gitlab-ci.yml
variables:
VERSION: "0.0.1"
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
variables:
VERSION: "0.0.1"
before_script:
- echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > version.env
# install build dependencies
@@ -94,16 +91,13 @@ build:pacman:
- if: $CI_COMMIT_TAG
variables:
VERSION: $CI_COMMIT_REF_NAME
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
- if: ($CI_PIPELINE_SOURCE == 'merge_request_event') || ($CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH)
changes:
- app/**/*
- .PKGBUILD/**/*
- .gitlab-ci.yml
variables:
VERSION: "0.0.1"
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
variables:
VERSION: "0.0.1"
before_script:
#- echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > version.env
# install build dependencies
@@ -126,13 +120,12 @@ build:pacman:
paths:
- "*.pkg.tar.zst"
test:
image: python:3.12-slim-bookworm
test:python:
image: $IMAGE
stage: test
interruptible: true
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_TAG
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
changes:
@@ -142,17 +135,20 @@ test:
DATABASE: sqlite:///../app/db.sqlite
parallel:
matrix:
- REQUIREMENTS:
- 'requirements.txt'
# - '.DEBIAN/requirements-bookworm-12.txt'
# - '.DEBIAN/requirements-ubuntu-24.04.txt'
# - '.DEBIAN/requirements-ubuntu-24.10.txt'
- IMAGE:
# https://devguide.python.org/versions/#supported-versions
# - python:3.14-rc-alpine # EOL 2030-10 => uvicorn does not support 3.14 yet
- python:3.13-alpine # EOL 2029-10
- python:3.12-alpine # EOL 2028-10
- python:3.11-alpine # EOL 2027-10
# - python:3.10-alpine # EOL 2026-10 => ImportError: cannot import name 'UTC' from 'datetime'
# - python:3.9-alpine # EOL 2025-10 => ImportError: cannot import name 'UTC' from 'datetime'
before_script:
- apt-get update && apt-get install -y python3-dev python3-pip python3-venv gcc
- apk --no-cache add openssl
- python3 -m venv venv
- source venv/bin/activate
- pip install --upgrade pip
- pip install -r $REQUIREMENTS
- pip install -r requirements.txt
- pip install pytest pytest-cov pytest-custom_exit_code httpx
- mkdir -p app/cert
- openssl genrsa -out app/cert/instance.private.pem 2048
@@ -162,17 +158,26 @@ test:
- python -m pytest main.py --junitxml=report.xml
artifacts:
reports:
dotenv: version.env
junit: ['**/report.xml']
.test:apt:
test:apt:
image: $IMAGE
stage: test
rules:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
- app/**/*
- .DEBIAN/**/*
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
parallel:
matrix:
- IMAGE:
- debian:trixie-slim # EOL: t.b.a.
- debian:bookworm-slim # EOL: June 06, 2026
- ubuntu:24.04 # EOL: April 2036
- ubuntu:24.10
needs:
- job: build:apt
artifacts: true
@@ -204,24 +209,15 @@ test:
- apt-get purge -qq -y fastapi-dls
- apt-get autoremove -qq -y && apt-get clean -qq
test:apt:
extends: .test:apt
image: $IMAGE
parallel:
matrix:
- IMAGE:
- debian:bookworm-slim # EOL: June 06, 2026
- ubuntu:24.04 # EOL: April 2036
- ubuntu:24.10
test:pacman:archlinux:
image: archlinux:base
rules:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
- app/**/*
- .PKGBUILD/**/*
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
- .gitlab-ci.yml
needs:
- job: build:pacman
artifacts: true
@@ -296,15 +292,12 @@ gemnasium-python-dependency_scanning:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
.deploy:
rules:
- if: $CI_COMMIT_TAG
deploy:docker:
extends: .deploy
image: docker:dind
stage: deploy
tags: [ docker ]
rules:
- if: $CI_COMMIT_TAG
before_script:
- echo "Building docker image for commit $CI_COMMIT_SHA with version $CI_COMMIT_REF_NAME"
- docker buildx inspect
@@ -323,9 +316,10 @@ deploy:docker:
deploy:apt:
# doc: https://git.collinwebdesigns.de/help/user/packages/debian_repository/index.md#install-a-package
extends: .deploy
image: debian:bookworm-slim
stage: deploy
rules:
- if: $CI_COMMIT_TAG
needs:
- job: build:apt
artifacts: true
@@ -362,9 +356,10 @@ deploy:apt:
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file ${EXPORT_NAME} "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/${PACKAGE_NAME}/${PACKAGE_VERSION}/${EXPORT_NAME}"'
deploy:pacman:
extends: .deploy
image: archlinux:base-devel
stage: deploy
rules:
- if: $CI_COMMIT_TAG
needs:
- job: build:pacman
artifacts: true
@@ -385,7 +380,7 @@ deploy:pacman:
release:
image: registry.gitlab.com/gitlab-org/release-cli:latest
stage: .post
needs: [ test ]
needs: [ build:docker, build:apt, build:pacman ]
rules:
- if: $CI_COMMIT_TAG
script:
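
As a side note on the `test:python` job above: its `before_script` generates a throwaway RSA key for the app under `app/cert/` with `openssl`. Below is a minimal Python sketch of the equivalent using the `cryptography` package the project already depends on; the public-key step and its file name are assumptions, since the hunk is cut off right after the `genrsa` line, and the output format simply mirrors the `PublicKey` serialization visible later in this commit.

```python
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.rsa import generate_private_key

# equivalent of: openssl genrsa -out app/cert/instance.private.pem 2048
key = generate_private_key(public_exponent=65537, key_size=2048)
with open('app/cert/instance.private.pem', 'wb') as f:
    f.write(key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption(),
    ))

# assumption: the CI also derives the matching public key (path is illustrative)
with open('app/cert/instance.public.pem', 'wb') as f:
    f.write(key.public_key().public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    ))
```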


@@ -2,7 +2,7 @@
Minimal Delegated License Service (DLS).
> [!note]
> [!note] Compatibility
> Compatibility tested with official NLS 2.0.1, 2.1.0, 3.1.0, 3.3.1, 3.4.0. For Driver compatibility
> see [compatibility matrix](#vgpu-software-compatibility-matrix).
@@ -402,6 +402,9 @@ Continue [here](#unraid-guest) for docker guest setup.
Thanks to [@mrzenc](https://github.com/mrzenc) for [fastapi-dls-nixos](https://github.com/mrzenc/fastapi-dls-nixos).
> [!note] Native NixOS-Package
> There is a [pull request](https://github.com/NixOS/nixpkgs/pull/358647) which adds fastapi-dls into nixpkgs.
## Let's Encrypt Certificate (optional)
If you use the Docker installation, you can use `traefik`. Please refer to its documentation.
@@ -792,13 +795,13 @@ Thanks to the vGPU community and all who use this project and report bugs.
Special thanks to:
- @samicrusader who created the build file for **ArchLinux**
- @cyrus who wrote the section for **openSUSE**
- @midi who wrote the section for **unRAID**
- @polloloco who wrote the *[NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox)*
- @DualCoder who created the `vgpu_unlock` functionality [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock)
- Krutav Shah who wrote the [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q/)
- Wim van 't Hoog for the [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/)
- @mrzenc who wrote [fastapi-dls-nixos](https://github.com/mrzenc/fastapi-dls-nixos)
- `samicrusader` who created the build file for **ArchLinux**
- `cyrus` who wrote the section for **openSUSE**
- `midi` who wrote the section for **unRAID**
- `polloloco` who wrote the *[NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox)*
- `DualCoder` who created the `vgpu_unlock` functionality [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock)
- `Krutav Shah` who wrote the [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q/)
- `Wim van 't Hoog` for the [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/)
- `mrzenc` who wrote [fastapi-dls-nixos](https://github.com/mrzenc/fastapi-dls-nixos)
And thanks to all people who contributed to all these libraries!


@@ -1,4 +1,5 @@
import logging
from json import load as json_load
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey, generate_private_key
@@ -7,6 +8,14 @@ from cryptography.hazmat.primitives.serialization import load_pem_private_key, load_pem_public_key
logging.basicConfig()
def load_file(filename: str) -> bytes:
log = logging.getLogger(__name__)
log.debug(f'Loading contents of file "{filename}"')
with open(filename, 'rb') as file:
content = file.read()
return content
class PrivateKey:
def __init__(self, data: bytes):
@@ -76,37 +85,32 @@ class PublicKey:
format=serialization.PublicFormat.SubjectPublicKeyInfo
)
def load_file(filename: str) -> bytes:
log = logging.getLogger(__name__)
log.debug(f'Loading contents of file "{filename}"')
with open(filename, 'rb') as file:
content = file.read()
return content
class NV:
class DriverMatrix:
__DRIVER_MATRIX_FILENAME = 'static/driver_matrix.json'
__DRIVER_MATRIX: None | dict = None # https://docs.nvidia.com/grid/ => "Driver Versions"
def __init__(self):
self.log = logging.getLogger(self.__class__.__name__)
if NV.__DRIVER_MATRIX is None:
from json import load as json_load
if DriverMatrix.__DRIVER_MATRIX is None:
self.__load()
def __load(self):
try:
file = open(NV.__DRIVER_MATRIX_FILENAME)
NV.__DRIVER_MATRIX = json_load(file)
file = open(DriverMatrix.__DRIVER_MATRIX_FILENAME)
DriverMatrix.__DRIVER_MATRIX = json_load(file)
file.close()
self.log.debug(f'Successfully loaded "{NV.__DRIVER_MATRIX_FILENAME}".')
self.log.debug(f'Successfully loaded "{DriverMatrix.__DRIVER_MATRIX_FILENAME}".')
except Exception as e:
NV.__DRIVER_MATRIX = {} # init empty dict so the file is not re-opened on every call, only after an app restart
DriverMatrix.__DRIVER_MATRIX = {} # init empty dict so the file is not re-opened on every call, only after an app restart
# self.log.warning(f'Failed to load "{DriverMatrix.__DRIVER_MATRIX_FILENAME}": {e}')
@staticmethod
def find(version: str) -> dict | None:
if NV.__DRIVER_MATRIX is None:
if DriverMatrix.__DRIVER_MATRIX is None:
return None
for idx, (key, branch) in enumerate(NV.__DRIVER_MATRIX.items()):
for idx, (key, branch) in enumerate(DriverMatrix.__DRIVER_MATRIX.items()):
for release in branch.get('$releases'):
linux_driver = release.get('Linux Driver')
windows_driver = release.get('Windows Driver')
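
For orientation, a minimal usage sketch of the renamed `DriverMatrix` class: the module path and the exact return value of `find()` are assumptions (the tail of `find()` is cut off by the hunk above); the intent is that the first instantiation lazily loads `static/driver_matrix.json` and that `find()` looks a guest driver version up against the Linux/Windows driver columns.

```python
from util import DriverMatrix  # assumption: the class lives in app/util.py

DriverMatrix()  # first instantiation triggers the lazy load of static/driver_matrix.json

# '550.90.07' is an illustrative driver version, not taken from the matrix file.
branch = DriverMatrix.find('550.90.07')
if branch is None:
    print('version not found in driver matrix (or matrix failed to load)')
else:
    print(branch)
```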


@@ -1,8 +1,8 @@
fastapi==0.115.12
uvicorn[standard]==0.34.0
uvicorn[standard]==0.34.1
python-jose[cryptography]==3.4.0
cryptography==44.0.2
python-dateutil==2.9.0
sqlalchemy==2.0.40
markdown==3.7
markdown==3.8
python-dotenv==1.1.0


@@ -6,7 +6,7 @@ logger.setLevel(logging.INFO)
URL = 'https://docs.nvidia.com/vgpu/index.html'
BRANCH_STATUS_KEY, SOFTWARE_BRANCH_KEY, = 'vGPU Branch Status', 'vGPU Software Branch'
BRANCH_STATUS_KEY = 'vGPU Branch Status'
VGPU_KEY, GRID_KEY, DRIVER_BRANCH_KEY = 'vGPU Software', 'vGPU Software', 'Driver Branch'
LINUX_VGPU_MANAGER_KEY, LINUX_DRIVER_KEY = 'Linux vGPU Manager', 'Linux Driver'
WINDOWS_VGPU_MANAGER_KEY, WINDOWS_DRIVER_KEY = 'Windows vGPU Manager', 'Windows Driver'
@@ -26,12 +26,15 @@ def __driver_versions(html: 'BeautifulSoup'):
# find wrapper for "DriverVersions" and find tables
data = html.find('div', {'id': 'driver-versions'})
items = data.findAll('bsp-accordion', {'class': 'Accordion-items-item'})
items = data.find_all('bsp-accordion', {'class': 'Accordion-items-item'})
for item in items:
software_branch = item.find('div', {'class': 'Accordion-items-item-title'}).text.strip()
software_branch = software_branch.replace(' Releases', '')
matrix_key = software_branch.lower()
branch_status = item.find('a', href=True, string='Branch status')
branch_status = branch_status.next_sibling.replace(':', '').strip()
# driver version info from table-heads (ths) and table-rows (trs)
table = item.find('table')
ths, trs = table.find_all('th'), table.find_all('tr')
@@ -42,48 +45,20 @@ def __driver_versions(html: 'BeautifulSoup'):
continue
# create dict with table-heads as key and cell content as value
x = {headers[i]: __strip(cell.text) for i, cell in enumerate(tds)}
x.setdefault(BRANCH_STATUS_KEY, branch_status)
releases.append(x)
# add to matrix
MATRIX.update({matrix_key: {JSON_RELEASES_KEY: releases}})
def __release_branches(html: 'BeautifulSoup'):
# find wrapper for "AllReleaseBranches" and find table
data = html.find('div', {'id': 'all-release-branches'})
table = data.find('table')
# branch releases info from table-heads (ths) and table-rows (trs)
ths, trs = table.find_all('th'), table.find_all('tr')
headers = [header.text.strip() for header in ths]
for tr in trs:
tds = tr.find_all('td')
if len(tds) == 0: # skip empty
continue
# create dict with table-heads as key and cell content as value
x = {headers[i]: cell.text.strip() for i, cell in enumerate(tds)}
# get matrix_key
software_branch = x.get(SOFTWARE_BRANCH_KEY)
matrix_key = software_branch.lower()
# add to matrix
MATRIX.update({matrix_key: MATRIX.get(matrix_key) | x})
def __debug():
# print table head
s = f'{SOFTWARE_BRANCH_KEY:^21} | {BRANCH_STATUS_KEY:^21} | {VGPU_KEY:^13} | {LINUX_VGPU_MANAGER_KEY:^21} | {LINUX_DRIVER_KEY:^21} | {WINDOWS_VGPU_MANAGER_KEY:^21} | {WINDOWS_DRIVER_KEY:^21} | {RELEASE_DATE_KEY:>21} | {EOL_KEY:>21}'
s = f'{VGPU_KEY:^13} | {LINUX_VGPU_MANAGER_KEY:^21} | {LINUX_DRIVER_KEY:^21} | {WINDOWS_VGPU_MANAGER_KEY:^21} | {WINDOWS_DRIVER_KEY:^21} | {RELEASE_DATE_KEY:>21} | {BRANCH_STATUS_KEY:^21}'
print(s)
# iterate over dict & format some variables to not overload table
for idx, (key, branch) in enumerate(MATRIX.items()):
branch_status = branch.get(BRANCH_STATUS_KEY)
branch_status = branch_status.replace('Branch ', '')
branch_status = branch_status.replace('Long-Term Support', 'LTS')
branch_status = branch_status.replace('Production', 'Prod.')
software_branch = branch.get(SOFTWARE_BRANCH_KEY).replace('NVIDIA ', '')
for release in branch.get(JSON_RELEASES_KEY):
version = release.get(VGPU_KEY, release.get(GRID_KEY, ''))
linux_manager = release.get(LINUX_VGPU_MANAGER_KEY, release.get(ALT_VGPU_MANAGER_KEY, ''))
@@ -92,13 +67,25 @@ def __debug():
windows_driver = release.get(WINDOWS_DRIVER_KEY)
release_date = release.get(RELEASE_DATE_KEY)
is_latest = release.get(VGPU_KEY) == branch.get(LATEST_KEY)
branch_status = __parse_branch_status(release.get(BRANCH_STATUS_KEY, ''))
version = f'{version} *' if is_latest else version
eol = branch.get(EOL_KEY) if is_latest else ''
s = f'{software_branch:^21} | {branch_status:^21} | {version:<13} | {linux_manager:<21} | {linux_driver:<21} | {windows_manager:<21} | {windows_driver:<21} | {release_date:>21} | {eol:>21}'
s = f'{version:<13} | {linux_manager:<21} | {linux_driver:<21} | {windows_manager:<21} | {windows_driver:<21} | {release_date:>21} | {branch_status:^21}'
print(s)
def __parse_branch_status(string: str) -> str:
string = string.replace('Production Branch', 'Prod. -')
string = string.replace('Long-Term Support Branch', 'LTS -')
string = string.replace('supported until', '')
string = string.replace('EOL since', 'EOL - ')
string = string.replace('EOL from', 'EOL -')
return string
def __dump(filename: str):
import json
@@ -128,7 +115,6 @@ if __name__ == '__main__':
# build matrix
__driver_versions(soup)
__release_branches(soup)
# debug output
__debug()
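
To make the effect of the new `__parse_branch_status()` helper concrete, here is a small self-contained check; the input strings are hypothetical examples of what the NVIDIA page might contain, not values scraped from it.

```python
def __parse_branch_status(string: str) -> str:
    # same replacements as the helper added in this commit
    string = string.replace('Production Branch', 'Prod. -')
    string = string.replace('Long-Term Support Branch', 'LTS -')
    string = string.replace('supported until', '')
    string = string.replace('EOL since', 'EOL - ')
    string = string.replace('EOL from', 'EOL -')
    return string


for raw in ('Production Branch supported until March 2026',
            'Long-Term Support Branch supported until July 2027',
            'EOL since March 2024'):
    print(repr(__parse_branch_status(raw)))
# 'Prod. -  March 2026'
# 'LTS -  July 2027'
# 'EOL -  March 2024'
# (whitespace left behind by the removed phrase is not collapsed)
```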