Compare commits


No commits in common. "main" and "1.5.1" have entirely different histories.
main ... 1.5.1

17 changed files with 398 additions and 252 deletions

@@ -2,7 +2,7 @@ Package: fastapi-dls
 Version: 0.0
 Architecture: all
 Maintainer: Oscar Krause oscar.krause@collinwebdesigns.de
-Depends: python3, python3-fastapi, python3-uvicorn, python3-dotenv, python3-dateutil, python3-josepy, python3-sqlalchemy, python3-cryptography, python3-markdown, uvicorn, openssl
+Depends: python3, python3-fastapi, python3-uvicorn, python3-dotenv, python3-dateutil, python3-josepy, python3-sqlalchemy, python3-pycryptodome, python3-markdown, uvicorn, openssl
 Recommends: curl
 Installed-Size: 10240
 Homepage: https://git.collinwebdesigns.de/oscar.krause/fastapi-dls

.DEBIAN/requirements-bookworm-12.txt

@@ -1,8 +1,8 @@
 # https://packages.debian.org/hu/
 fastapi==0.92.0
 uvicorn[standard]==0.17.6
-python-jose[cryptography]==3.3.0
-cryptography==38.0.4
+python-jose[pycryptodome]==3.3.0
+pycryptodome==3.11.0
 python-dateutil==2.8.2
 sqlalchemy==1.4.46
 markdown==3.4.1

.DEBIAN/requirements-ubuntu-24.04.txt

@@ -1,8 +1,8 @@
 # https://packages.ubuntu.com
 fastapi==0.101.0
 uvicorn[standard]==0.27.1
-python-jose[cryptography]==3.3.0
-cryptography==41.0.7
+python-jose[pycryptodome]==3.3.0
+pycryptodome==3.20.0
 python-dateutil==2.8.2
 sqlalchemy==1.4.50
 markdown==3.5.2

.DEBIAN/requirements-ubuntu-24.10.txt

@@ -1,8 +1,8 @@
 # https://packages.ubuntu.com
 fastapi==0.110.3
 uvicorn[standard]==0.30.3
-python-jose[cryptography]==3.3.0
-cryptography==42.0.5
+python-jose[pycryptodome]==3.3.0
+pycryptodome==3.20.0
 python-dateutil==2.9.0
 sqlalchemy==2.0.32
 markdown==3.6

.PKGBUILD/PKGBUILD

@@ -8,7 +8,7 @@ pkgdesc='NVIDIA DLS server implementation with FastAPI'
 arch=('any')
 url='https://git.collinwebdesigns.de/oscar.krause/fastapi-dls'
 license=('MIT')
-depends=('python' 'python-jose' 'python-starlette' 'python-httpx' 'python-fastapi' 'python-dotenv' 'python-dateutil' 'python-sqlalchemy' 'python-cryptography' 'uvicorn' 'python-markdown' 'openssl')
+depends=('python' 'python-jose' 'python-starlette' 'python-httpx' 'python-fastapi' 'python-dotenv' 'python-dateutil' 'python-sqlalchemy' 'python-pycryptodome' 'uvicorn' 'python-markdown' 'openssl')
 provider=("$pkgname")
 install="$pkgname.install"
 backup=('etc/default/fastapi-dls')
@@ -39,7 +39,7 @@ check() {
 package() {
   install -d "$pkgdir/usr/share/doc/$pkgname"
   install -d "$pkgdir/var/lib/$pkgname/cert"
-  #cp -r "$srcdir/$pkgname/doc"/* "$pkgdir/usr/share/doc/$pkgname/"
+  cp -r "$srcdir/$pkgname/doc"/* "$pkgdir/usr/share/doc/$pkgname/"
   install -Dm644 "$srcdir/$pkgname/README.md" "$pkgdir/usr/share/doc/$pkgname/README.md"
   install -Dm644 "$srcdir/$pkgname/version.env" "$pkgdir/usr/share/doc/$pkgname/version.env"

.gitignore

@@ -1,6 +1,6 @@
 .DS_Store
 venv/
 .idea/
-*.sqlite
+app/*.sqlite*
 app/cert/*.*
 .pytest_cache

.gitlab-ci.yml

@@ -16,12 +16,12 @@ build:docker:
   interruptible: true
   stage: build
   rules:
-    # deployment is in "deploy:docker:"
-    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
+    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
       changes:
         - app/**/*
         - Dockerfile
         - requirements.txt
+    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
   tags: [ docker ]
   before_script:
     - docker buildx inspect
@@ -44,13 +44,16 @@ build:apt:
     - if: $CI_COMMIT_TAG
       variables:
         VERSION: $CI_COMMIT_REF_NAME
-    - if: ($CI_PIPELINE_SOURCE == 'merge_request_event') || ($CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH)
+    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
       changes:
         - app/**/*
         - .DEBIAN/**/*
         - .gitlab-ci.yml
       variables:
         VERSION: "0.0.1"
+    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
+      variables:
+        VERSION: "0.0.1"
   before_script:
     - echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > version.env
     # install build dependencies
@@ -91,13 +94,16 @@ build:pacman:
     - if: $CI_COMMIT_TAG
       variables:
         VERSION: $CI_COMMIT_REF_NAME
-    - if: ($CI_PIPELINE_SOURCE == 'merge_request_event') || ($CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH)
+    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
       changes:
         - app/**/*
         - .PKGBUILD/**/*
         - .gitlab-ci.yml
       variables:
         VERSION: "0.0.1"
+    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
+      variables:
+        VERSION: "0.0.1"
   before_script:
     #- echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > version.env
     # install build dependencies
@@ -120,12 +126,13 @@ build:pacman:
     paths:
       - "*.pkg.tar.zst"
 
-test:python:
-  image: $IMAGE
+test:
+  image: python:3.12-slim-bookworm
   stage: test
   interruptible: true
   rules:
     - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+    - if: $CI_COMMIT_TAG
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
     - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
       changes:
@@ -135,21 +142,18 @@ test:python:
     DATABASE: sqlite:///../app/db.sqlite
   parallel:
     matrix:
-      - IMAGE:
-          # https://devguide.python.org/versions/#supported-versions
-          # - python:3.14-rc-alpine # EOL 2030-10 => uvicorn does not support 3.14 yet
-          - python:3.13-alpine # EOL 2029-10
-          - python:3.12-alpine # EOL 2028-10
-          - python:3.11-alpine # EOL 2027-10
-          # - python:3.10-alpine # EOL 2026-10 => ImportError: cannot import name 'UTC' from 'datetime'
-          # - python:3.9-alpine # EOL 2025-10 => ImportError: cannot import name 'UTC' from 'datetime'
+      - REQUIREMENTS:
+          - 'requirements.txt'
+          # - '.DEBIAN/requirements-bookworm-12.txt'
+          # - '.DEBIAN/requirements-ubuntu-24.04.txt'
+          # - '.DEBIAN/requirements-ubuntu-24.10.txt'
   before_script:
-    - apk --no-cache add openssl
+    - apt-get update && apt-get install -y python3-dev python3-pip python3-venv gcc
     - python3 -m venv venv
     - source venv/bin/activate
     - pip install --upgrade pip
-    - pip install -r requirements.txt
-    - pip install pytest pytest-cov pytest-custom_exit_code httpx
+    - pip install -r $REQUIREMENTS
+    - pip install pytest httpx
     - mkdir -p app/cert
     - openssl genrsa -out app/cert/instance.private.pem 2048
     - openssl rsa -in app/cert/instance.private.pem -outform PEM -pubout -out app/cert/instance.public.pem
@@ -158,26 +162,17 @@ test:python:
     - python -m pytest main.py --junitxml=report.xml
   artifacts:
     reports:
-      dotenv: version.env
       junit: ['**/report.xml']
 
-test:apt:
-  image: $IMAGE
+.test:apt:
   stage: test
   rules:
-    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
-    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
+    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
       changes:
         - app/**/*
         - .DEBIAN/**/*
     - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
-  parallel:
-    matrix:
-      - IMAGE:
-          - debian:trixie-slim # EOL: t.b.a.
-          - debian:bookworm-slim # EOL: June 06, 2026
-          - ubuntu:24.04 # EOL: April 2036
-          - ubuntu:24.10
   needs:
     - job: build:apt
       artifacts: true
@@ -209,15 +204,24 @@ test:apt:
     - apt-get purge -qq -y fastapi-dls
     - apt-get autoremove -qq -y && apt-get clean -qq
 
+test:apt:
+  extends: .test:apt
+  image: $IMAGE
+  parallel:
+    matrix:
+      - IMAGE:
+          - debian:bookworm-slim # EOL: June 06, 2026
+          - ubuntu:24.04 # EOL: April 2036
+          - ubuntu:24.10
+
 test:pacman:archlinux:
   image: archlinux:base
   rules:
-    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
-    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
+    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
       changes:
         - app/**/*
         - .PKGBUILD/**/*
-        - .gitlab-ci.yml
+    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
   needs:
     - job: build:pacman
       artifacts: true
@@ -261,19 +265,19 @@ test_coverage:
   before_script:
     - apt-get update && apt-get install -y python3-dev gcc
     - pip install -r requirements.txt
-    - pip install pytest pytest-cov pytest-custom_exit_code httpx
+    - pip install pytest httpx
     - mkdir -p app/cert
     - openssl genrsa -out app/cert/instance.private.pem 2048
     - openssl rsa -in app/cert/instance.private.pem -outform PEM -pubout -out app/cert/instance.public.pem
     - cd test
   script:
-    - coverage run -m pytest main.py --junitxml=report.xml --suppress-no-test-exit-code
+    - pip install pytest pytest-cov
+    - coverage run -m pytest main.py
     - coverage report
     - coverage xml
   coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
   artifacts:
     reports:
-      junit: [ '**/report.xml' ]
       coverage_report:
         coverage_format: cobertura
         path: '**/coverage.xml'
@@ -292,12 +296,15 @@ gemnasium-python-dependency_scanning:
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
     - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
 
+.deploy:
+  rules:
+    - if: $CI_COMMIT_TAG
+
 deploy:docker:
+  extends: .deploy
   image: docker:dind
   stage: deploy
   tags: [ docker ]
-  rules:
-    - if: $CI_COMMIT_TAG
   before_script:
     - echo "Building docker image for commit $CI_COMMIT_SHA with version $CI_COMMIT_REF_NAME"
     - docker buildx inspect
@@ -316,10 +323,9 @@ deploy:docker:
 deploy:apt:
   # doc: https://git.collinwebdesigns.de/help/user/packages/debian_repository/index.md#install-a-package
+  extends: .deploy
   image: debian:bookworm-slim
   stage: deploy
-  rules:
-    - if: $CI_COMMIT_TAG
   needs:
     - job: build:apt
       artifacts: true
@@ -356,10 +362,9 @@ deploy:apt:
     - 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file ${EXPORT_NAME} "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/${PACKAGE_NAME}/${PACKAGE_VERSION}/${EXPORT_NAME}"'
 
 deploy:pacman:
+  extends: .deploy
   image: archlinux:base-devel
   stage: deploy
-  rules:
-    - if: $CI_COMMIT_TAG
   needs:
     - job: build:pacman
      artifacts: true
@@ -380,7 +385,7 @@ deploy:pacman:
 release:
   image: registry.gitlab.com/gitlab-org/release-cli:latest
   stage: .post
-  needs: [ deploy:docker, deploy:apt, deploy:pacman ]
+  needs: [ test ]
   rules:
     - if: $CI_COMMIT_TAG
   script:

FAQ.md (new file)

@@ -0,0 +1,17 @@
# FAQ

## `Failed to acquire license from <ip> (Info: <license> - Error: The allowed time to process response has expired)`

- Are the timezone settings correct on fastapi-dls **and your guest**?
- Did you download the client-token more than an hour ago?

  Please download a new client-token. The guest has to register within an hour after the client-token was created.

## `jose.exceptions.JWTError: Signature verification failed.`

- Did you recreate `instance.public.pem` / `instance.private.pem`?

  Then you have to download a **new** client-token on each of your guests.
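
Both FAQ entries come down to the client-token. A minimal sketch (file names are hypothetical; it uses the same `python-jose` API the service itself uses) that verifies a client-token against the instance public key and prints its validity window:

```python
from datetime import datetime, timezone

from jose import jwk, jwt
from jose.constants import ALGORITHMS

# Hypothetical locations - adjust to your deployment.
TOKEN_FILE = 'client_configuration_token.tok'
PUBLIC_KEY_FILE = 'app/cert/instance.public.pem'

with open(PUBLIC_KEY_FILE, 'r') as f:
    decode_key = jwk.construct(f.read(), algorithm=ALGORITHMS.RS256)

with open(TOKEN_FILE, 'r') as f:
    token = f.read().strip()

# Raises jose.exceptions.JWTError on a signature mismatch, i.e. when the
# instance key pair was recreated after the token was issued.
payload = jwt.decode(token, decode_key, algorithms=[ALGORITHMS.RS256],
                     options={'verify_aud': False, 'verify_exp': False})

# A skewed guest clock shows up as a mismatch between these timestamps.
for claim in ('iat', 'nbf', 'exp'):
    if claim in payload:
        print(claim, datetime.fromtimestamp(payload[claim], tz=timezone.utc))
print('now', datetime.now(tz=timezone.utc))
```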

README.md

@@ -2,12 +2,8 @@
 Minimal Delegated License Service (DLS).
 
-> [!note] Compatibility
-> Compatibility tested with official NLS 2.0.1, 2.1.0, 3.1.0, 3.3.1, 3.4.0. For Driver compatibility
-> see [compatibility matrix](#vgpu-software-compatibility-matrix).
-
-> [!warning] 18.x Drivers are not yet supported!
-> Drivers are only supported until **17.x releases**.
+Compatibility tested with official NLS 2.0.1, 2.1.0, 3.1.0, 3.3.1, 3.4.0. For Driver compatibility
+see [compatibility matrix](#vgpu-software-compatibility-matrix).
 
 This service can be used without internet connection.
 Only the clients need a connection to this service on configured port.
@@ -86,7 +82,7 @@ docker run -e DLS_URL=`hostname -i` -e DLS_PORT=443 -p 443:443 -v $WORKING_DIR:/
 See [`examples`](examples) directory for more advanced examples (with reverse proxy usage).
 
-> Adjust `REQUIRED` variables as needed
+> Adjust *REQUIRED* variables as needed
 
 ```yaml
 version: '3.9'
@@ -333,14 +329,14 @@ Packages are available here:
 - [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/packages)
 
-Successful tested with (**LTS Version**):
+Successfully tested with:
 
 - **Debian 12 (Bookworm)** (EOL: June 06, 2026)
 - *Ubuntu 22.10 (Kinetic Kudu)* (EOL: July 20, 2023)
 - *Ubuntu 23.04 (Lunar Lobster)* (EOL: January 2024)
 - *Ubuntu 23.10 (Mantic Minotaur)* (EOL: July 2024)
-- **Ubuntu 24.04 (Noble Numbat)** (EOL: Apr 2029)
-- *Ubuntu 24.10 (Oracular Oriole)* (EOL: Jul 2025)
+- **Ubuntu 24.04 (Noble Numbat)** (EOL: April 2036)
+- *Ubuntu 24.10 (Oracular Oriole)* (EOL: tba.)
 
 Not working with:
@@ -402,9 +398,6 @@ Continue [here](#unraid-guest) for docker guest setup.
 Thanks to [@mrzenc](https://github.com/mrzenc) for [fastapi-dls-nixos](https://github.com/mrzenc/fastapi-dls-nixos).
 
-> [!note] Native NixOS-Package
-> There is a [pull request](https://github.com/NixOS/nixpkgs/pull/358647) which adds fastapi-dls into nixpkgs.
-
 ## Let's Encrypt Certificate (optional)
 
 If you're using installation via docker, you can use `traefik`. Please refer to their documentation.
@@ -606,21 +599,6 @@ Logs are available in `C:\Users\Public\Documents\Nvidia\LoggingLog.NVDisplay.Con
 # Known Issues
 
-## Generic
-
-### `Failed to acquire license from <ip> (Info: <license> - Error: The allowed time to process response has expired)`
-
-- Did your timezone settings are correct on fastapi-dls **and your guest**?
-- Did you download the client-token more than an hour ago?
-  Please download a new client-token. The guest have to register within an hour after client-token was created.
-
-### `jose.exceptions.JWTError: Signature verification failed.`
-
-- Did you recreate `instance.public.pem` / `instance.private.pem`?
-  Then you have to download a **new** client-token on each of your guests.
 
 ## Linux
 
 ### Invalid HTTP request
@@ -753,11 +731,6 @@ The error message can safely be ignored (since we have no license limitation :P)
 # vGPU Software Compatibility Matrix
 
-**18.x Drivers are not supported on FastAPI-DLS Versions < 1.6.0**
-
-<details>
-  <summary>Show Table</summary>
-
 Successfully tested with these package versions.
 
 | vGPU Software | Driver Branch | Linux vGPU Manager | Linux Driver | Windows Driver | Release Date | EOL Date |
@@ -781,8 +754,6 @@ Successfully tested with these package versions.
 | `15.4` | R525 | `525.147.01` | `525.147.05` | `529.19` | June 2023 | December 2023 |
 | `14.4` | R510 | `510.108.03` | `510.108.03` | `514.08` | December 2022 | February 2023 |
 
-</details>
-
 - https://docs.nvidia.com/grid/index.html
 - https://docs.nvidia.com/grid/gpus-supported-by-vgpu.html
@@ -795,13 +766,13 @@ Thanks to vGPU community and all who uses this project and report bugs.
 Special thanks to:
 
-- `samicrusader` who created build file for **ArchLinux**
-- `cyrus` who wrote the section for **openSUSE**
-- `midi` who wrote the section for **unRAID**
-- `polloloco` who wrote the *[NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox)*
-- `DualCoder` who creates the `vgpu_unlock` functionality [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock)
-- `Krutav Shah` who wrote the [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q/)
-- `Wim van 't Hoog` for the [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/)
-- `mrzenc` who wrote [fastapi-dls-nixos](https://github.com/mrzenc/fastapi-dls-nixos)
+- @samicrusader who created build file for **ArchLinux**
+- @cyrus who wrote the section for **openSUSE**
+- @midi who wrote the section for **unRAID**
+- @polloloco who wrote the *[NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox)*
+- @DualCoder who creates the `vgpu_unlock` functionality [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock)
+- Krutav Shah who wrote the [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q/)
+- Wim van 't Hoog for the [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/)
+- @mrzenc who wrote [fastapi-dls-nixos](https://github.com/mrzenc/fastapi-dls-nixos)
 
 And thanks to all people who contributed to all these libraries!

app/main.py

@@ -21,7 +21,7 @@ from starlette.middleware.cors import CORSMiddleware
 from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse
 
 from orm import Origin, Lease, init as db_init, migrate
-from util import PrivateKey, PublicKey, load_file
+from util import load_key, load_file
 
 # Load variables
 load_dotenv('../version.env')
@@ -42,8 +42,8 @@ DLS_PORT = int(env('DLS_PORT', '443'))
 SITE_KEY_XID = str(env('SITE_KEY_XID', '00000000-0000-0000-0000-000000000000'))
 INSTANCE_REF = str(env('INSTANCE_REF', '10000000-0000-0000-0000-000000000001'))
 ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001'))
-INSTANCE_KEY_RSA = PrivateKey.from_file(str(env('INSTANCE_KEY_RSA', join(dirname(__file__), 'cert/instance.private.pem'))))
-INSTANCE_KEY_PUB = PublicKey.from_file(str(env('INSTANCE_KEY_PUB', join(dirname(__file__), 'cert/instance.public.pem'))))
+INSTANCE_KEY_RSA = load_key(str(env('INSTANCE_KEY_RSA', join(dirname(__file__), 'cert/instance.private.pem'))))
+INSTANCE_KEY_PUB = load_key(str(env('INSTANCE_KEY_PUB', join(dirname(__file__), 'cert/instance.public.pem'))))
 TOKEN_EXPIRE_DELTA = relativedelta(days=int(env('TOKEN_EXPIRE_DAYS', 1)), hours=int(env('TOKEN_EXPIRE_HOURS', 0)))
 LEASE_EXPIRE_DELTA = relativedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
 LEASE_RENEWAL_PERIOD = float(env('LEASE_RENEWAL_PERIOD', 0.15))
@@ -51,8 +51,8 @@ LEASE_RENEWAL_DELTA = timedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=in
 CLIENT_TOKEN_EXPIRE_DELTA = relativedelta(years=12)
 CORS_ORIGINS = str(env('CORS_ORIGINS', '')).split(',') if (env('CORS_ORIGINS')) else [f'https://{DLS_URL}']
 
-jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.pem(), algorithm=ALGORITHMS.RS256)
-jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.pem(), algorithm=ALGORITHMS.RS256)
+jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
+jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
 
 # Logging
 LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO
@@ -264,10 +264,10 @@ async def _client_token():
         },
         "service_instance_public_key_configuration": {
             "service_instance_public_key_me": {
-                "mod": hex(INSTANCE_KEY_PUB.raw().public_numbers().n)[2:],
-                "exp": int(INSTANCE_KEY_PUB.raw().public_numbers().e),
+                "mod": hex(INSTANCE_KEY_PUB.public_key().n)[2:],
+                "exp": int(INSTANCE_KEY_PUB.public_key().e),
             },
-            "service_instance_public_key_pem": INSTANCE_KEY_PUB.pem().decode('utf-8'),
+            "service_instance_public_key_pem": INSTANCE_KEY_PUB.export_key().decode('utf-8'),
             "key_retention_mode": "LATEST_ONLY"
         },
     }
@@ -287,7 +287,7 @@ async def auth_v1_origin(request: Request):
     j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)
 
     origin_ref = j.get('candidate_origin_ref')
-    logger.info(f'> [ origin ]: {origin_ref}: {j}')
+    logging.info(f'> [ origin ]: {origin_ref}: {j}')
 
     data = Origin(
         origin_ref=origin_ref,
@@ -317,7 +317,7 @@ async def auth_v1_origin_update(request: Request):
     j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)
 
     origin_ref = j.get('origin_ref')
-    logger.info(f'> [ update ]: {origin_ref}: {j}')
+    logging.info(f'> [ update ]: {origin_ref}: {j}')
 
     data = Origin(
         origin_ref=origin_ref,
@@ -344,7 +344,7 @@ async def auth_v1_code(request: Request):
     j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)
 
     origin_ref = j.get('origin_ref')
-    logger.info(f'> [ code ]: {origin_ref}: {j}')
+    logging.info(f'> [ code ]: {origin_ref}: {j}')
 
     delta = relativedelta(minutes=15)
     expires = cur_time + delta
@@ -381,7 +381,7 @@ async def auth_v1_token(request: Request):
         return JSONr(status_code=400, content={'status': 400, 'title': 'invalid token', 'detail': str(e)})
 
     origin_ref = payload.get('origin_ref')
-    logger.info(f'> [ auth ]: {origin_ref}: {j}')
+    logging.info(f'> [ auth ]: {origin_ref}: {j}')
 
     # validate the code challenge
     challenge = b64enc(sha256(j.get('code_verifier').encode('utf-8')).digest()).rstrip(b'=').decode('utf-8')
@@ -424,7 +424,7 @@ async def leasing_v1_lessor(request: Request):
     origin_ref = token.get('origin_ref')
     scope_ref_list = j.get('scope_ref_list')
-    logger.info(f'> [ create ]: {origin_ref}: create leases for scope_ref_list {scope_ref_list}')
+    logging.info(f'> [ create ]: {origin_ref}: create leases for scope_ref_list {scope_ref_list}')
 
     lease_result_list = []
     for scope_ref in scope_ref_list:
@@ -468,7 +468,7 @@ async def leasing_v1_lessor_lease(request: Request):
     origin_ref = token.get('origin_ref')
 
     active_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
-    logger.info(f'> [ leases ]: {origin_ref}: found {len(active_lease_list)} active leases')
+    logging.info(f'> [ leases ]: {origin_ref}: found {len(active_lease_list)} active leases')
 
     response = {
         "active_lease_list": active_lease_list,
@@ -486,7 +486,7 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
     token, cur_time = __get_token(request), datetime.now(UTC)
 
     origin_ref = token.get('origin_ref')
-    logger.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')
+    logging.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')
 
     entity = Lease.find_by_origin_ref_and_lease_ref(db, origin_ref, lease_ref)
     if entity is None:
@@ -513,7 +513,7 @@ async def leasing_v1_lease_delete(request: Request, lease_ref: str):
     token, cur_time = __get_token(request), datetime.now(UTC)
 
     origin_ref = token.get('origin_ref')
-    logger.info(f'> [ return ]: {origin_ref}: return {lease_ref}')
+    logging.info(f'> [ return ]: {origin_ref}: return {lease_ref}')
 
     entity = Lease.find_by_lease_ref(db, lease_ref)
     if entity.origin_ref != origin_ref:
@@ -542,7 +542,7 @@ async def leasing_v1_lessor_lease_remove(request: Request):
     released_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
     deletions = Lease.cleanup(db, origin_ref)
-    logger.info(f'> [ remove ]: {origin_ref}: removed {deletions} leases')
+    logging.info(f'> [ remove ]: {origin_ref}: removed {deletions} leases')
 
     response = {
         "released_lease_list": released_lease_list,
@@ -564,7 +564,7 @@ async def leasing_v1_lessor_shutdown(request: Request):
     released_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
     deletions = Lease.cleanup(db, origin_ref)
-    logger.info(f'> [ shutdown ]: {origin_ref}: removed {deletions} leases')
+    logging.info(f'> [ shutdown ]: {origin_ref}: removed {deletions} leases')
 
     response = {
         "released_lease_list": released_lease_list,
@@ -587,7 +587,7 @@ if __name__ == '__main__':
     #
     ###
 
-    logger.info(f'> Starting dev-server ...')
+    logging.info(f'> Starting dev-server ...')
 
     ssl_keyfile = join(dirname(__file__), 'cert/webserver.key')
     ssl_certfile = join(dirname(__file__), 'cert/webserver.crt')
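
The diff above swaps the `cryptography`-based `PrivateKey`/`PublicKey` wrappers for plain pycryptodome `RsaKey` objects; `jwk.construct()` accepts their PEM export just as well. A minimal sketch of the round trip (standalone, with a freshly generated key standing in for the `instance.*.pem` files):

```python
from Crypto.PublicKey import RSA  # pycryptodome
from jose import jwk, jwt
from jose.constants import ALGORITHMS

key = RSA.generate(bits=2048)  # stand-in for load_key('cert/instance.private.pem')

# pycryptodome exports PEM as bytes; python-jose wants a string
encode_key = jwk.construct(key.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
decode_key = jwk.construct(key.public_key().export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)

token = jwt.encode({'origin_ref': 'example'}, encode_key, algorithm=ALGORITHMS.RS256)
assert jwt.decode(token, decode_key, algorithms=[ALGORITHMS.RS256])['origin_ref'] == 'example'

# the modulus/exponent pair used in the client-token configuration
print(hex(key.public_key().n)[2:][:16], '...', key.public_key().e)
```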

app/orm.py

@@ -5,7 +5,7 @@ from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_
 from sqlalchemy.engine import Engine
 from sqlalchemy.orm import sessionmaker, declarative_base
 
-from util import DriverMatrix
+from util import NV
 
 Base = declarative_base()
 
@@ -25,7 +25,7 @@ class Origin(Base):
         return f'Origin(origin_ref={self.origin_ref}, hostname={self.hostname})'
 
     def serialize(self) -> dict:
-        _ = DriverMatrix().find(self.guest_driver_version)
+        _ = NV().find(self.guest_driver_version)
 
         return {
             'origin_ref': self.origin_ref,
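
`serialize()` only consults the driver matrix for enrichment; `NV().find()` looks a guest driver version up in `static/driver_matrix.json`. A small usage sketch (the version string is hypothetical, and the return shape is inferred from the keys visible in `util.py` below):

```python
from util import NV

# looks a guest driver version up in the scraped matrix; returns the matching
# release dict, or None when the matrix file is missing or the version is unknown
info = NV().find('525.147.05')  # hypothetical guest_driver_version
if info is not None:
    print(info.get('vGPU Software'), info.get('Driver Branch'))
```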

app/util.py

@@ -1,9 +1,4 @@
 import logging
-from json import load as json_load
-
-from cryptography.hazmat.primitives import serialization
-from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey, generate_private_key
-from cryptography.hazmat.primitives.serialization import load_pem_private_key, load_pem_public_key
 
 logging.basicConfig()
@@ -16,101 +11,56 @@ def load_file(filename: str) -> bytes:
     return content
 
-class PrivateKey:
-
-    def __init__(self, data: bytes):
-        self.__key = load_pem_private_key(data, password=None)
-
-    @staticmethod
-    def from_file(filename: str) -> "PrivateKey":
-        log = logging.getLogger(__name__)
-        log.debug(f'Importing RSA-Private-Key from "{filename}"')
-
-        with open(filename, 'rb') as f:
-            data = f.read()
-
-        return PrivateKey(data=data.strip())
-
-    def raw(self) -> RSAPrivateKey:
-        return self.__key
-
-    def pem(self) -> bytes:
-        return self.__key.private_bytes(
-            encoding=serialization.Encoding.PEM,
-            format=serialization.PrivateFormat.TraditionalOpenSSL,
-            encryption_algorithm=serialization.NoEncryption()
-        )
-
-    def public_key(self) -> "PublicKey":
-        data = self.__key.public_key().public_bytes(
-            encoding=serialization.Encoding.PEM,
-            format=serialization.PublicFormat.SubjectPublicKeyInfo
-        )
-        return PublicKey(data=data)
-
-    @staticmethod
-    def generate(public_exponent: int = 65537, key_size: int = 2048) -> "PrivateKey":
-        log = logging.getLogger(__name__)
-        log.debug(f'Generating RSA-Key')
-        key = generate_private_key(public_exponent=public_exponent, key_size=key_size)
-        data = key.private_bytes(
-            encoding=serialization.Encoding.PEM,
-            format=serialization.PrivateFormat.TraditionalOpenSSL,
-            encryption_algorithm=serialization.NoEncryption()
-        )
-        return PrivateKey(data=data)
-
-
-class PublicKey:
-
-    def __init__(self, data: bytes):
-        self.__key = load_pem_public_key(data)
-
-    @staticmethod
-    def from_file(filename: str) -> "PublicKey":
-        log = logging.getLogger(__name__)
-        log.debug(f'Importing RSA-Public-Key from "{filename}"')
-
-        with open(filename, 'rb') as f:
-            data = f.read()
-
-        return PublicKey(data=data.strip())
-
-    def raw(self) -> RSAPublicKey:
-        return self.__key
-
-    def pem(self) -> bytes:
-        return self.__key.public_bytes(
-            encoding=serialization.Encoding.PEM,
-            format=serialization.PublicFormat.SubjectPublicKeyInfo
-        )
-
-
-class DriverMatrix:
+def load_key(filename: str) -> "RsaKey":
+    try:
+        # Crypto | Cryptodome on Debian
+        from Crypto.PublicKey import RSA
+        from Crypto.PublicKey.RSA import RsaKey
+    except ModuleNotFoundError:
+        from Cryptodome.PublicKey import RSA
+        from Cryptodome.PublicKey.RSA import RsaKey
+
+    log = logging.getLogger(__name__)
+    log.debug(f'Importing RSA-Key from "{filename}"')
+    return RSA.import_key(extern_key=load_file(filename), passphrase=None)
+
+
+def generate_key() -> "RsaKey":
+    try:
+        # Crypto | Cryptodome on Debian
+        from Crypto.PublicKey import RSA
+        from Crypto.PublicKey.RSA import RsaKey
+    except ModuleNotFoundError:
+        from Cryptodome.PublicKey import RSA
+        from Cryptodome.PublicKey.RSA import RsaKey
+
+    log = logging.getLogger(__name__)
+    log.debug(f'Generating RSA-Key')
+    return RSA.generate(bits=2048)
+
+
+class NV:
     __DRIVER_MATRIX_FILENAME = 'static/driver_matrix.json'
     __DRIVER_MATRIX: None | dict = None  # https://docs.nvidia.com/grid/ => "Driver Versions"
 
     def __init__(self):
         self.log = logging.getLogger(self.__class__.__name__)
-        if DriverMatrix.__DRIVER_MATRIX is None:
-            self.__load()
-
-    def __load(self):
-        try:
-            file = open(DriverMatrix.__DRIVER_MATRIX_FILENAME)
-            DriverMatrix.__DRIVER_MATRIX = json_load(file)
-            file.close()
-            self.log.debug(f'Successfully loaded "{DriverMatrix.__DRIVER_MATRIX_FILENAME}".')
-        except Exception as e:
-            DriverMatrix.__DRIVER_MATRIX = {}  # init empty dict to not try open file everytime, just when restarting app
+        if NV.__DRIVER_MATRIX is None:
+            from json import load as json_load
+            try:
+                file = open(NV.__DRIVER_MATRIX_FILENAME)
+                NV.__DRIVER_MATRIX = json_load(file)
+                file.close()
+                self.log.debug(f'Successfully loaded "{NV.__DRIVER_MATRIX_FILENAME}".')
+            except Exception as e:
+                NV.__DRIVER_MATRIX = {}  # init empty dict to not try open file everytime, just when restarting app
+                # self.log.warning(f'Failed to load "{NV.__DRIVER_MATRIX_FILENAME}": {e}')
 
     @staticmethod
     def find(version: str) -> dict | None:
-        if DriverMatrix.__DRIVER_MATRIX is None:
+        if NV.__DRIVER_MATRIX is None:
             return None
-        for idx, (key, branch) in enumerate(DriverMatrix.__DRIVER_MATRIX.items()):
+        for idx, (key, branch) in enumerate(NV.__DRIVER_MATRIX.items()):
             for release in branch.get('$releases'):
                 linux_driver = release.get('Linux Driver')
                 windows_driver = release.get('Windows Driver')
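
The try/except around the imports exists because the pip package `pycryptodome` installs the library as `Crypto`, while Debian/Ubuntu packaging exposes it as `Cryptodome` (as the `# Crypto | Cryptodome on Debian` comment notes). A quick sketch to check which namespace is active and that import/export round-trips:

```python
# pip's "pycryptodome" provides Crypto; Debian's packaging provides Cryptodome
try:
    from Crypto.PublicKey import RSA
    namespace = 'Crypto'
except ModuleNotFoundError:
    from Cryptodome.PublicKey import RSA
    namespace = 'Cryptodome'

key = RSA.generate(bits=2048)
pem = key.export_key()                # private key, PEM bytes
pub = key.public_key().export_key()   # public key, PEM bytes

# re-importing either PEM yields the same modulus
assert RSA.import_key(extern_key=pem).n == RSA.import_key(extern_key=pub).n
print(namespace, 'modulus bits:', key.size_in_bits())
```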

doc/Database.md (new file)

@@ -0,0 +1,26 @@
# Database structure

## `request_routing.service_instance`

| xid                                    | org_name                 |
|----------------------------------------|--------------------------|
| `10000000-0000-0000-0000-000000000000` | `lic-000000000000000000` |

- `xid` is used as `SERVICE_INSTANCE_XID`

## `request_routing.license_allotment_service_instance`

| xid                                    | service_instance_xid                   | license_allotment_xid                  |
|----------------------------------------|----------------------------------------|----------------------------------------|
| `90000000-0000-0000-0000-000000000001` | `10000000-0000-0000-0000-000000000000` | `80000000-0000-0000-0000-000000000001` |

- `xid` is only a primary key and never used as a foreign key or reference
- `license_allotment_xid` must be used to fetch the `xid`s from `request_routing.license_allotment_reference`

## `request_routing.license_allotment_reference`

| xid                                    | license_allotment_xid                  |
|----------------------------------------|----------------------------------------|
| `20000000-0000-0000-0000-000000000001` | `80000000-0000-0000-0000-000000000001` |

- `xid` is used as `scope_ref_list` on token request
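
The three tables chain through `license_allotment_xid`, so resolving the `scope_ref_list` value for a service instance takes two joins. A sketch of the query (table and column names taken from the tables above; run it with whatever client reaches the DLS database):

```python
# Resolves the scope_ref_list value for each service instance.
QUERY = """
SELECT si.xid  AS service_instance_xid,
       lar.xid AS scope_ref
  FROM request_routing.service_instance si
  JOIN request_routing.license_allotment_service_instance lasi
    ON lasi.service_instance_xid = si.xid
  JOIN request_routing.license_allotment_reference lar
    ON lar.license_allotment_xid = lasi.license_allotment_xid;
"""
print(QUERY)
```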

@@ -0,0 +1,177 @@
# Reverse Engineering Notes

# Useful commands
## Check licensing status
- `nvidia-smi -q | grep "License"`
**Output**
```
vGPU Software Licensed Product
License Status : Licensed (Expiry: 2023-1-14 12:59:52 GMT)
```
## Track licensing progress
- NVIDIA Grid Log: `journalctl -u nvidia-gridd -f`
```
systemd[1]: Started NVIDIA Grid Daemon.
nvidia-gridd[2986]: Configuration parameter ( ServerAddress ) not set
nvidia-gridd[2986]: vGPU Software package (0)
nvidia-gridd[2986]: Ignore service provider and node-locked licensing
nvidia-gridd[2986]: NLS initialized
nvidia-gridd[2986]: Acquiring license. (Info: license.nvidia.space; NVIDIA RTX Virtual Workstation)
nvidia-gridd[2986]: License acquired successfully. (Info: license.nvidia.space, NVIDIA RTX Virtual Workstation; Expiry: 2023-1-29 22:3:0 GMT)
```
# DLS-Container File-System (Docker)
## Configuration data
Most variables and configs are stored in `/var/lib/docker/volumes/configurations/_data`.
Files can be modified with `docker cp <container-id>:/venv/... /opt/localfile/...` and back.
(You may need to fix permissions with `docker exec -u 0 <container-id> chown nonroot:nonroot /venv/...`.)
## Dive / Docker image inspector
- `dive dls:appliance`
The source code is stored in `/venv/lib/python3.9/site-packages/nls_*`.
Image-Reference:
```
Tags: (unavailable)
Id: d1c7976a5d2b3681ff6c5a30f8187e4015187a83f3f285ba4a37a45458bd6b98
Digest: sha256:311223c5af7a298ec1104f5dc8c3019bfb0e1f77256dc3d995244ffb295a971f
Command:
#(nop) ADD file:c1900d3e3a29c29a743a8da86c437006ec5d2aa873fb24e48033b6bf492bb37b in /
```
## Private Key (Site-Key)
- `/etc/dls/config/decryptor/decryptor`
```shell
docker exec -it <container-id> /etc/dls/config/decryptor/decryptor > /tmp/private-key.pem
```
```
-----BEGIN RSA PRIVATE KEY-----
...
-----END RSA PRIVATE KEY-----
```
## Site Key Uri - `/etc/dls/config/site_key_uri.bin`
```
base64-content...
```
## DB Password - `/etc/dls/config/dls_db_password.bin`
```
base64-content...
```
**Decrypt database password**
```
cd /var/lib/docker/volumes/configurations/_data
cat dls_db_password.bin | base64 -d > dls_db_password.bin.raw
openssl rsautl -decrypt -inkey /tmp/private-key.pem -in dls_db_password.bin.raw
```
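
The same decryption works without openssl; a sketch using pycryptodome (assuming PKCS#1 v1.5 padding, which is the `openssl rsautl` default; file paths as above):

```python
from base64 import b64decode

from Crypto.Cipher import PKCS1_v1_5
from Crypto.PublicKey import RSA

# the site private key extracted via the decryptor (see above)
with open('/tmp/private-key.pem', 'rb') as f:
    key = RSA.import_key(f.read())

with open('dls_db_password.bin', 'rb') as f:
    ciphertext = b64decode(f.read())

# on a padding error, the sentinel is returned instead of an exception
password = PKCS1_v1_5.new(key).decrypt(ciphertext, sentinel=None)
print(password.decode('utf-8') if password else 'decryption failed')
```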
# Database
- Manipulating the database licenses is enough; no line of code needs to be changed to bypass
  licensing validations.
# Logging / Stack Trace
- https://docs.nvidia.com/license-system/latest/nvidia-license-system-user-guide/index.html#troubleshooting-dls-instance
**Failed licensing log**
```
{
"activity": 100,
"context": {
"SERVICE_INSTANCE_ID": "b43d6e46-d6d0-4943-8b8d-c66a5f6e0d38",
"SERVICE_INSTANCE_NAME": "DEFAULT_2022-12-14_12:48:30",
"description": "borrow failed: NotFoundError(no pool features found for: NVIDIA RTX Virtual Workstation)",
"event_type": null,
"function_name": "_evt",
"lineno": 54,
"module_name": "nls_dal_lease_dls.event",
"operation_id": "e72a8ca7-34cc-4e11-b80c-273592085a24",
"origin_ref": "3f7f5a50-a26b-425b-8d5e-157f63e72b1c",
"service_name": "nls_services_lease"
},
"detail": {
"oc": {
"license_allotment_xid": "10c4317f-7c4c-11ed-a524-0e4252a7e5f1",
"origin_ref": "3f7f5a50-a26b-425b-8d5e-157f63e72b1c",
"service_instance_xid": "b43d6e46-d6d0-4943-8b8d-c66a5f6e0d38"
},
"operation_id": "e72a8ca7-34cc-4e11-b80c-273592085a24"
},
"id": "0cc9e092-3b92-4652-8d9e-7622ef85dc79",
"metadata": {},
"ts": "2022-12-15T10:25:36.827661Z"
}
{
"activity": 400,
"context": {
"SERVICE_INSTANCE_ID": "b43d6e46-d6d0-4943-8b8d-c66a5f6e0d38",
"SERVICE_INSTANCE_NAME": "DEFAULT_2022-12-14_12:48:30",
"description": "lease_multi_create failed: no pool features found for: NVIDIA RTX Virtual Workstation",
"event_by": "system",
"function_name": "lease_multi_create",
"level": "warning",
"lineno": 157,
"module_name": "nls_services_lease.controllers.lease_multi_controller",
"operation_id": "e72a8ca7-34cc-4e11-b80c-273592085a24",
"service_name": "nls_services_lease"
},
"detail": {
"_msg": "lease_multi_create failed: no pool features found for: NVIDIA RTX Virtual Workstation",
"exec_info": ["NotFoundError", "NotFoundError(no pool features found for: NVIDIA RTX Virtual Workstation)", " File \"/venv/lib/python3.9/site-packages/nls_services_lease/controllers/lease_multi_controller.py\", line 127, in lease_multi_create\n data = _leaseMulti.lease_multi_create(event_args)\n File \"/venv/lib/python3.9/site-packages/nls_core_lease/lease_multi.py\", line 208, in lease_multi_create\n raise e\n File \"/venv/lib/python3.9/site-packages/nls_core_lease/lease_multi.py\", line 184, in lease_multi_create\n self._try_proposals(oc, mlr, results, detail)\n File \"/venv/lib/python3.9/site-packages/nls_core_lease/lease_multi.py\", line 219, in _try_proposals\n lease = self._leases.create(creator)\n File \"/venv/lib/python3.9/site-packages/nls_dal_lease_dls/leases.py\", line 230, in create\n features = self._get_features(creator)\n File \"/venv/lib/python3.9/site-packages/nls_dal_lease_dls/leases.py\", line 148, in _get_features\n self._explain_not_available(cur, creator)\n File \"/venv/lib/python3.9/site-packages/nls_dal_lease_dls/leases.py\", line 299, in _explain_not_available\n raise NotFoundError(f'no pool features found for: {lcc.product_name}')\n"],
"operation_id": "e72a8ca7-34cc-4e11-b80c-273592085a24"
},
"id": "282801b9-d612-40a5-9145-b56d8e420dac",
"metadata": {},
"ts": "2022-12-15T10:25:36.831673Z"
}
```
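
The useful fields in these events are `context.description` and `detail.exec_info`. A short sketch that filters a captured log (hypothetical file name; the events are concatenated JSON objects as shown above, not a JSON array):

```python
import json

with open('dls-events.json') as f:  # hypothetical capture of the log above
    blob = f.read().strip()

# decode one concatenated JSON object at a time
decoder = json.JSONDecoder()
pos = 0
while pos < len(blob):
    event, pos = decoder.raw_decode(blob, pos)
    while pos < len(blob) and blob[pos].isspace():
        pos += 1
    print(event.get('activity'), event.get('context', {}).get('description'))
```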
**Stack Trace**
```
"NotFoundError", "NotFoundError(no pool features found for: NVIDIA RTX Virtual Workstation)", " File \"/venv/lib/python3.9/site-packages/nls_services_lease/controllers/lease_multi_controller.py\", line 127, in lease_multi_create
data = _leaseMulti.lease_multi_create(event_args)
File \"/venv/lib/python3.9/site-packages/nls_core_lease/lease_multi.py\", line 208, in lease_multi_create
raise e
File \"/venv/lib/python3.9/site-packages/nls_core_lease/lease_multi.py\", line 184, in lease_multi_create
self._try_proposals(oc, mlr, results, detail)
File \"/venv/lib/python3.9/site-packages/nls_core_lease/lease_multi.py\", line 219, in _try_proposals
lease = self._leases.create(creator)
File \"/venv/lib/python3.9/site-packages/nls_dal_lease_dls/leases.py\", line 230, in create
features = self._get_features(creator)
File \"/venv/lib/python3.9/site-packages/nls_dal_lease_dls/leases.py\", line 148, in _get_features
self._explain_not_available(cur, creator)
File \"/venv/lib/python3.9/site-packages/nls_dal_lease_dls/leases.py\", line 299, in _explain_not_available
raise NotFoundError(f'no pool features found for: {lcc.product_name}')
"
```
# Nginx
- NGINX uses `/opt/certs/cert.pem` and `/opt/certs/key.pem`

requirements.txt

@@ -1,8 +1,8 @@
-fastapi==0.115.12
-uvicorn[standard]==0.34.1
-python-jose[cryptography]==3.4.0
-cryptography==44.0.2
-python-dateutil==2.9.0
-sqlalchemy==2.0.40
-markdown==3.8
-python-dotenv==1.1.0
+fastapi==0.115.6
+uvicorn[standard]==0.34.0
+python-jose==3.3.0
+pycryptodome==3.21.0
+python-dateutil==2.8.2
+sqlalchemy==2.0.37
+markdown==3.7
+python-dotenv==1.0.1

@@ -6,7 +6,7 @@ logger.setLevel(logging.INFO)
 URL = 'https://docs.nvidia.com/vgpu/index.html'
 
-BRANCH_STATUS_KEY = 'vGPU Branch Status'
+BRANCH_STATUS_KEY, SOFTWARE_BRANCH_KEY, = 'vGPU Branch Status', 'vGPU Software Branch'
 VGPU_KEY, GRID_KEY, DRIVER_BRANCH_KEY = 'vGPU Software', 'vGPU Software', 'Driver Branch'
 LINUX_VGPU_MANAGER_KEY, LINUX_DRIVER_KEY = 'Linux vGPU Manager', 'Linux Driver'
 WINDOWS_VGPU_MANAGER_KEY, WINDOWS_DRIVER_KEY = 'Windows vGPU Manager', 'Windows Driver'
@@ -26,15 +26,12 @@ def __driver_versions(html: 'BeautifulSoup'):
     # find wrapper for "DriverVersions" and find tables
     data = html.find('div', {'id': 'driver-versions'})
-    items = data.find_all('bsp-accordion', {'class': 'Accordion-items-item'})
+    items = data.findAll('bsp-accordion', {'class': 'Accordion-items-item'})
     for item in items:
         software_branch = item.find('div', {'class': 'Accordion-items-item-title'}).text.strip()
         software_branch = software_branch.replace(' Releases', '')
         matrix_key = software_branch.lower()
 
-        branch_status = item.find('a', href=True, string='Branch status')
-        branch_status = branch_status.next_sibling.replace(':', '').strip()
-
         # driver version info from table-heads (ths) and table-rows (trs)
         table = item.find('table')
         ths, trs = table.find_all('th'), table.find_all('tr')
@@ -45,20 +42,48 @@ def __driver_versions(html: 'BeautifulSoup'):
                 continue
             # create dict with table-heads as key and cell content as value
             x = {headers[i]: __strip(cell.text) for i, cell in enumerate(tds)}
-            x.setdefault(BRANCH_STATUS_KEY, branch_status)
             releases.append(x)
 
         # add to matrix
         MATRIX.update({matrix_key: {JSON_RELEASES_KEY: releases}})
 
+def __release_branches(html: 'BeautifulSoup'):
+    # find wrapper for "AllReleaseBranches" and find table
+    data = html.find('div', {'id': 'all-release-branches'})
+    table = data.find('table')
+
+    # branch releases info from table-heads (ths) and table-rows (trs)
+    ths, trs = table.find_all('th'), table.find_all('tr')
+    headers = [header.text.strip() for header in ths]
+    for trs in trs:
+        tds = trs.find_all('td')
+        if len(tds) == 0:  # skip empty
+            continue
+        # create dict with table-heads as key and cell content as value
+        x = {headers[i]: cell.text.strip() for i, cell in enumerate(tds)}
+
+        # get matrix_key
+        software_branch = x.get(SOFTWARE_BRANCH_KEY)
+        matrix_key = software_branch.lower()
+
+        # add to matrix
+        MATRIX.update({matrix_key: MATRIX.get(matrix_key) | x})
+
 def __debug():
     # print table head
-    s = f'{VGPU_KEY:^13} | {LINUX_VGPU_MANAGER_KEY:^21} | {LINUX_DRIVER_KEY:^21} | {WINDOWS_VGPU_MANAGER_KEY:^21} | {WINDOWS_DRIVER_KEY:^21} | {RELEASE_DATE_KEY:>21} | {BRANCH_STATUS_KEY:^21}'
+    s = f'{SOFTWARE_BRANCH_KEY:^21} | {BRANCH_STATUS_KEY:^21} | {VGPU_KEY:^13} | {LINUX_VGPU_MANAGER_KEY:^21} | {LINUX_DRIVER_KEY:^21} | {WINDOWS_VGPU_MANAGER_KEY:^21} | {WINDOWS_DRIVER_KEY:^21} | {RELEASE_DATE_KEY:>21} | {EOL_KEY:>21}'
     print(s)
 
     # iterate over dict & format some variables to not overload table
     for idx, (key, branch) in enumerate(MATRIX.items()):
+        branch_status = branch.get(BRANCH_STATUS_KEY)
+        branch_status = branch_status.replace('Branch ', '')
+        branch_status = branch_status.replace('Long-Term Support', 'LTS')
+        branch_status = branch_status.replace('Production', 'Prod.')
+        software_branch = branch.get(SOFTWARE_BRANCH_KEY).replace('NVIDIA ', '')
         for release in branch.get(JSON_RELEASES_KEY):
             version = release.get(VGPU_KEY, release.get(GRID_KEY, ''))
             linux_manager = release.get(LINUX_VGPU_MANAGER_KEY, release.get(ALT_VGPU_MANAGER_KEY, ''))
@@ -67,25 +92,13 @@ def __debug():
             windows_driver = release.get(WINDOWS_DRIVER_KEY)
             release_date = release.get(RELEASE_DATE_KEY)
             is_latest = release.get(VGPU_KEY) == branch.get(LATEST_KEY)
-            branch_status = __parse_branch_status(release.get(BRANCH_STATUS_KEY, ''))
 
             version = f'{version} *' if is_latest else version
-            s = f'{version:<13} | {linux_manager:<21} | {linux_driver:<21} | {windows_manager:<21} | {windows_driver:<21} | {release_date:>21} | {branch_status:^21}'
+            eol = branch.get(EOL_KEY) if is_latest else ''
+            s = f'{software_branch:^21} | {branch_status:^21} | {version:<13} | {linux_manager:<21} | {linux_driver:<21} | {windows_manager:<21} | {windows_driver:<21} | {release_date:>21} | {eol:>21}'
             print(s)
 
-def __parse_branch_status(string: str) -> str:
-    string = string.replace('Production Branch', 'Prod. -')
-    string = string.replace('Long-Term Support Branch', 'LTS -')
-
-    string = string.replace('supported until', '')
-
-    string = string.replace('EOL since', 'EOL - ')
-    string = string.replace('EOL from', 'EOL -')
-
-    return string
-
 def __dump(filename: str):
     import json
@@ -115,6 +128,7 @@ if __name__ == '__main__':
     # build matrix
     __driver_versions(soup)
+    __release_branches(soup)
 
     # debug output
     __debug()
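
`__release_branches` merges each per-branch row into the existing matrix entry with the dict union operator (`|`, PEP 584, Python 3.9+), which returns a new dict and lets right-hand keys win on clashes. A tiny illustration:

```python
# dict union: keeps the '$releases' list, adds the branch metadata on top
releases = {'$releases': [{'vGPU Software': '17.1'}]}
row = {'vGPU Software Branch': 'NVIDIA vGPU 17', 'vGPU Branch Status': 'Production Branch'}

merged = releases | row
assert '$releases' in merged and merged['vGPU Branch Status'] == 'Production Branch'
```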

test/main.py

@@ -16,7 +16,7 @@ sys.path.append('../')
 sys.path.append('../app')
 
 from app import main
-from util import PrivateKey, PublicKey
+from app.util import load_key
 
 client = TestClient(main.app)
 
@@ -25,11 +25,11 @@ ORIGIN_REF, ALLOTMENT_REF, SECRET = str(uuid4()), '20000000-0000-0000-0000-00000
 # INSTANCE_KEY_RSA = generate_key()
 # INSTANCE_KEY_PUB = INSTANCE_KEY_RSA.public_key()
-INSTANCE_KEY_RSA = PrivateKey.from_file(str(join(dirname(__file__), '../app/cert/instance.private.pem')))
-INSTANCE_KEY_PUB = PublicKey.from_file(str(join(dirname(__file__), '../app/cert/instance.public.pem')))
+INSTANCE_KEY_RSA = load_key(str(join(dirname(__file__), '../app/cert/instance.private.pem')))
+INSTANCE_KEY_PUB = load_key(str(join(dirname(__file__), '../app/cert/instance.public.pem')))
 
-jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.pem(), algorithm=ALGORITHMS.RS256)
-jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.pem(), algorithm=ALGORITHMS.RS256)
+jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
+jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
 
 
 def __bearer_token(origin_ref: str) -> str:
@@ -187,6 +187,8 @@ def test_leasing_v1_lessor():
     assert len(lease_result_list[0]['lease']['ref']) == 36
     assert str(UUID(lease_result_list[0]['lease']['ref'])) == lease_result_list[0]['lease']['ref']
 
+    return lease_result_list[0]['lease']['ref']
+
 
 def test_leasing_v1_lessor_lease():
     response = client.get('/leasing/v1/lessor/leases', headers={'authorization': __bearer_token(ORIGIN_REF)})
@@ -229,23 +231,7 @@ def test_leasing_v1_lease_delete():
 def test_leasing_v1_lessor_lease_remove():
-    # see "test_leasing_v1_lessor()"
-    payload = {
-        'fulfillment_context': {
-            'fulfillment_class_ref_list': []
-        },
-        'lease_proposal_list': [{
-            'license_type_qualifiers': {'count': 1},
-            'product': {'name': 'NVIDIA RTX Virtual Workstation'}
-        }],
-        'proposal_evaluation_mode': 'ALL_OF',
-        'scope_ref_list': [ALLOTMENT_REF]
-    }
-    response = client.post('/leasing/v1/lessor', json=payload, headers={'authorization': __bearer_token(ORIGIN_REF)})
-    lease_result_list = response.json().get('lease_result_list')
-    lease_ref = lease_result_list[0]['lease']['ref']
-    #
+    lease_ref = test_leasing_v1_lessor()
     response = client.delete('/leasing/v1/lessor/leases', headers={'authorization': __bearer_token(ORIGIN_REF)})
     assert response.status_code == 200
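
Calling `test_leasing_v1_lessor()` as a helper keeps the diff small, but it makes one test depend on another's body, and newer pytest versions warn when a test function returns a value. A pytest fixture is the more idiomatic shape for this setup; a sketch (hypothetical, not part of the diff; it reuses `client`, `ALLOTMENT_REF`, and `__bearer_token` from the test module):

```python
import pytest

@pytest.fixture
def lease_ref():
    # create a lease the same way test_leasing_v1_lessor() does, and hand its ref to the test
    payload = {
        'fulfillment_context': {'fulfillment_class_ref_list': []},
        'lease_proposal_list': [{
            'license_type_qualifiers': {'count': 1},
            'product': {'name': 'NVIDIA RTX Virtual Workstation'},
        }],
        'proposal_evaluation_mode': 'ALL_OF',
        'scope_ref_list': [ALLOTMENT_REF],
    }
    response = client.post('/leasing/v1/lessor', json=payload, headers={'authorization': __bearer_token(ORIGIN_REF)})
    return response.json()['lease_result_list'][0]['lease']['ref']

def test_leasing_v1_lessor_lease_remove(lease_ref):
    response = client.delete('/leasing/v1/lessor/leases', headers={'authorization': __bearer_token(ORIGIN_REF)})
    assert response.status_code == 200
```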