Compare commits

...

88 Commits

Author SHA1 Message Date
cd4674caad fixes 2024-06-21 19:35:42 +02:00
b0b627a3f0 Merge branch 'refs/heads/dev' into db
# Conflicts:
#	.gitlab-ci.yml
#	Dockerfile
#	README.md
#	app/main.py
#	app/orm.py
#	requirements.txt
2024-06-21 19:28:23 +02:00
c79455b84d added way to include driver version in api
use `create_driver_matrix_json.py` to generate the file `static/driver_matrix.json`. Logging is currently disabled so users are not confused when the file is missing. This is optional!
2024-06-21 19:01:33 +02:00
35fc5ea6b0 set log format 2024-06-21 18:59:23 +02:00
c9ac915055 code styling & comments 2024-06-13 20:34:45 +02:00
8c5850beda migrated from deprecated "startup" to "lifespan" hook (fastapi) 2024-06-13 20:34:27 +02:00
0d9e814d0d show if debug is enabled on app startup 2024-06-13 20:18:33 +02:00
5438317fb7 rearranged imports 2024-06-13 20:18:18 +02:00
21f19be8ab code styling & improved logging 2024-06-13 20:16:51 +02:00
eff6aae25d code styling 2024-06-13 19:53:57 +02:00
6473655e57 import fixes 2024-06-13 19:24:46 +02:00
c45aa1a2a8 fixed [[__TOC__]] to [TOC] to support "markdown" package 2024-06-12 21:27:56 +02:00
1d0631417d added link to gpu support matrix 2024-06-12 19:44:37 +02:00
847d3589c5 typos 2024-06-07 08:42:41 +02:00
ca53a4e084 updated supported vGPU releases 2024-06-07 08:42:37 +02:00
ad3b622c23 requirements.txt updated 2024-05-10 10:09:28 +02:00
e51d6bd391 added Ubuntu 24.04 as supported 2024-05-10 09:16:20 +02:00
78c1978dd5 added test matrix for python3.12 2024-05-10 09:15:46 +02:00
4ebb4d790e added ubuntu-24.04 "requirements-ubuntu-24.04.txt" 2024-05-10 08:22:47 +02:00
11f1456538 test image matrix 2024-05-10 07:56:13 +02:00
be6797efc7 requirements.txt updated 2024-04-23 07:43:39 +02:00
9eb91cbe1a link to proxmox-installer.sh and credits 2024-04-18 07:07:57 +02:00
395884f643 credits & further reading 2024-04-10 07:09:01 +02:00
254e4ee08c requirements.txt updated 2024-04-09 08:13:33 +02:00
07273c3ebd update vGPU Version Matrix 2024-04-09 08:01:20 +02:00
e04723d128 removed biggerthanshit link 2024-04-09 08:01:01 +02:00
8f498f4960 added 17.1 as supported 2024-04-08 15:10:01 +02:00
dd69f60fd0 added link to releases & release notes 2024-03-06 20:58:51 +01:00
a5d599a52c typos 2024-03-04 21:31:56 +01:00
66d203e72a requirements.txt updated 2024-03-04 21:13:12 +01:00
7800bf73a8 added "16.4" and "17.0" as supported 2024-02-27 15:44:34 +01:00
ed59260a10 added "16.3" support 2024-02-26 20:53:47 +01:00
7c70d121be requirements.txt updated 2024-02-26 20:53:33 +01:00
213e768708 removed todo 2024-01-18 17:02:09 +01:00
0696900d67 fixes 2024-01-18 16:58:33 +01:00
4fb90a22e3 make tests interruptible 2024-01-18 13:10:12 +01:00
6aa197dcae only run test matrix when "app" or "test" changes 2024-01-18 13:09:30 +01:00
46f6c9fe99 fixed CI/CD path from "/builds" to "/tmp/builds" 2024-01-18 13:06:45 +01:00
2baaeb561b run different jobs on "$CI_DEFAULT_BRANCH" 2024-01-18 12:59:06 +01:00
867cd7018a removed pylint 2024-01-18 12:58:43 +01:00
9c686913dd disabled pylint 2024-01-18 12:46:51 +01:00
d3c4dc3fb7 disabled code_quality debug 2024-01-18 08:34:43 +01:00
af8b1c2387 Update .codeclimate.yml 2024-01-17 23:13:20 +01:00
d37d96dc34 fixed test_coverage (fail on matrix) 2024-01-17 23:05:57 +01:00
21d052523f added code_quality debug 2024-01-17 22:43:47 +01:00
22110df791 added code_quality “SOURCE_CODE” variable 2024-01-17 22:37:33 +01:00
c7f354d50c removed "cython" from "test" 2024-01-17 11:33:22 +01:00
3bdfc94527 removed tests for "23.04"
> gcc -Wsign-compare -DNDEBUG -g -fwrapv -O3 -Wall -fPIC -I/tmp/pip-install-sazb8fvo/httptools_694f06fa2e354ed9ba9f5c167df7fce4/vendor/llhttp/include -I/tmp/pip-install-sazb8fvo/httptools_694f06fa2e354ed9ba9f5c167df7fce4/vendor/llhttp/src -I/usr/local/include/python3.11 -c httptools/parser/parser.c -o build/temp.linux-x86_64-cpython-311/httptools/parser/parser.o -O2
      httptools/parser/parser.c:212:12: fatal error: longintrepr.h: No such file or directory
2024-01-17 09:33:58 +01:00
9473f10653 added tests for Ubuntu "Mantic Minotaur" 2024-01-17 08:08:37 +01:00
e9ad1d7791 requirements.txt updated 2024-01-12 14:53:17 +01:00
f97ee9c8fc updated debian bookworm 12 dependencies 2024-01-12 14:25:03 +01:00
236948e483 updated test to debian bookworm 2023-11-03 14:03:48 +01:00
948934ad0e fixed testing dependency 2023-11-03 12:53:50 +01:00
3ef14e5522 added gcc as dependency 2023-11-03 11:41:44 +01:00
ee50ede2ea fixes 2023-11-03 10:49:06 +01:00
b11579de98 fixed debian package versions 2023-11-03 09:28:23 +01:00
dc33c29158 fixed versions & added 16.2 as supported 2023-11-03 08:23:07 +01:00
6f9107087b added os specific requirements.txt 2023-10-25 07:36:17 +02:00
01fd954252 implemented python test matrix for different python dependencies on different os releases 2023-10-25 07:31:29 +02:00
995dbdac80 README.md updated 2023-10-25 07:30:57 +02:00
51b28dcdc3 updated ubuntu from 22.10 (EOL) to 23.04 2023-10-16 09:50:24 +02:00
9512e29ed9 requirements.txt updated 2023-09-26 07:09:06 +02:00
713e33eed1 added 16.1 as supported nvidia driver release 2023-09-26 07:08:58 +02:00
4b16b02a7d added macOS as supported host (using python-venv) 2023-09-26 07:08:41 +02:00
3e9d7c0061 added Docker supported system architectures 2023-09-26 07:08:12 +02:00
7480cb4cf7 added link to driver compatibility section 2023-07-13 06:46:27 +02:00
2d7909546d requirements.txt updated 2023-07-10 18:47:46 +02:00
fec099ae81 added support for 16.0 drivers to readme 2023-07-10 13:32:32 +02:00
df5cb3c9c3 Merge branch 'main' into 'dev'
# Conflicts:
#   .gitlab-ci.yml
2023-07-04 16:19:49 +00:00
eca64fb1d5 push multiarch image to docker-hub 2023-07-04 17:47:10 +02:00
7ae1201c8f fixed new docker registry image path 2023-07-04 13:43:15 +02:00
a4e98dae46 fixed docker image path 2023-07-04 13:42:21 +02:00
d4267f3ee6 toggle api endpoints 2023-07-04 12:42:31 +02:00
c02ca762ea typos 2023-07-04 12:42:19 +02:00
10caf2310c added information that IPv6 may need to be disabled 2023-07-04 12:39:13 +02:00
7380e4328e removed mysql from included docker drivers 2023-07-04 12:38:54 +02:00
c1eaa33d9e added docker command to logging section
thanks to @libreshare (#2)
2023-07-04 12:22:22 +02:00
45545953ed improvements
thanks to @AbsolutelyFree (#1)
2023-07-04 12:19:07 +02:00
4c8c2ed3d6 fixed "deploy:pacman" 2023-07-04 11:55:26 +02:00
6483af4ba9 Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!26
2023-07-04 11:39:37 +02:00
e6595c05d5 fixed mariadb-client installation
ref. https://github.com/PyMySQL/mysqlclient/discussions/624
2023-07-04 11:06:00 +02:00
fb1dbea1ee added missing "pkg-config" for "mysqlclient==2.2.0"
ref. https://stackoverflow.com/questions/76533384/docker-alpine-build-fails-on-mysqlclient-installation-with-error-exception-can
2023-07-04 10:50:35 +02:00
f576ded038 fixed versions 2023-07-04 10:33:30 +02:00
54eaf55ee8 refactored docker-compose.yml into a very simple example, and moved the proxy to the "examples" directory 2023-07-04 10:24:11 +02:00
3119d2c7ea added 15.3 to supported drivers list 2023-07-04 10:18:07 +02:00
e40f4ce41f updated compatibility list 2023-07-04 10:17:45 +02:00
576f22333e docker-compose.yml - added note for TZ 2023-07-04 10:17:34 +02:00
0f53436700 requirements.txt updated 2023-07-04 10:17:12 +02:00
9 changed files with 347 additions and 101 deletions

View File

@@ -0,0 +1,10 @@
# https://packages.ubuntu.com
fastapi==0.101.0
uvicorn[standard]==0.27.1
python-jose[pycryptodome]==3.3.0
pycryptodome==3.20.0
python-dateutil==2.8.2
sqlalchemy==1.4.50
markdown==3.5.2
python-dotenv==1.0.1
jinja2==3.1.2

View File

@@ -126,7 +126,7 @@ build:pacman:
- "*.pkg.tar.zst"
test:
image: python:3.11-slim-bookworm
image: $IMAGE
stage: test
interruptible: true
rules:
@@ -141,10 +141,12 @@ test:
DATABASE: sqlite:///../app/db.sqlite
parallel:
matrix:
- REQUIREMENTS:
- IMAGE: [ 'python:3.11-slim-bookworm', 'python:3.12-slim-bullseye' ]
REQUIREMENTS:
- requirements.txt
- .DEBIAN/requirements-bookworm-12.txt
- .DEBIAN/requirements-ubuntu-23.10.txt
- .DEBIAN/requirements-ubuntu-24.04.txt
before_script:
- apt-get update && apt-get install -y python3-dev gcc
- pip install -r $REQUIREMENTS
@@ -205,7 +207,7 @@ test:debian:
test:ubuntu:
extends: .test:linux
image: ubuntu:23.10
image: ubuntu:24.04
test:archlinux:
image: archlinux:base
@@ -295,17 +297,13 @@ gemnasium-python-dependency_scanning:
deploy:docker:
extends: .deploy
image: docker:dind
stage: deploy
tags: [ docker ]
before_script:
- echo "Building docker image for commit $CI_COMMIT_SHA with version $CI_COMMIT_REF_NAME"
- docker buildx inspect
- docker buildx create --use
script:
- echo "========== GitLab-Registry =========="
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH
- IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH/$CI_COMMIT_REF_NAME
- docker buildx build --progress=plain --platform $DOCKER_BUILDX_PLATFORM --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:$CI_COMMIT_REF_NAME --push .
- docker buildx build --progress=plain --platform $DOCKER_BUILDX_PLATFORM --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:latest --push .
- echo "========== Docker-Hub =========="

View File

@@ -10,7 +10,7 @@ RUN apk update \
&& apk add --no-cache --virtual build-deps gcc g++ python3-dev musl-dev pkgconfig \
&& apk add --no-cache curl postgresql postgresql-dev mariadb-dev sqlite-dev \
&& pip install --no-cache-dir --upgrade uvicorn \
&& pip install --no-cache-dir psycopg2==2.9.6 mysqlclient==2.2.0 pysqlite3==0.5.1 \
&& pip install --no-cache-dir psycopg2==2.9.9 mysqlclient==2.2.4 pysqlite3==0.5.2 \
&& pip install --no-cache-dir -r /tmp/requirements.txt \
&& apk del build-deps

View File

@@ -9,15 +9,24 @@ Only the clients need a connection to this service on configured port.
**Official Links**
- https://git.collinwebdesigns.de/oscar.krause/fastapi-dls (Private Git)
- https://gitea.publichub.eu/oscar.krause/fastapi-dls (Public Git)
- https://hub.docker.com/r/collinwebdesigns/fastapi-dls (Docker-Hub `collinwebdesigns/fastapi-dls:latest`)
* https://git.collinwebdesigns.de/oscar.krause/fastapi-dls (Private Git)
* https://gitea.publichub.eu/oscar.krause/fastapi-dls (Public Git)
* https://hub.docker.com/r/collinwebdesigns/fastapi-dls (Docker-Hub `collinwebdesigns/fastapi-dls:latest`)
*All other repositories are forks! (which is not bad - just for information and bug reports)*
[Releases & Release Notes](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/releases)
**Further Reading**
* [NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox) - This document serves as a guide to install NVIDIA vGPU host drivers on the latest Proxmox VE version
* [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock) - Unlock vGPU functionality for consumer-grade Nvidia GPUs.
* [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q) - Guide for `vgpu_unlock`
* [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/) - Also known as `proxmox-installer.sh`
---
[[_TOC_]]
[TOC]
# Setup (Service)
@@ -102,7 +111,7 @@ volumes:
dls-db:
```
## Debian/Ubuntu/macOS (manual method using `git clone` and python virtual environment)
## Debian / Ubuntu / macOS (manual method using `git clone` and python virtual environment)
Tested on `Debian 11 (bullseye)`, `Debian 12 (bookworm)` and `macOS Ventura (13.6)`; Ubuntu may also work.
**Please note that setup on macOS differs from Debian based systems.**
@@ -309,7 +318,7 @@ EOF
Now you have to run `systemctl daemon-reload`. After that you can start the service
with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.
## Debian/Ubuntu (using `dpkg`)
## Debian / Ubuntu (using `dpkg` / `apt`)
Packages are available here:
@@ -317,10 +326,11 @@ Packages are available here:
Successfully tested with:
- Debian 12 (Bookworm)
- Debian 12 (Bookworm) (EOL: tba.)
- Ubuntu 22.10 (Kinetic Kudu) (EOL: July 20, 2023)
- Ubuntu 23.04 (Lunar Lobster) (EOL: January 2024)
- Ubuntu 23.10 (Mantic Minotaur) (EOL: July 2024)
- Ubuntu 24.04 (Noble Numbat) (EOL: April 2036)
Not working with:
@@ -416,9 +426,9 @@ After first success you have to replace `--issue` with `--renew`.
every 4.8 hours. If network connectivity is lost, the loss of connectivity is detected during license renewal and the
client has 19.2 hours in which to re-establish connectivity before its license expires.
\*3 Always use `https`, since guest-drivers only support secure connections!
\*2 Always use `https`, since guest-drivers only support secure connections!
\*4 If you recreate instance keys you need to **recreate client-token for each guest**!
\*3 If you recreate your instance keys you need to **recreate client-token for each guest**!
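The 4.8-hour / 19.2-hour figures above follow directly from the lease length and the `LEASE_RENEWAL_PERIOD` setting. A small sketch of that arithmetic, assuming an illustrative 24-hour lease and a renewal period of 0.2 (example values, not defaults asserted here):

```python
from datetime import timedelta

# Illustrative values only: a 24-hour lease with LEASE_RENEWAL_PERIOD=0.2
lease_expire_delta = timedelta(hours=24)
lease_renewal_period = 0.2

renewal_interval = lease_expire_delta * lease_renewal_period  # how often clients renew
grace_period = lease_expire_delta - renewal_interval          # time left after the last successful renewal

print(renewal_interval)  # 4:48:00  -> renew roughly every 4.8 hours
print(grace_period)      # 19:12:00 -> 19.2 hours to re-establish connectivity
```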
# Setup (Client)
@@ -426,25 +436,30 @@ client has 19.2 hours in which to re-establish connectivity before its license e
Successfully tested with these package versions:
| vGPU Software | Linux vGPU Manager | Linux Driver | Windows Driver | Release Date |
|---------------|--------------------|--------------|----------------|---------------|
| `16.3` | `535.154.02` | `535.154.05` | `538.15` | January 2024 |
| `16.2` | `535.129.03` | `535.129.03` | `537.70` | October 2023 |
| `16.1` | `535.104.06` | `535.104.05` | `537.13` | August 2023 |
| `16.0` | `535.54.06` | `535.54.03` | `536.22` | July 2023 |
| `15.3` | `525.125.03` | `525.125.06` | `529.11` | June 2023 |
| `15.2` | `525.105.14` | `525.105.17` | `528.89` | March 2023 |
| `15.1` | `525.85.07` | `525.85.05` | `528.24` | January 2023 |
| `15.0` | `525.60.12` | `525.60.13` | `527.41` | December 2022 |
| `14.4` | `510.108.03` | `510.108.03` | `514.08` | December 2022 |
| `14.3` | `510.108.03` | `510.108.03` | `513.91` | November 2022 |
| vGPU Software | Driver Branch | Linux vGPU Manager | Linux Driver | Windows Driver | Release Date | EOL Date |
|:-------------:|:-------------:|--------------------|--------------|----------------|--------------:|--------------:|
| `17.2` | R550 | `550.90.05` | `550.90.07` | `552.55` | June 2024 | February 2025 |
| `17.1` | R550 | `550.54.16` | `550.54.15` | `551.78` | March 2024 | |
| `17.0` | R550 | `550.54.10` | `550.54.14` | `551.61` | February 2024 | |
| `16.6` | R535 | `535.183.04` | `535.183.01` | `538.67` | June 2024 | July 2026 |
| `16.5` | R535 | `535.161.05` | `535.161.08` | `538.46` | February 2024 | |
| `16.4` | R535 | `535.161.05` | `535.161.07` | `538.33` | February 2024 | |
| `16.3` | R535 | `535.154.02` | `535.154.05` | `538.15` | January 2024 | |
| `16.2` | R535 | `535.129.03` | `535.129.03` | `537.70` | October 2023 | |
| `16.1` | R535 | `535.104.06` | `535.104.05` | `537.13` | August 2023 | |
| `16.0` | R535 | `535.54.06` | `535.54.03` | `536.22` | July 2023 | |
| `15.4` | R525 | `525.147.01` | `525.147.05` | `529.19` | June 2023 | October 2023 |
| `15.3` | R525 | `525.125.03` | `525.125.06` | `529.11` | June 2023 | |
| `15.2` | R525 | `525.105.14` | `525.105.17` | `528.89` | March 2023 | |
| `15.1` | R525 | `525.85.07` | `525.85.05` | `528.24` | January 2023 | |
| `15.0` | R525 | `525.60.12` | `525.60.13` | `527.41` | December 2022 | |
| `14.4` | R510 | `510.108.03` | `510.108.03` | `514.08` | December 2022 | February 2023 |
| `14.3` | R510 | `510.108.03` | `510.108.03` | `513.91` | November 2022 | |
- https://docs.nvidia.com/grid/index.html
- https://docs.nvidia.com/grid/gpus-supported-by-vgpu.html
*To get the latest drivers, visit Nvidia or search in Discord-Channel `GPU Unlocking` (Server-ID: `829786927829745685`) on channel `licensing` `biggerthanshit`
https://archive.biggerthanshit.com/NVIDIA/ (nvidia / b1gg3rth4nsh1t)
*To get the latest drivers, visit Nvidia or search in Discord-Channel `GPU Unlocking` (Server-ID: `829786927829745685`) on channel `licensing` `biggerthanshit`
## Linux
@@ -579,7 +594,7 @@ Generate client token, (see [installation](#installation)).
There are many other internal api endpoints for handling authentication and lease process.
</details>
# Troubleshoot
# Troubleshoot / Debug
**Please make sure that fastapi-dls and your guests use the same timezone!**
@@ -735,6 +750,12 @@ Thanks to vGPU community and all who uses this project and report bugs.
Special thanks to
- @samicrusader who created build file for ArchLinux
- @cyrus who wrote the section for openSUSE
- @midi who wrote the section for unRAID
- @samicrusader who created build file for **ArchLinux**
- @cyrus who wrote the section for **openSUSE**
- @midi who wrote the section for **unRAID**
- @polloloco who wrote the *[NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox)*
- @DualCoder who created the `vgpu_unlock` functionality [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock)
- Krutav Shah who wrote the [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q/)
- Wim van 't Hoog for the [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/)
And thanks to all people who contributed to all these libraries!

View File

@@ -1,50 +1,81 @@
import logging
from base64 import b64encode as b64enc
from calendar import timegm
from contextlib import asynccontextmanager
from datetime import datetime
from hashlib import sha256
from uuid import uuid4
from os.path import join, dirname
from json import loads as json_loads
from os import getenv as env
from os.path import join, dirname
from uuid import uuid4
from dateutil.relativedelta import relativedelta
from dotenv import load_dotenv
from fastapi import FastAPI
from fastapi.requests import Request
from json import loads as json_loads
from datetime import datetime
from dateutil.relativedelta import relativedelta
from calendar import timegm
from jose import jws, jwt, JWTError
from jose.constants import ALGORITHMS
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, \
RedirectResponse
from orm import init as db_init, migrate, Site, Instance, Origin, Lease
# Load variables
load_dotenv('../version.env')
# get local timezone
# Get current timezone
TZ = datetime.now().astimezone().tzinfo
# fetch version info
# Load basic variables
VERSION, COMMIT, DEBUG = env('VERSION', 'unknown'), env('COMMIT', 'unknown'), bool(env('DEBUG', False))
# fastapi setup
config = dict(openapi_url='/-/openapi.json', docs_url=None, redoc_url=None)
app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, **config)
# database setup
# Database connection
db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
db_init(db), migrate(db)
# DLS setup (static)
# Load DLS variables (all prefixed with "INSTANCE_*" is used as "SERVICE_INSTANCE_*" or "SI_*" in official dls service)
DLS_URL = str(env('DLS_URL', 'localhost'))
DLS_PORT = int(env('DLS_PORT', '443'))
CORS_ORIGINS = str(env('CORS_ORIGINS', '')).split(',') if (env('CORS_ORIGINS')) else [f'https://{DLS_URL}']
ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001')) # todo
# fastapi middleware
# FastAPI
@asynccontextmanager
async def lifespan(_: FastAPI):
# on startup
default_instance = Instance.get_default_instance(db)
lease_renewal_period = default_instance.lease_renewal_period
lease_renewal_delta = default_instance.get_lease_renewal_delta()
client_token_expire_delta = default_instance.get_client_token_expire_delta()
logger.info(f'''
Using timezone: {str(TZ)}. Make sure this is correct and matches your clients!
Your clients will renew their license every {str(Lease.calculate_renewal(lease_renewal_period, lease_renewal_delta))}.
If the renewal fails, the license is valid for {str(lease_renewal_delta)}.
Your client-token file (.tok) is valid for {str(client_token_expire_delta)}.
''')
logger.info(f'Debug is {"enabled" if DEBUG else "disabled"}.')
validate_settings()
yield
# on shutdown
logger.info(f'Shutting down ...')
config = dict(openapi_url=None, docs_url=None, redoc_url=None) # dict(openapi_url='/-/openapi.json', docs_url='/-/docs', redoc_url='/-/redoc')
app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, lifespan=lifespan, **config)
app.debug = DEBUG
app.add_middleware(
CORSMiddleware,
@@ -54,10 +85,20 @@ app.add_middleware(
allow_headers=['*'],
)
# logging
logging.basicConfig()
# Logging
LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO
logging.basicConfig(format='[{levelname:^7}] [{module:^15}] {message}', style='{')
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)
logger.setLevel(LOG_LEVEL)
logging.getLogger('util').setLevel(LOG_LEVEL)
logging.getLogger('NV').setLevel(LOG_LEVEL)
# Helper
def __get_token(request: Request, jwt_decode_key: "jose.jwt") -> dict:
authorization_header = request.headers.get('authorization')
token = authorization_header.split(' ')[1]
return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
def validate_settings():
@@ -72,11 +113,7 @@ def validate_settings():
session.close()
def __get_token(request: Request, jwt_decode_key: "jose.jwt") -> dict:
authorization_header = request.headers.get('authorization')
token = authorization_header.split(' ')[1]
return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
# Endpoints
@app.get('/', summary='Index')
async def index():
@@ -118,8 +155,7 @@ async def _config():
async def _readme():
from markdown import markdown
from util import load_file
content = load_file('../README.md').decode('utf-8')
content = load_file(join(dirname(__file__), '../README.md')).decode('utf-8')
return HTMLr(markdown(text=content, extensions=['tables', 'fenced_code', 'md_in_html', 'nl2br', 'toc']))
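Commit c45aa1a2a8 changed the README's TOC marker from `[[_TOC_]]` to `[TOC]` because the `markdown` package used by this endpoint only understands the latter via its `toc` extension. A minimal sketch, assuming the `markdown` package is installed:

```python
from markdown import markdown

# [TOC] is picked up by the "toc" extension; [[_TOC_]] (GitLab syntax) would be left as plain text.
text = "[TOC]\n\n# Setup (Service)\n\n# Setup (Client)\n"
html = markdown(text=text, extensions=['toc'])
print(html)  # the [TOC] marker is replaced by a <div class="toc"> ... </div> list of headings
```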
@@ -595,26 +631,6 @@ async def leasing_v1_lessor_shutdown(request: Request):
return JSONr(response)
@app.on_event('startup')
async def app_on_startup():
default_instance = Instance.get_default_instance(db)
lease_renewal_period = default_instance.lease_renewal_period
lease_renewal_delta = default_instance.get_lease_renewal_delta()
client_token_expire_delta = default_instance.get_client_token_expire_delta()
logger.info(f'''
Using timezone: {str(TZ)}. Make sure this is correct and matches your clients!
Your clients will renew their license every {str(Lease.calculate_renewal(lease_renewal_period, lease_renewal_delta))}.
If the renewal fails, the license is valid for {str(lease_renewal_delta)}.
Your client-token file (.tok) is valid for {str(client_token_expire_delta)}.
''')
validate_settings()
if __name__ == '__main__':
import uvicorn
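The hunks above replace the deprecated `@app.on_event('startup')` handler with FastAPI's lifespan context manager. A minimal, self-contained sketch of that pattern, independent of this project's Instance/Lease setup:

```python
from contextlib import asynccontextmanager

from fastapi import FastAPI


@asynccontextmanager
async def lifespan(_: FastAPI):
    # everything before "yield" runs once on startup (replaces @app.on_event('startup'))
    print('starting up ...')
    yield
    # everything after "yield" runs once on shutdown (replaces @app.on_event('shutdown'))
    print('shutting down ...')


app = FastAPI(lifespan=lifespan)


@app.get('/')
async def index():
    return {'status': 'ok'}
```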

View File

@@ -1,11 +1,12 @@
import logging
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text, BLOB, INT, FLOAT
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker, declarative_base, Session, relationship
from app.util import parse_key
from util import NV
logging.basicConfig()
logger = logging.getLogger(__name__)
@@ -148,6 +149,8 @@ class Origin(Base):
return f'Origin(origin_ref={self.origin_ref}, hostname={self.hostname})'
def serialize(self) -> dict:
_ = NV().find(self.guest_driver_version)
return {
'origin_ref': self.origin_ref,
# 'service_instance_xid': self.service_instance_xid,
@@ -155,6 +158,7 @@ class Origin(Base):
'guest_driver_version': self.guest_driver_version,
'os_platform': self.os_platform,
'os_version': self.os_version,
'$driver': _ if _ is not None else None,
}
@staticmethod
@@ -323,7 +327,6 @@ class Lease(Base):
def init_default_site(session: Session):
from uuid import uuid4
from app.util import generate_key
private_key = generate_key()
@@ -394,30 +397,37 @@ def migrate(engine: Engine):
# INSTANCE_KEY_RSA, INSTANCE_KEY_PUB
default_instance_private_key_path = str(join(dirname(__file__), 'cert/instance.private.pem'))
if instance_private_key := env('INSTANCE_KEY_RSA', None) is not None:
instance_private_key = env('INSTANCE_KEY_RSA', None)
if instance_private_key is not None:
instance.private_key = load_key(str(instance_private_key))
elif isfile(default_instance_private_key_path):
instance.private_key = load_key(default_instance_private_key_path)
default_instance_public_key_path = str(join(dirname(__file__), 'cert/instance.public.pem'))
if instance_public_key := env('INSTANCE_KEY_PUB', None) is not None:
instance_public_key = env('INSTANCE_KEY_PUB', None)
if instance_public_key is not None:
instance.public_key = load_key(str(instance_public_key))
elif isfile(default_instance_public_key_path):
instance.public_key = load_key(default_instance_public_key_path)
# TOKEN_EXPIRE_DELTA
if token_expire_delta := env('TOKEN_EXPIRE_DAYS', None) not in (None, 0):
token_expire_delta = env('TOKEN_EXPIRE_DAYS', None)
if token_expire_delta not in (None, 0):
instance.token_expire_delta = token_expire_delta * 86_400
if token_expire_delta := env('TOKEN_EXPIRE_HOURS', None) not in (None, 0):
token_expire_delta = env('TOKEN_EXPIRE_HOURS', None)
if token_expire_delta not in (None, 0):
instance.token_expire_delta = token_expire_delta * 3_600
# LEASE_EXPIRE_DELTA, LEASE_RENEWAL_DELTA
if lease_expire_delta := env('LEASE_EXPIRE_DAYS', None) not in (None, 0):
lease_expire_delta = env('LEASE_EXPIRE_DAYS', None)
if lease_expire_delta not in (None, 0):
instance.lease_expire_delta = lease_expire_delta * 86_400
if lease_expire_delta := env('LEASE_EXPIRE_HOURS', None) not in (None, 0):
lease_expire_delta = env('LEASE_EXPIRE_HOURS', None)
if lease_expire_delta not in (None, 0):
instance.lease_expire_delta = lease_expire_delta * 3_600
# LEASE_RENEWAL_PERIOD
if lease_renewal_period := env('LEASE_RENEWAL_PERIOD', None) is not None:
lease_renewal_period = env('LEASE_RENEWAL_PERIOD', None)
if lease_renewal_period is not None:
instance.lease_renewal_period = lease_renewal_period
# todo: update site, instance
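The rewritten environment checks above avoid a walrus-operator pitfall: in `x := env(...) is not None`, the comparison binds first, so the variable ends up holding a bool instead of the environment value. A short illustrative sketch (the variable names are hypothetical, not from the project):

```python
import os

os.environ['TOKEN_EXPIRE_DAYS'] = '7'

# Old pattern: the walrus captures the *comparison result*, not the value.
if value := os.getenv('TOKEN_EXPIRE_DAYS', None) is not None:
    print(type(value), value)  # <class 'bool'> True -- the string '7' is lost

# Fixed pattern (as in the diff): assign first, then compare.
value = os.getenv('TOKEN_EXPIRE_DAYS', None)
if value is not None:
    print(type(value), value)  # <class 'str'> 7

# Alternatively, parenthesize the walrus expression.
if (value := os.getenv('TOKEN_EXPIRE_DAYS', None)) is not None:
    print(type(value), value)  # <class 'str'> 7
```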

View File

@@ -1,10 +1,17 @@
def load_file(filename) -> bytes:
import logging
logging.basicConfig()
def load_file(filename: str) -> bytes:
log = logging.getLogger(f'{__name__}')
log.debug(f'Loading contents of file "{filename}"')
with open(filename, 'rb') as file:
content = file.read()
return content
def load_key(filename) -> "RsaKey":
def load_key(filename: str) -> "RsaKey":
try:
# Crypto | Cryptodome on Debian
from Crypto.PublicKey import RSA
@@ -13,6 +20,8 @@ def load_key(filename) -> "RsaKey":
from Cryptodome.PublicKey import RSA
from Cryptodome.PublicKey.RSA import RsaKey
log = logging.getLogger(__name__)
log.debug(f'Importing RSA-Key from "{filename}"')
return RSA.import_key(extern_key=load_file(filename), passphrase=None)
@@ -36,5 +45,50 @@ def generate_key() -> "RsaKey":
except ModuleNotFoundError:
from Cryptodome.PublicKey import RSA
from Cryptodome.PublicKey.RSA import RsaKey
log = logging.getLogger(__name__)
log.debug(f'Generating RSA-Key')
return RSA.generate(bits=2048)
class NV:
__DRIVER_MATRIX_FILENAME = 'static/driver_matrix.json'
__DRIVER_MATRIX: None | dict = None # https://docs.nvidia.com/grid/ => "Driver Versions"
def __init__(self):
self.log = logging.getLogger(self.__class__.__name__)
if NV.__DRIVER_MATRIX is None:
from json import load as json_load
try:
file = open(NV.__DRIVER_MATRIX_FILENAME)
NV.__DRIVER_MATRIX = json_load(file)
file.close()
self.log.debug(f'Successfully loaded "{NV.__DRIVER_MATRIX_FILENAME}".')
except Exception as e:
NV.__DRIVER_MATRIX = {} # init empty dict so the file is not re-opened on every request, only after an app restart
# self.log.warning(f'Failed to load "{NV.__DRIVER_MATRIX_FILENAME}": {e}')
@staticmethod
def find(version: str) -> dict | None:
if NV.__DRIVER_MATRIX is None:
return None
for idx, (key, branch) in enumerate(NV.__DRIVER_MATRIX.items()):
for release in branch.get('$releases'):
linux_driver = release.get('Linux Driver')
windows_driver = release.get('Windows Driver')
if version == linux_driver or version == windows_driver:
tmp = branch.copy()
tmp.pop('$releases')
is_latest = release.get('vGPU Software') == branch.get('Latest Release in Branch')
return {
'software_branch': branch.get('vGPU Software Branch'),
'branch_version': release.get('vGPU Software'),
'driver_branch': branch.get('Driver Branch'),
'branch_status': branch.get('vGPU Branch Status'),
'release_date': release.get('Release Date'),
'eol': branch.get('EOL Date') if is_latest else None,
'is_latest': is_latest,
}
return None
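A brief usage sketch for the new `NV` helper, assuming the process runs from the `app/` directory so that `static/driver_matrix.json` (generated by `create_driver_matrix_json.py`) can be found; `550.90.07` is the 17.2 Linux driver from the matrix above:

```python
from util import NV

# Resolve branch metadata for a guest driver version reported by a client.
info = NV().find('550.90.07')
if info is None:
    print('driver_matrix.json missing or version unknown')
else:
    print(info.get('software_branch'), info.get('branch_version'), info.get('eol'))
```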

View File

@@ -1,8 +1,8 @@
fastapi==0.110.0
uvicorn[standard]==0.27.1
fastapi==0.111.0
uvicorn[standard]==0.29.0
python-jose==3.3.0
pycryptodome==3.20.0
python-dateutil==2.8.2
sqlalchemy==2.0.27
markdown==3.5.2
sqlalchemy==2.0.30
markdown==3.6
python-dotenv==1.0.1

View File

@@ -0,0 +1,137 @@
import logging
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
URL = 'https://docs.nvidia.com/grid/'
BRANCH_STATUS_KEY, SOFTWARE_BRANCH_KEY, = 'vGPU Branch Status', 'vGPU Software Branch'
VGPU_KEY, GRID_KEY, DRIVER_BRANCH_KEY = 'vGPU Software', 'vGPU Software', 'Driver Branch'
LINUX_VGPU_MANAGER_KEY, LINUX_DRIVER_KEY = 'Linux vGPU Manager', 'Linux Driver'
WINDOWS_VGPU_MANAGER_KEY, WINDOWS_DRIVER_KEY = 'Windows vGPU Manager', 'Windows Driver'
ALT_VGPU_MANAGER_KEY = 'vGPU Manager'
RELEASE_DATE_KEY, LATEST_KEY, EOL_KEY = 'Release Date', 'Latest Release in Branch', 'EOL Date'
JSON_RELEASES_KEY = '$releases'
def __driver_versions(html: 'BeautifulSoup'):
def __strip(_: str) -> str:
# removes content after linebreak (e.g. "Hello\n World" to "Hello")
_ = _.strip()
tmp = _.split('\n')
if len(tmp) > 0:
return tmp[0]
return _
# find wrapper for "DriverVersions" and find tables
data = html.find('div', {'id': 'DriverVersions'})
tables = data.findAll('table')
for table in tables:
# parse software-branch (e.g. "vGPU software 17 Releases" and remove " Releases" for "matrix_key")
software_branch = table.parent.find_previous_sibling('button', {'class': 'accordion'}).text.strip()
software_branch = software_branch.replace(' Releases', '')
matrix_key = software_branch.lower()
# driver version info from table-heads (ths) and table-rows (trs)
ths, trs = table.find_all('th'), table.find_all('tr')
headers, releases = [header.text.strip() for header in ths], []
for tr in trs:
tds = tr.find_all('td')
if len(tds) == 0: # skip empty
continue
# create dict with table-heads as key and cell content as value
x = {headers[i]: __strip(cell.text) for i, cell in enumerate(tds)}
releases.append(x)
# add to matrix
MATRIX.update({matrix_key: {JSON_RELEASES_KEY: releases}})
def __release_branches(html: 'BeautifulSoup'):
# find wrapper for "AllReleaseBranches" and find table
data = html.find('div', {'id': 'AllReleaseBranches'})
table = data.find('table')
# branch releases info from table-heads (ths) and table-rows (trs)
ths, trs = table.find_all('th'), table.find_all('tr')
headers = [header.text.strip() for header in ths]
for tr in trs:
tds = tr.find_all('td')
if len(tds) == 0: # skip empty
continue
# create dict with table-heads as key and cell content as value
x = {headers[i]: cell.text.strip() for i, cell in enumerate(tds)}
# get matrix_key
software_branch = x.get(SOFTWARE_BRANCH_KEY)
matrix_key = software_branch.lower()
# add to matrix
MATRIX.update({matrix_key: MATRIX.get(matrix_key) | x})
def __debug():
# print table head
s = f'{SOFTWARE_BRANCH_KEY:^21} | {BRANCH_STATUS_KEY:^21} | {VGPU_KEY:^13} | {LINUX_VGPU_MANAGER_KEY:^21} | {LINUX_DRIVER_KEY:^21} | {WINDOWS_VGPU_MANAGER_KEY:^21} | {WINDOWS_DRIVER_KEY:^21} | {RELEASE_DATE_KEY:>21} | {EOL_KEY:>21}'
print(s)
# iterate over dict & format some variables to not overload table
for idx, (key, branch) in enumerate(MATRIX.items()):
branch_status = branch.get(BRANCH_STATUS_KEY)
branch_status = branch_status.replace('Branch ', '')
branch_status = branch_status.replace('Long-Term Support', 'LTS')
branch_status = branch_status.replace('Production', 'Prod.')
software_branch = branch.get(SOFTWARE_BRANCH_KEY).replace('NVIDIA ', '')
for release in branch.get(JSON_RELEASES_KEY):
version = release.get(VGPU_KEY, release.get(GRID_KEY, ''))
linux_manager = release.get(LINUX_VGPU_MANAGER_KEY, release.get(ALT_VGPU_MANAGER_KEY, ''))
linux_driver = release.get(LINUX_DRIVER_KEY)
windows_manager = release.get(WINDOWS_VGPU_MANAGER_KEY, release.get(ALT_VGPU_MANAGER_KEY, ''))
windows_driver = release.get(WINDOWS_DRIVER_KEY)
release_date = release.get(RELEASE_DATE_KEY)
is_latest = release.get(VGPU_KEY) == branch.get(LATEST_KEY)
version = f'{version} *' if is_latest else version
eol = branch.get(EOL_KEY) if is_latest else ''
s = f'{software_branch:^21} | {branch_status:^21} | {version:<13} | {linux_manager:<21} | {linux_driver:<21} | {windows_manager:<21} | {windows_driver:<21} | {release_date:>21} | {eol:>21}'
print(s)
def __dump(filename: str):
import json
file = open(filename, 'w')
json.dump(MATRIX, file)
file.close()
if __name__ == '__main__':
MATRIX = {}
try:
import httpx
from bs4 import BeautifulSoup
except Exception as e:
logger.error(f'Failed to import module: {e}')
logger.info('Run "pip install beautifulsoup4 httpx"')
exit(1)
r = httpx.get(URL)
if r.status_code != 200:
logger.error(f'Error loading "{URL}" with status code {r.status_code}.')
exit(2)
# parse html
soup = BeautifulSoup(r.text, features='html.parser')
# build matrix
__driver_versions(soup)
__release_branches(soup)
# debug output
__debug()
# dump data to file
__dump('../app/static/driver_matrix.json')
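A short sketch of consuming the dumped file afterwards (assuming the script was run so that `../app/static/driver_matrix.json` exists; key names follow the constants defined above):

```python
import json

with open('../app/static/driver_matrix.json') as file:
    matrix = json.load(file)

# Print each branch with its driver branch and the vGPU software versions it contains.
for key, branch in matrix.items():
    versions = [release.get('vGPU Software') for release in branch.get('$releases', [])]
    print(key, branch.get('Driver Branch'), versions)
```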