Merge branch 'dev' into 'main'

1.1

See merge request oscar.krause/fastapi-dls!14
Commit 0b7bedde66 by Oscar Krause, 2022-12-29 12:54:37 +01:00
18 changed files with 694 additions and 128 deletions

View File

@ -1,5 +1,5 @@
Package: fastapi-dls
Version: 1.0.0
Version: 0.0
Architecture: all
Maintainer: Oscar Krause oscar.krause@collinwebdesigns.de
Depends: python3, python3-fastapi, python3-uvicorn, python3-dotenv, python3-dateutil, python3-jose, python3-sqlalchemy, python3-pycryptodome, python3-markdown, uvicorn, openssl

View File

@ -41,10 +41,29 @@ if [[ ! -f $CONFIG_DIR/env ]]; then
echo "> Writing initial config ..."
touch $CONFIG_DIR/env
cat <<EOF >$CONFIG_DIR/env
# Toggle debug mode
#DEBUG=false
# Where the client can find the DLS server
DLS_URL=127.0.0.1
DLS_PORT=443
# CORS configuration
## comma separated list without spaces
#CORS_ORIGINS="https://$DLS_URL:$DLS_PORT"
# Lease expiration in days
LEASE_EXPIRE_DAYS=90
# Database location
## https://docs.sqlalchemy.org/en/14/core/engines.html
DATABASE=sqlite:///$CONFIG_DIR/db.sqlite
# UUIDs for identifying the instance
#SITE_KEY_XID="00000000-0000-0000-0000-000000000000"
#INSTANCE_REF="00000000-0000-0000-0000-000000000000"
# Site-wide signing keys
INSTANCE_KEY_RSA=$CONFIG_DIR/instance.private.pem
INSTANCE_KEY_PUB=$CONFIG_DIR/instance.public.pem
@ -75,7 +94,7 @@ if [[ -f $CONFIG_DIR/webserver.key ]]; then
if [ -x "$(command -v curl)" ]; then
echo "> Testing API ..."
source $CONFIG_DIR/env
curl --insecure -X GET https://$DLS_URL:$DLS_PORT/status
curl --insecure -X GET https://$DLS_URL:$DLS_PORT/-/health
else
echo "> Testing API failed, curl not available. Please test manually!"
fi

.PKGBUILD/PKGBUILD Normal file (49 lines)

@ -0,0 +1,49 @@
# Maintainer: samicrusader <hi@samicrusader.me>
# Maintainer: Oscar Krause <oscar.krause@collinwebdesigns.de>
pkgname=fastapi-dls
pkgver=0.0
pkgrel=1
pkgdesc='NVIDIA DLS server implementation with FastAPI'
arch=('any')
url='https://git.collinwebdesigns.de/oscar.krause/fastapi-dls'
license=('MIT')
depends=('python' 'python-jose' 'python-starlette' 'python-httpx' 'python-fastapi' 'python-dotenv' 'python-dateutil' 'python-sqlalchemy' 'python-pycryptodome' 'uvicorn' 'python-markdown' 'openssl')
provides=("$pkgname")
install="$pkgname.install"
source=('git+file:///builds/oscar.krause/fastapi-dls' # https://gitea.publichub.eu/oscar.krause/fastapi-dls.git
"$pkgname.default"
"$pkgname.service")
sha256sums=('SKIP'
'4c07e9b627853bd4f3a398371912fc72302dac33f43e4cb7e9b79746cc9c9136'
'10cb98d64f8bf37b11a60510793c187cc664e63c895d1205781c21fa2e703f32')
pkgver() {
source $srcdir/$pkgname/version.env
echo ${VERSION}
}
check() {
cd "$srcdir/$pkgname/test"
mkdir "$srcdir/$pkgname/app/cert"
openssl genrsa -out "$srcdir/$pkgname/app/cert/instance.private.pem" 2048
openssl rsa -in "$srcdir/$pkgname/app/cert/instance.private.pem" -outform PEM -pubout -out "$srcdir/$pkgname/app/cert/instance.public.pem"
python "$srcdir/$pkgname/test/main.py"
rm -rf "$srcdir/$pkgname/app/cert"
}
package() {
install -d "$pkgdir/usr/share/doc/$pkgname"
install -d "$pkgdir/var/lib/$pkgname/cert"
cp -r "$srcdir/$pkgname/doc"/* "$pkgdir/usr/share/doc/$pkgname/"
install -Dm644 "$srcdir/$pkgname/README.md" "$pkgdir/usr/share/doc/$pkgname/README.md"
install -Dm644 "$srcdir/$pkgname/version.env" "$pkgdir/usr/share/doc/$pkgname/version.env"
sed -i "s/README.md/\/usr\/share\/doc\/$pkgname\/README.md/g" "$srcdir/$pkgname/app/main.py"
sed -i "s/join(dirname(__file__), 'cert\//join('\/var\/lib\/$pkgname', 'cert\//g" "$srcdir/$pkgname/app/main.py"
install -Dm755 "$srcdir/$pkgname/app/main.py" "$pkgdir/opt/$pkgname/main.py"
install -Dm755 "$srcdir/$pkgname/app/orm.py" "$pkgdir/opt/$pkgname/orm.py"
install -Dm755 "$srcdir/$pkgname/app/util.py" "$pkgdir/opt/$pkgname/util.py"
install -Dm644 "$srcdir/$pkgname.default" "$pkgdir/etc/default/$pkgname"
install -Dm644 "$srcdir/$pkgname.service" "$pkgdir/usr/lib/systemd/system/$pkgname.service"
}

View File

@ -0,0 +1,23 @@
# Toggle FastAPI debug mode
DEBUG=false
# Where the client can find the DLS server
## DLS_URL should be a hostname
DLS_URL="localhost.localdomain"
DLS_PORT=8443
CORS_ORIGINS="https://$DLS_URL:$DLS_PORT"
# Lease expiration in days
LEASE_EXPIRE_DAYS=90
# Database location
## https://docs.sqlalchemy.org/en/14/core/engines.html
DATABASE="sqlite:////var/lib/fastapi-dls/db.sqlite"
# UUIDs for identifying the instance
SITE_KEY_XID="<<sitekey>>"
INSTANCE_REF="<<instanceref>>"
# Site-wide signing keys
INSTANCE_KEY_RSA="/var/lib/fastapi-dls/instance.private.pem"
INSTANCE_KEY_PUB="/var/lib/fastapi-dls/instance.public.pem"

View File

@ -0,0 +1,14 @@
post_install() {
sed -i "s/<<sitekey>>/$(uuidgen)/" /etc/default/fastapi-dls
sed -i "s/<<instanceref>>/$(uuidgen)/" /etc/default/fastapi-dls
echo 'The environment variables for this server can be edited at: /etc/default/fastapi-dls'
echo 'The server can be started with: systemctl start fastapi-dls.service'
echo
echo 'A valid HTTPS certificate needs to be installed to /var/lib/fastapi-dls/cert/webserver.{crt,key}'
echo 'A self-signed certificate can be generated with: openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /var/lib/fastapi-dls/cert/webserver.key -out /var/lib/fastapi-dls/cert/webserver.crt'
echo
echo 'The signing keys for your instance need to be generated as well. Generate them with these commands:'
echo 'openssl genrsa -out /var/lib/fastapi-dls/instance.private.pem 2048'
echo 'openssl rsa -in /var/lib/fastapi-dls/instance.private.pem -outform PEM -pubout -out /var/lib/fastapi-dls/instance.public.pem'
}

View File

@ -0,0 +1,15 @@
[Unit]
Description=FastAPI-DLS
Documentation=https://git.collinwebdesigns.de/oscar.krause/fastapi-dls
After=network.target
[Service]
Type=forking
EnvironmentFile=/etc/default/fastapi-dls
ExecStart=/usr/bin/python /opt/fastapi-dls/main.py
WorkingDirectory=/opt/fastapi-dls
Restart=on-abort
User=root
[Install]
WantedBy=multi-user.target

View File

@ -1,17 +1,45 @@
cache:
key: one-key-to-rule-them-all
build:debian:
# debian:bullseye-slim
image: debian:bookworm-slim # just to get "python3-jose" working
build:docker:
image: docker:dind
interruptible: true
stage: build
rules:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
changes:
- app/**/*
- Dockerfile
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
tags: [ docker ]
before_script:
- echo "COMMIT=${CI_COMMIT_SHA}" >> version.env # COMMIT=`git rev-parse HEAD`
script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}
- docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}
build:apt:
image: debian:bookworm-slim
interruptible: true
stage: build
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
changes:
- app/**/*
- .DEBIAN/**/*
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
before_script:
- echo "COMMIT=${CI_COMMIT_SHA}" >> version.env
- source version.env
# install build dependencies
- apt-get update -qq && apt-get install -qq -y build-essential
- chmod 0755 -R .
# create build directory for .deb sources
- mkdir build
# copy install instructions
- cp -r DEBIAN build/
- cp -r .DEBIAN build/DEBIAN
- chmod -R 0775 build/DEBIAN
# copy app into "/usr/share/fastapi-dls" as "/usr/share/fastapi-dls/app" & copy README.md and version.env
- mkdir -p build/usr/share/fastapi-dls
- cp -r app build/usr/share/fastapi-dls
@ -22,29 +50,55 @@ build:debian:
# cd into "build/"
- cd build/
script:
# set version based on value in "$VERSION" (which is set above from version.env)
- sed -i -E 's/(Version\:\s)0.0/\1'"$VERSION"'/g' DEBIAN/control
# build
- dpkg -b . build.deb
- dpkg -I build.deb
artifacts:
expire_in: 1 week
paths:
- build/build.deb
build:docker:
image: docker:dind
build:pacman:
image: archlinux:base-devel
interruptible: true
stage: build
rules:
- if: $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
tags: [ docker ]
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
changes:
- app/**/*
- .PKGBUILD/**/*
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
before_script:
- echo "COMMIT=${CI_COMMIT_SHA}" >> version.env # COMMIT=`git rev-parse HEAD`
- echo "COMMIT=${CI_COMMIT_SHA}" >> version.env
# install build dependencies
- pacman -Syu --noconfirm git
# create a build user because "makepkg" refuses to run as root
- useradd --no-create-home --shell=/bin/false build && usermod -L build
- 'echo "build ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers'
- 'echo "root ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers'
- chown -R build:build .
# move .PKGBUILD contents to root directory
- mv .PKGBUILD/* .
script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}
- docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}
- pwd
# download dependencies
- source PKGBUILD && pacman -Syu --noconfirm --needed --asdeps "${makedepends[@]}" "${depends[@]}"
# build
- sudo -u build makepkg -s
artifacts:
expire_in: 1 week
paths:
- "*.pkg.tar.zst"
test:
image: python:3.10-slim-bullseye
stage: test
rules:
- if: $CI_COMMIT_BRANCH
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
variables:
DATABASE: sqlite:///../app/db.sqlite
before_script:
@ -57,37 +111,75 @@ test:
script:
- pytest main.py
test:debian:
image: debian:bookworm-slim
.test:linux:
stage: test
rules:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
changes:
- app/**/*
- .DEBIAN/**/*
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
needs:
- job: build:apt
artifacts: true
variables:
DEBIAN_FRONTEND: noninteractive
needs:
- job: build:debian
artifacts: true
before_script:
- apt-get update -qq && apt-get install -qq -y jq
- apt-get update -qq && apt-get install -qq -y jq curl
script:
# test installation
- apt-get install -q -y ./build/build.deb --fix-missing
- openssl req -x509 -newkey rsa:2048 -nodes -out /etc/fastapi-dls/webserver.crt -keyout /etc/fastapi-dls/webserver.key -days 7 -subj "/C=DE/O=GitLab-CI/OU=Test/CN=localhost"
# copy example config from GitLab-CI-Variables
#- cat ${EXAMPLE_CONFIG} > /etc/fastapi-dls/env
# start service in background
- uvicorn --host 127.0.0.1 --port 443
- cd /usr/share/fastapi-dls/app
- uvicorn main:app
--host 127.0.0.1 --port 443
--app-dir /usr/share/fastapi-dls/app
--ssl-keyfile /etc/fastapi-dls/webserver.key
--ssl-certfile /opt/fastapi-dls/webserver.crt
--ssl-certfile /etc/fastapi-dls/webserver.crt
--proxy-headers &
- FASTAPI_DLS_PID=$!
- echo "Started service with pid $FASTAPI_DLS_PID"
# testing service
- if [ "`curl --insecure -s https://127.0.0.1/status | jq .status`" != "up" ]; then echo "Success"; else "Error"; fi
- if [ "$(curl --insecure -s https://127.0.0.1/-/health | jq -r .status)" = "up" ]; then echo "Success"; else echo "Error"; exit 1; fi
# cleanup
- kill $FASTAPI_DLS_PID
- apt-get purge -qq -y fastapi-dls
- apt-get autoremove -qq -y && apt-get clean -qq
test:debian:
extends: .test:linux
image: debian:bookworm-slim
test:ubuntu:
extends: .test:linux
image: ubuntu:22.10
test:archlinux:
image: archlinux:base
rules:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
changes:
- app/**/*
- .PKGBUILD/**/*
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
needs:
- job: build:pacman
artifacts: true
script:
- pacman -Sy
- pacman -U --noconfirm *.pkg.tar.zst
.deploy:
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_TAG
when: never
deploy:docker:
extends: .deploy
stage: deploy
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
@ -109,14 +201,15 @@ deploy:docker:
- docker push $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:${VERSION}
- docker push $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:latest
deploy:debian:
deploy:apt:
# doc: https://git.collinwebdesigns.de/help/user/packages/debian_repository/index.md#install-a-package
extends: .deploy
image: debian:bookworm-slim
stage: deploy
# rules:
# - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
needs:
- job: build:debian
- job: build:apt
artifacts: true
before_script:
- apt-get update -qq && apt-get install -qq -y curl lsb-release
@ -149,3 +242,49 @@ deploy:debian:
# using generic-package-registry until debian-registry is GA
# https://docs.gitlab.com/ee/user/packages/generic_packages/index.html#publish-a-generic-package-by-using-cicd
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file ${EXPORT_NAME} "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/${PACKAGE_NAME}/${PACKAGE_VERSION}/${EXPORT_NAME}"'
deploy:pacman:
extends: .deploy
image: archlinux:base-devel
stage: deploy
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
needs:
- job: build:pacman
artifacts: true
script:
- source .PKGBUILD/PKGBUILD
# fastapi-dls-1.0-1-any.pkg.tar.zst
- BUILD_NAME=${pkgname}-${pkgver}-${pkgrel}-any.pkg.tar.zst
- PACKAGE_NAME=${pkgname}
- PACKAGE_VERSION=${pkgver}
- PACKAGE_ARCH=any
- EXPORT_NAME=${BUILD_NAME}
- 'echo "PACKAGE_NAME: ${PACKAGE_NAME}"'
- 'echo "PACKAGE_VERSION: ${PACKAGE_VERSION}"'
- 'echo "PACKAGE_ARCH: ${PACKAGE_ARCH}"'
- 'echo "EXPORT_NAME: ${EXPORT_NAME}"'
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file ${EXPORT_NAME} "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/${PACKAGE_NAME}/${PACKAGE_VERSION}/${EXPORT_NAME}"'
release:
image: registry.gitlab.com/gitlab-org/release-cli:latest
stage: .post
rules:
- if: $CI_COMMIT_TAG
when: never
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
before_script:
- source version.env
script:
- echo "Running release-job for $VERSION"
release:
name: $CI_PROJECT_TITLE $VERSION
description: Release of $CI_PROJECT_TITLE version $VERSION
tag_name: $VERSION
ref: $CI_COMMIT_SHA
assets:
links:
- name: 'Package Registry'
url: 'https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/packages'
- name: 'Container Registry'
url: 'https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/container_registry/40'

CODEOWNERS Executable file (2 lines)

@ -0,0 +1,2 @@
* @oscar.krause
.PKGBUILD/ @samicrusader

View File

@ -14,5 +14,5 @@ COPY app /app
COPY version.env /version.env
COPY README.md /README.md
HEALTHCHECK --start-period=30s --interval=10s --timeout=5s --retries=3 CMD curl --insecure --fail https://localhost/status || exit 1
HEALTHCHECK --start-period=30s --interval=10s --timeout=5s --retries=3 CMD curl --insecure --fail https://localhost/-/health || exit 1
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "443", "--app-dir", "/app", "--proxy-headers", "--ssl-keyfile", "/app/cert/webserver.key", "--ssl-certfile", "/app/cert/webserver.crt"]

View File

@ -9,31 +9,58 @@ Only the clients need a connection to this service on the configured port.
## ToDo's
- migrate from `fastapi` to `flask`
- Support HTTP mode for use behind an external HTTPS proxy (disable uvicorn SSL when running behind a proxy)
## Endpoints
### `GET /`
### [`GET /`](/)
HTML rendered README.md.
Redirect to `/-/readme`.
### `GET /status`
### [`GET /status`](/status) (deprecated: use `/-/health`)
Status endpoint, used for *healthcheck*. Also shows the current version and commit hash.
### `GET /docs`
### [`GET /-/health`](/-/health)
OpenAPI specifications rendered from `GET /openapi.json`.
Status endpoint, used for *healthcheck*. Also shows the current version and commit hash.
### `GET /-/origins`
### [`GET /-/readme`](/-/readme)
HTML-rendered README.md.
### [`GET /-/docs`](/-/docs), [`GET /-/redoc`](/-/redoc)
OpenAPI specifications rendered from `GET /-/openapi.json`.
### [`GET /-/manage`](/-/manage)
Shows a very basic UI to delete origins or leases.
### `GET /-/origins?leases=false`
List registered origins.
### `GET /-/leases`
| Query Parameter | Default | Usage |
|-----------------|---------|--------------------------------------|
| `leases` | `false` | Include referenced leases per origin |
### `DELETE /-/origins`
Deletes all origins and their leases.
### `GET /-/leases?origin=false`
List current leases.
| Query Parameter | Default | Usage |
|-----------------|---------|-------------------------------------|
| `origin` | `false` | Include referenced origin per lease |
### `DELETE /-/lease/{lease_ref}`
Deletes a lease.
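These management endpoints can be exercised with `curl`, for example (hostname, port and `lease_ref` are placeholders; `--insecure` is only needed with a self-signed certificate):
```shell
# list registered origins, including their leases
curl --insecure "https://<dls-url>:<dls-port>/-/origins?leases=true"

# list current leases, including the referencing origin
curl --insecure "https://<dls-url>:<dls-port>/-/leases?origin=true"

# delete a single lease by its lease_ref
curl --insecure -X DELETE "https://<dls-url>:<dls-port>/-/lease/<lease_ref>"
```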
### `GET /client-token`
Generates a client token (see [installation](#installation)).
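A minimal sketch of fetching the token from a guest; the target path below is an assumption based on common NVIDIA guest-driver setups, not something defined in this section:
```shell
curl --insecure -X GET https://<dls-url>:<dls-port>/client-token \
  -o /etc/nvidia/ClientConfigToken/client_configuration_token.tok
```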
@ -200,7 +227,7 @@ Packages are available here:
Successfully tested with:
- Debian 12 (Bookworm)
- Debian 12 (Bookworm) (works, but not recommended, as it is currently in the *testing* state)
- Ubuntu 22.10 (Kinetic Kudu)
**Run this on your server instance**
@ -218,6 +245,23 @@ apt-get install -f --fix-missing
Start with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.
## ArchLinux (using `pacman`)
**Shout out to `samicrusader`, who created the build files for ArchLinux!**
Packages are available here:
- [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/packages)
```shell
pacman -Sy
FILENAME=/opt/fastapi-dls.pkg.tar.zst
curl -o $FILENAME <download-url>
pacman -U --noconfirm $FILENAME
```
Start with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.
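To verify the installation, query the health endpoint; a small sketch assuming the port from the packaged `/etc/default/fastapi-dls` (8443) was left unchanged:
```shell
curl --insecure https://localhost:8443/-/health
# expected payload: {"status": "up", "version": "...", "commit": "...", "debug": false}
```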
## Let's Encrypt Certificate
If you installed via Docker, you can use `traefik`; please refer to its documentation.
@ -236,18 +280,18 @@ After the first success you have to replace `--issue` with `--renew`.
# Configuration
| Variable | Default | Usage |
|---------------------|----------------------------------------|---------------------------------------------------------------------------------------|
| `DEBUG` | `false` | Toggles `fastapi` debug mode |
| `DLS_URL` | `localhost` | Used in client-token to tell guest driver where dls instance is reachable |
| `DLS_PORT` | `443` | Used in client-token to tell guest driver where dls instance is reachable |
| `LEASE_EXPIRE_DAYS` | `90` | Lease time in days |
| `DATABASE` | `sqlite:///db.sqlite` | See [official dataset docs](https://dataset.readthedocs.io/en/latest/quickstart.html) |
| `CORS_ORIGINS` | `https://{DLS_URL}` | Sets `Access-Control-Allow-Origin` header (comma separated string) |
| `SITE_KEY_XID` | `00000000-0000-0000-0000-000000000000` | Site identification uuid |
| `INSTANCE_REF` | `00000000-0000-0000-0000-000000000000` | Instance identification uuid |
| `INSTANCE_KEY_RSA`  | `<app-dir>/cert/instance.private.pem`  | Site-wide private RSA key for signing JWTs                                              |
| `INSTANCE_KEY_PUB` | `<app-dir>/cert/instance.public.pem` | Site-wide public key |
| Variable | Default | Usage |
|---------------------|----------------------------------------|-------------------------------------------------------------------------------------|
| `DEBUG` | `false` | Toggles `fastapi` debug mode |
| `DLS_URL` | `localhost` | Used in client-token to tell guest driver where dls instance is reachable |
| `DLS_PORT` | `443` | Used in client-token to tell guest driver where dls instance is reachable |
| `LEASE_EXPIRE_DAYS` | `90` | Lease time in days |
| `DATABASE` | `sqlite:///db.sqlite` | See [official SQLAlchemy docs](https://docs.sqlalchemy.org/en/14/core/engines.html) |
| `CORS_ORIGINS` | `https://{DLS_URL}` | Sets `Access-Control-Allow-Origin` header (comma separated string) |
| `SITE_KEY_XID` | `00000000-0000-0000-0000-000000000000` | Site identification uuid |
| `INSTANCE_REF` | `00000000-0000-0000-0000-000000000000` | Instance identification uuid |
| `INSTANCE_KEY_RSA`  | `<app-dir>/cert/instance.private.pem`  | Site-wide private RSA key for signing JWTs                                            |
| `INSTANCE_KEY_PUB` | `<app-dir>/cert/instance.public.pem` | Site-wide public key |
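For example, these variables can be passed directly to a container built from the `Dockerfile` above; a hedged sketch, with the image reference and hostname as placeholders:
```shell
# /app/cert must contain webserver.{crt,key} and instance.{private,public}.pem
# (paths taken from the Dockerfile CMD and the table defaults above)
docker run -d --name fastapi-dls \
  -p 443:443 \
  -e DLS_URL=dls.example.com \
  -e DLS_PORT=443 \
  -e LEASE_EXPIRE_DAYS=90 \
  -v "$(pwd)/cert:/app/cert" \
  <registry>/fastapi-dls:<tag>
```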
# Setup (Client)
@ -376,3 +420,10 @@ Dec 20 17:53:34 ubuntu-grid-server nvidia-gridd[10354]: License acquired success
```
</details>
# Credits
Thanks to the vGPU community and to everyone who uses this project and reports bugs.
Special thanks to @samicrusader, who created the build files for ArchLinux.

View File

@ -3,12 +3,11 @@ from base64 import b64encode as b64enc
from hashlib import sha256
from uuid import uuid4
from os.path import join, dirname
from os import getenv
from os import getenv as env
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException
from fastapi.requests import Request
from fastapi.encoders import jsonable_encoder
import json
from datetime import datetime
from dateutil.relativedelta import relativedelta
@ -16,56 +15,33 @@ from calendar import timegm
from jose import jws, jwk, jwt
from jose.constants import ALGORITHMS
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import StreamingResponse, JSONResponse, HTMLResponse
from starlette.responses import StreamingResponse, JSONResponse, HTMLResponse, Response, RedirectResponse
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
try:
# Crypto | Cryptodome on Debian
from Crypto.PublicKey import RSA
from Crypto.PublicKey.RSA import RsaKey
except ModuleNotFoundError:
from Cryptodome.PublicKey import RSA
from Cryptodome.PublicKey.RSA import RsaKey
from orm import Origin, Lease, init as db_init
from util import load_key, load_file
from orm import Origin, Lease, init as db_init, migrate
logger = logging.getLogger()
load_dotenv('../version.env')
VERSION, COMMIT, DEBUG = getenv('VERSION', 'unknown'), getenv('COMMIT', 'unknown'), bool(getenv('DEBUG', False))
VERSION, COMMIT, DEBUG = env('VERSION', 'unknown'), env('COMMIT', 'unknown'), bool(env('DEBUG', False))
config = dict(openapi_url='/-/openapi.json', docs_url='/-/docs', redoc_url='/-/redoc')
app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, **config)
db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
db_init(db), migrate(db)
def load_file(filename) -> bytes:
with open(filename, 'rb') as file:
content = file.read()
return content
def load_key(filename) -> RsaKey:
return RSA.import_key(extern_key=load_file(filename), passphrase=None)
# todo: initialize certificate (or should be done by user, and passed through "volumes"?)
__details = dict(
title='FastAPI-DLS',
description='Minimal Delegated License Service (DLS).',
version=VERSION,
)
app, db = FastAPI(**__details), create_engine(str(getenv('DATABASE', 'sqlite:///db.sqlite')))
db_init(db)
DLS_URL = str(getenv('DLS_URL', 'localhost'))
DLS_PORT = int(getenv('DLS_PORT', '443'))
SITE_KEY_XID = str(getenv('SITE_KEY_XID', '00000000-0000-0000-0000-000000000000'))
INSTANCE_REF = str(getenv('INSTANCE_REF', '00000000-0000-0000-0000-000000000000'))
INSTANCE_KEY_RSA = load_key(str(getenv('INSTANCE_KEY_RSA', join(dirname(__file__), 'cert/instance.private.pem'))))
INSTANCE_KEY_PUB = load_key(str(getenv('INSTANCE_KEY_PUB', join(dirname(__file__), 'cert/instance.public.pem'))))
DLS_URL = str(env('DLS_URL', 'localhost'))
DLS_PORT = int(env('DLS_PORT', '443'))
SITE_KEY_XID = str(env('SITE_KEY_XID', '00000000-0000-0000-0000-000000000000'))
INSTANCE_REF = str(env('INSTANCE_REF', '00000000-0000-0000-0000-000000000000'))
INSTANCE_KEY_RSA = load_key(str(env('INSTANCE_KEY_RSA', join(dirname(__file__), 'cert/instance.private.pem'))))
INSTANCE_KEY_PUB = load_key(str(env('INSTANCE_KEY_PUB', join(dirname(__file__), 'cert/instance.public.pem'))))
TOKEN_EXPIRE_DELTA = relativedelta(hours=1) # days=1
LEASE_EXPIRE_DELTA = relativedelta(days=int(getenv('LEASE_EXPIRE_DAYS', 90)))
LEASE_EXPIRE_DELTA = relativedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)))
CORS_ORIGINS = getenv('CORS_ORIGINS').split(',') if (getenv('CORS_ORIGINS')) else f'https://{DLS_URL}' # todo: prevent static https
CORS_ORIGINS = env('CORS_ORIGINS').split(',') if (env('CORS_ORIGINS')) else f'https://{DLS_URL}' # todo: prevent static https
jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
@ -88,36 +64,104 @@ def get_token(request: Request) -> dict:
return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
@app.get('/')
@app.get('/', summary='* Index')
async def index():
return RedirectResponse('/-/readme')
@app.get('/status', summary='* Status', description='Returns current service status, version (incl. git-commit) and some variables.', deprecated=True)
async def status(request: Request):
return JSONResponse({'status': 'up', 'version': VERSION, 'commit': COMMIT, 'debug': DEBUG})
@app.get('/-/health', summary='* Health')
async def _health(request: Request):
return JSONResponse({'status': 'up', 'version': VERSION, 'commit': COMMIT, 'debug': DEBUG})
@app.get('/-/readme', summary='* Readme')
async def _readme():
from markdown import markdown
content = load_file('../README.md').decode('utf-8')
return HTMLResponse(markdown(text=content, extensions=['tables', 'fenced_code', 'md_in_html', 'nl2br', 'toc']))
@app.get('/status')
async def status(request: Request):
return JSONResponse({'status': 'up', 'version': VERSION, 'commit': COMMIT, 'debug': DEBUG})
@app.get('/-/manage', summary='* Management UI')
async def _manage(request: Request):
response = '''
<!DOCTYPE html>
<html>
<head>
<title>FastAPI-DLS Management</title>
</head>
<body>
<button onclick="deleteOrigins()">delete origins and their leases</button>
<button onclick="deleteLease()">delete specific lease</button>
<script>
function deleteOrigins() {
var xhr = new XMLHttpRequest();
xhr.open("DELETE", '/-/origins', true);
xhr.send();
}
function deleteLease(lease_ref) {
if(lease_ref === undefined)
lease_ref = window.prompt("Please enter 'lease_ref' which should be deleted");
if(lease_ref === null || lease_ref === "")
return
var xhr = new XMLHttpRequest();
xhr.open("DELETE", `/-/lease/${lease_ref}`, true);
xhr.send();
}
</script>
</body>
</html>
'''
return HTMLResponse(response)
@app.get('/-/origins')
async def _origins(request: Request):
@app.get('/-/origins', summary='* Origins')
async def _origins(request: Request, leases: bool = False):
session = sessionmaker(bind=db)()
response = list(map(lambda x: jsonable_encoder(x), session.query(Origin).all()))
response = []
for origin in session.query(Origin).all():
x = origin.serialize()
if leases:
x['leases'] = list(map(lambda _: _.serialize(), Lease.find_by_origin_ref(db, origin.origin_ref)))
response.append(x)
session.close()
return JSONResponse(response)
@app.get('/-/leases')
async def _leases(request: Request):
@app.delete('/-/origins', summary='* Origins')
async def _origins_delete(request: Request):
Origin.delete(db)
return Response(status_code=201)
@app.get('/-/leases', summary='* Leases')
async def _leases(request: Request, origin: bool = False):
session = sessionmaker(bind=db)()
response = list(map(lambda x: jsonable_encoder(x), session.query(Lease).all()))
response = []
for lease in session.query(Lease).all():
x = lease.serialize()
if origin:
# assume that each lease has a valid origin record
x['origin'] = session.query(Origin).filter(Origin.origin_ref == lease.origin_ref).first().serialize()
response.append(x)
session.close()
return JSONResponse(response)
@app.delete('/-/lease/{lease_ref}', summary='* Lease')
async def _lease_delete(request: Request, lease_ref: str):
if Lease.delete(db, lease_ref) == 1:
return Response(status_code=201)
raise HTTPException(status_code=404, detail='lease not found')
# venv/lib/python3.9/site-packages/nls_core_service_instance/service_instance_token_manager.py
@app.get('/client-token')
@app.get('/client-token', summary='* Client-Token')
async def client_token():
cur_time = datetime.utcnow()
exp_time = cur_time + relativedelta(years=12)
@ -130,7 +174,7 @@ async def client_token():
"nbf": timegm(cur_time.timetuple()),
"exp": timegm(exp_time.timetuple()),
"update_mode": "ABSOLUTE",
"scope_ref_list": [str(uuid4())],
"scope_ref_list": [str(uuid4())], # this is our LEASE_REF
"fulfillment_class_ref_list": [],
"service_instance_configuration": {
"nls_service_instance_ref": INSTANCE_REF,

View File

@ -1,6 +1,6 @@
import datetime
from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, UniqueConstraint, update, and_, delete, inspect
from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker
@ -21,6 +21,15 @@ class Origin(Base):
def __repr__(self):
return f'Origin(origin_ref={self.origin_ref}, hostname={self.hostname})'
def serialize(self) -> dict:
return {
'origin_ref': self.origin_ref,
'hostname': self.hostname,
'guest_driver_version': self.guest_driver_version,
'os_platform': self.os_platform,
'os_version': self.os_version,
}
@staticmethod
def create_statement(engine: Engine):
from sqlalchemy.schema import CreateTable
@ -28,29 +37,41 @@ class Origin(Base):
@staticmethod
def create_or_update(engine: Engine, origin: "Origin"):
session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
session = sessionmaker(bind=engine)()
entity = session.query(Origin).filter(Origin.origin_ref == origin.origin_ref).first()
print(entity)
if entity is None:
session.add(origin)
else:
values = dict(
x = dict(
hostname=origin.hostname,
guest_driver_version=origin.guest_driver_version,
os_platform=origin.os_platform,
os_version=origin.os_version,
os_version=origin.os_version
)
session.execute(update(Origin).where(Origin.origin_ref == origin.origin_ref).values(**values))
session.execute(update(Origin).where(Origin.origin_ref == origin.origin_ref).values(**x))
session.commit()
session.flush()
session.close()
@staticmethod
def delete(engine: Engine, origins: ["Origin"] = None) -> int:
session = sessionmaker(bind=engine)()
if origins is None:
deletions = session.query(Origin).delete()
else:
deletions = session.query(Origin).filter(Origin.origin_ref.in_([x.origin_ref for x in origins])).delete()
session.commit()
session.close()
return deletions
class Lease(Base):
__tablename__ = "lease"
origin_ref = Column(CHAR(length=36), ForeignKey(Origin.origin_ref), primary_key=True, nullable=False, index=True) # uuid4
lease_ref = Column(CHAR(length=36), primary_key=True, nullable=False, index=True) # uuid4
origin_ref = Column(CHAR(length=36), ForeignKey(Origin.origin_ref, ondelete='CASCADE'), nullable=False, index=True) # uuid4
lease_created = Column(DATETIME(), nullable=False)
lease_expires = Column(DATETIME(), nullable=False)
lease_updated = Column(DATETIME(), nullable=False)
@ -58,6 +79,15 @@ class Lease(Base):
def __repr__(self):
return f'Lease(origin_ref={self.origin_ref}, lease_ref={self.lease_ref}, expires={self.lease_expires})'
def serialize(self) -> dict:
return {
'lease_ref': self.lease_ref,
'origin_ref': self.origin_ref,
'lease_created': self.lease_created.isoformat(),
'lease_expires': self.lease_expires.isoformat(),
'lease_updated': self.lease_updated.isoformat(),
}
@staticmethod
def create_statement(engine: Engine):
from sqlalchemy.schema import CreateTable
@ -65,43 +95,54 @@ class Lease(Base):
@staticmethod
def create_or_update(engine: Engine, lease: "Lease"):
session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
entity = session.query(Lease).filter(and_(Lease.origin_ref == lease.origin_ref, Lease.lease_ref == lease.lease_ref)).first()
session = sessionmaker(bind=engine)()
entity = session.query(Lease).filter(Lease.lease_ref == lease.lease_ref).first()
if entity is None:
if lease.lease_updated is None:
lease.lease_updated = lease.lease_created
session.add(lease)
else:
values = dict(lease_expires=lease.lease_expires, lease_updated=lease.lease_updated)
session.execute(update(Lease).where(and_(Lease.origin_ref == lease.origin_ref, Lease.lease_ref == lease.lease_ref)).values(**values))
x = dict(origin_ref=lease.origin_ref, lease_expires=lease.lease_expires, lease_updated=lease.lease_updated)
session.execute(update(Lease).where(Lease.lease_ref == lease.lease_ref).values(**x))
session.commit()
session.flush()
session.close()
@staticmethod
def find_by_origin_ref(engine: Engine, origin_ref: str) -> ["Lease"]:
session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
session = sessionmaker(bind=engine)()
entities = session.query(Lease).filter(Lease.origin_ref == origin_ref).all()
session.close()
return entities
@staticmethod
def find_by_origin_ref_and_lease_ref(engine: Engine, origin_ref: str, lease_ref: str) -> "Lease":
session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
session = sessionmaker(bind=engine)()
entity = session.query(Lease).filter(and_(Lease.origin_ref == origin_ref, Lease.lease_ref == lease_ref)).first()
session.close()
return entity
@staticmethod
def renew(engine: Engine, lease: "Lease", lease_expires: datetime.datetime, lease_updated: datetime.datetime):
session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
values = dict(lease_expires=lease.lease_expires, lease_updated=lease.lease_updated)
session.execute(update(Lease).where(and_(Lease.origin_ref == lease.origin_ref, Lease.lease_ref == lease.lease_ref)).values(**values))
session = sessionmaker(bind=engine)()
x = dict(lease_expires=lease_expires, lease_updated=lease_updated)
session.execute(update(Lease).where(and_(Lease.origin_ref == lease.origin_ref, Lease.lease_ref == lease.lease_ref)).values(**x))
session.commit()
session.close()
@staticmethod
def cleanup(engine: Engine, origin_ref: str) -> int:
session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
session = sessionmaker(bind=engine)()
deletions = session.query(Lease).filter(Lease.origin_ref == origin_ref).delete()
session.commit()
session.close()
return deletions
@staticmethod
def delete(engine: Engine, lease_ref: str) -> int:
session = sessionmaker(bind=engine)()
deletions = session.query(Lease).filter(Lease.lease_ref == lease_ref).delete()
session.commit()
session.close()
return deletions
@ -113,4 +154,21 @@ def init(engine: Engine):
for table in tables:
if not db.dialect.has_table(engine.connect(), table.__tablename__):
session.execute(str(table.create_statement(engine)))
session.commit()
session.close()
def migrate(engine: Engine):
db = inspect(engine)
def upgrade_1_0_to_1_1():
x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
x = next(_ for _ in x if _['name'] == 'origin_ref')
if x['primary_key'] > 0:
print('Found old database schema with "origin_ref" as primary-key in "lease" table. Dropping table!')
print(' Your leases will be recreated on the next renewal!')
print(' If an error message appears on the client, you can ignore it.')
Lease.__table__.drop(bind=engine)
init(engine)
upgrade_1_0_to_1_1()

app/util.py Normal file (21 lines)

@ -0,0 +1,21 @@
try:
# Crypto | Cryptodome on Debian
from Crypto.PublicKey import RSA
from Crypto.PublicKey.RSA import RsaKey
except ModuleNotFoundError:
from Cryptodome.PublicKey import RSA
from Cryptodome.PublicKey.RSA import RsaKey
def load_file(filename) -> bytes:
with open(filename, 'rb') as file:
content = file.read()
return content
def load_key(filename) -> RsaKey:
return RSA.import_key(extern_key=load_file(filename), passphrase=None)
def generate_key() -> RsaKey:
return RSA.generate(bits=2048)

View File

@ -1,6 +1,13 @@
from base64 import b64encode as b64enc
from hashlib import sha256
from calendar import timegm
from datetime import datetime
from os.path import dirname, join
from uuid import uuid4
from jose import jwt
from dateutil.relativedelta import relativedelta
from jose import jwt, jwk
from jose.constants import ALGORITHMS
from starlette.testclient import TestClient
import sys
@ -9,10 +16,21 @@ sys.path.append('../')
sys.path.append('../app')
from app import main
from app.util import generate_key, load_key
client = TestClient(main.app)
ORIGIN_REF = str(uuid4())
ORIGIN_REF, LEASE_REF = str(uuid4()), str(uuid4())
SECRET = "HelloWorld"
# INSTANCE_KEY_RSA = generate_key()
# INSTANCE_KEY_PUB = INSTANCE_KEY_RSA.public_key()
INSTANCE_KEY_RSA = load_key(str(join(dirname(__file__), '../app/cert/instance.private.pem')))
INSTANCE_KEY_PUB = load_key(str(join(dirname(__file__), '../app/cert/instance.public.pem')))
jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
def test_index():
@ -26,11 +44,43 @@ def test_status():
assert response.json()['status'] == 'up'
def test_health():
response = client.get('/-/health')
assert response.status_code == 200
assert response.json()['status'] == 'up'
def test_readme():
response = client.get('/-/readme')
assert response.status_code == 200
def test_manage():
response = client.get('/-/manage')
assert response.status_code == 200
def test_client_token():
response = client.get('/client-token')
assert response.status_code == 200
def test_origins():
pass
def test_origins_delete():
pass
def test_leases():
pass
def test_lease_delete():
pass
def test_auth_v1_origin():
payload = {
"registration_pending": False,
@ -52,9 +102,30 @@ def test_auth_v1_origin():
assert response.json()['origin_ref'] == ORIGIN_REF
def auth_v1_origin_update():
payload = {
"registration_pending": False,
"environment": {
"guest_driver_version": "guest_driver_version",
"hostname": "myhost",
"ip_address_list": ["192.168.1.123"],
"os_version": "os_version",
"os_platform": "os_platform",
"fingerprint": {"mac_address_list": ["ff:ff:ff:ff:ff:ff"]},
"host_driver_version": "host_driver_version"
},
"update_pending": False,
"candidate_origin_ref": ORIGIN_REF,
}
response = client.post('/auth/v1/origin/update', json=payload)
assert response.status_code == 200
assert response.json()['origin_ref'] == ORIGIN_REF
def test_auth_v1_code():
payload = {
"code_challenge": "0wmaiAMAlTIDyz4Fgt2/j0tXnGv72TYbbLs4ISRCZlY",
"code_challenge": b64enc(sha256(SECRET.encode('utf-8')).digest()).rstrip(b'=').decode('utf-8'),
"origin_ref": ORIGIN_REF,
}
@ -66,20 +137,80 @@ def test_auth_v1_code():
def test_auth_v1_token():
pass
cur_time = datetime.utcnow()
access_expires_on = cur_time + relativedelta(hours=1)
payload = {
"iat": timegm(cur_time.timetuple()),
"exp": timegm(access_expires_on.timetuple()),
"challenge": b64enc(sha256(SECRET.encode('utf-8')).digest()).rstrip(b'=').decode('utf-8'),
"origin_ref": ORIGIN_REF,
"key_ref": "00000000-0000-0000-0000-000000000000",
"kid": "00000000-0000-0000-0000-000000000000"
}
payload = {
"auth_code": jwt.encode(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')},
algorithm=ALGORITHMS.RS256),
"code_verifier": SECRET,
}
response = client.post('/auth/v1/token', json=payload)
assert response.status_code == 200
token = response.json()['auth_token']
payload = jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
assert payload['origin_ref'] == ORIGIN_REF
def test_leasing_v1_lessor():
pass
payload = {
'fulfillment_context': {
'fulfillment_class_ref_list': []
},
'lease_proposal_list': [{
'license_type_qualifiers': {'count': 1},
'product': {'name': 'NVIDIA RTX Virtual Workstation'}
}],
'proposal_evaluation_mode': 'ALL_OF',
'scope_ref_list': [LEASE_REF]
}
bearer_token = jwt.encode({"origin_ref": ORIGIN_REF}, key=jwt_encode_key, algorithm=ALGORITHMS.RS256)
bearer_token = f'Bearer {bearer_token}'
response = client.post('/leasing/v1/lessor', json=payload, headers={'authorization': bearer_token})
assert response.status_code == 200
lease_result_list = response.json()['lease_result_list']
assert len(lease_result_list) == 1
assert lease_result_list[0]['lease']['ref'] == LEASE_REF
def test_leasing_v1_lessor_lease():
pass
bearer_token = jwt.encode({"origin_ref": ORIGIN_REF}, key=jwt_encode_key, algorithm=ALGORITHMS.RS256)
bearer_token = f'Bearer {bearer_token}'
response = client.get('/leasing/v1/lessor/leases', headers={'authorization': bearer_token})
assert response.status_code == 200
active_lease_list = response.json()['active_lease_list']
assert len(active_lease_list) == 1
assert active_lease_list[0] == LEASE_REF
def test_leasing_v1_lease_renew():
pass
bearer_token = jwt.encode({"origin_ref": ORIGIN_REF}, key=jwt_encode_key, algorithm=ALGORITHMS.RS256)
bearer_token = f'Bearer {bearer_token}'
response = client.put(f'/leasing/v1/lease/{LEASE_REF}', headers={'authorization': bearer_token})
assert response.status_code == 200
assert response.json()['lease_ref'] == LEASE_REF
def test_leasing_v1_lessor_lease_remove():
pass
bearer_token = jwt.encode({"origin_ref": ORIGIN_REF}, key=jwt_encode_key, algorithm=ALGORITHMS.RS256)
bearer_token = f'Bearer {bearer_token}'
response = client.delete('/leasing/v1/lessor/leases', headers={'authorization': bearer_token})
assert response.status_code == 200
released_lease_list = response.json()['released_lease_list']
assert len(released_lease_list) == 1
assert released_lease_list[0] == LEASE_REF

View File

@ -1 +1 @@
VERSION=1.0.0
VERSION=1.1