forked from oscar.krause/fastapi-dls

Compare commits: main...archlinux- (7 commits)

Commits (SHA1): 73265631a2, 8cf7370b4d, 982ec714d3, 07cf0d8042, 49138fb2e9, f27d1c383e, 334f6a704f
@@ -1,27 +0,0 @@
-# Toggle debug mode
-#DEBUG=false
-
-# Where the client can find the DLS server
-DLS_URL=127.0.0.1
-DLS_PORT=443
-
-# CORS configuration
-## comma separated list without spaces
-#CORS_ORIGINS="https://$DLS_URL:$DLS_PORT"
-
-# Lease expiration in days
-LEASE_EXPIRE_DAYS=90
-LEASE_RENEWAL_PERIOD=0.2
-
-# Database location
-## https://docs.sqlalchemy.org/en/14/core/engines.html
-DATABASE=sqlite:////etc/fastapi-dls/db.sqlite
-
-# UUIDs for identifying the instance
-#SITE_KEY_XID="00000000-0000-0000-0000-000000000000"
-#INSTANCE_REF="10000000-0000-0000-0000-000000000001"
-#ALLOTMENT_REF="20000000-0000-0000-0000-000000000001"
-
-# Site-wide signing keys
-INSTANCE_KEY_RSA=/etc/fastapi-dls/instance.private.pem
-INSTANCE_KEY_PUB=/etc/fastapi-dls/instance.public.pem
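The `CORS_ORIGINS` comment above calls for a comma-separated list without spaces; an illustrative value (the hostnames are placeholders, not from the repo):

```shell
# sketch only: two allowed origins, no spaces between entries
CORS_ORIGINS="https://dls.example.org:443,https://10.0.0.2:443"
```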
@@ -1,25 +0,0 @@
-[Unit]
-Description=Service for fastapi-dls
-Documentation=https://git.collinwebdesigns.de/oscar.krause/fastapi-dls
-After=network.target
-
-[Service]
-User=www-data
-Group=www-data
-AmbientCapabilities=CAP_NET_BIND_SERVICE
-WorkingDirectory=/usr/share/fastapi-dls/app
-EnvironmentFile=/etc/fastapi-dls/env
-ExecStart=uvicorn main:app \
-  --env-file /etc/fastapi-dls/env \
-  --host $DLS_URL --port $DLS_PORT \
-  --app-dir /usr/share/fastapi-dls/app \
-  --ssl-keyfile /etc/fastapi-dls/webserver.key \
-  --ssl-certfile /etc/fastapi-dls/webserver.crt \
-  --proxy-headers
-Restart=always
-KillSignal=SIGQUIT
-Type=simple
-NotifyAccess=all
-
-[Install]
-WantedBy=multi-user.target
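For reference, a unit file like the one removed above is activated with the standard systemd commands; a minimal sketch (only the unit name is taken from the file, everything else is stock systemd):

```shell
systemctl daemon-reload                      # pick up the new/changed unit file
systemctl enable --now fastapi-dls.service   # start immediately and on every boot
journalctl -u fastapi-dls.service -n 50      # inspect startup logs if it fails
```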
@@ -1,60 +0,0 @@
-#!/bin/bash
-
-WORKING_DIR=/usr/share/fastapi-dls
-CONFIG_DIR=/etc/fastapi-dls
-
-if [ ! -f $CONFIG_DIR/instance.private.pem ]; then
-  echo "> Create dls-instance keypair ..."
-  openssl genrsa -out $CONFIG_DIR/instance.private.pem 2048
-  openssl rsa -in $CONFIG_DIR/instance.private.pem -outform PEM -pubout -out $CONFIG_DIR/instance.public.pem
-else
-  echo "> Create dls-instance keypair skipped! (exists)"
-fi
-
-while true; do
-  [ -f $CONFIG_DIR/webserver.key ] && default_answer="N" || default_answer="Y"
-  [ $default_answer == "Y" ] && V="Y/n" || V="y/N"
-  read -p "> Do you wish to create self-signed webserver certificate? [${V}]" yn
-  yn=${yn:-$default_answer} # ${parameter:-word} If parameter is unset or null, the expansion of word is substituted. Otherwise, the value of parameter is substituted.
-  case $yn in
-    [Yy]*)
-      echo "> Generating keypair ..."
-      openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout $CONFIG_DIR/webserver.key -out $CONFIG_DIR/webserver.crt
-      break
-      ;;
-    [Nn]*) echo "> Generating keypair skipped! (exists)"; break ;;
-    *) echo "Please answer [y] or [n]." ;;
-  esac
-done
-
-if [ -f $CONFIG_DIR/webserver.key ]; then
-  echo "> Starting service ..."
-  systemctl start fastapi-dls.service
-
-  if [ -x "$(command -v curl)" ]; then
-    echo "> Testing API ..."
-    source $CONFIG_DIR/env
-    curl --insecure -X GET https://$DLS_URL:$DLS_PORT/-/health
-  else
-    echo "> Testing API failed, curl not available. Please test manually!"
-  fi
-fi
-
-chown -R www-data:www-data $CONFIG_DIR
-chown -R www-data:www-data $WORKING_DIR
-
-cat <<EOF
-
-# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
-#                                                                             #
-# fastapi-dls is now installed.                                               #
-#                                                                             #
-# Service should be up and running.                                           #
-# Webservice is listen to https://localhost                                   #
-#                                                                             #
-# Configuration is stored in /etc/fastapi-dls/env.                            #
-#                                                                             #
-#                                                                             #
-# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
-
-EOF
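The script above generates the instance keypair non-interactively; a quick way to confirm the two PEM files actually belong together is to compare their moduli (paths from the script, the openssl invocations are standard):

```shell
# identical digests mean the public key was derived from the private key
openssl rsa -in /etc/fastapi-dls/instance.private.pem -noout -modulus | openssl md5
openssl rsa -pubin -in /etc/fastapi-dls/instance.public.pem -noout -modulus | openssl md5
```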
@@ -1,9 +0,0 @@
-#!/bin/bash
-
-# is removed automatically
-#if [ "$1" = purge ] && [ -d /usr/share/fastapi-dls ]; then
-#  echo "> Removing app."
-#  rm -r /usr/share/fastapi-dls
-#fi
-
-echo -e "> Done."
@@ -1,8 +1,8 @@
+# Maintainer: samicrusader <hi@samicrusader.me>
 # Maintainer: Oscar Krause <oscar.krause@collinwebdesigns.de>
-# Contributor: samicrusader <hi@samicrusader.me>
 
 pkgname=fastapi-dls
-pkgver=1.1
+pkgver=1.0
 pkgrel=1
 pkgdesc='NVIDIA DLS server implementation with FastAPI'
 arch=('any')
@@ -11,21 +11,12 @@ license=('MIT')
 depends=('python' 'python-jose' 'python-starlette' 'python-httpx' 'python-fastapi' 'python-dotenv' 'python-dateutil' 'python-sqlalchemy' 'python-pycryptodome' 'uvicorn' 'python-markdown' 'openssl')
 provider=("$pkgname")
 install="$pkgname.install"
-backup=('etc/default/fastapi-dls')
 source=('git+file:///builds/oscar.krause/fastapi-dls' # https://gitea.publichub.eu/oscar.krause/fastapi-dls.git
         "$pkgname.default"
-        "$pkgname.service"
-        "$pkgname.tmpfiles")
+        "$pkgname.service")
 sha256sums=('SKIP'
-            'fbd015449a30c0ae82733289a56eb98151dcfab66c91b37fe8e202e39f7a5edb'
-            '2719338541104c537453a65261c012dda58e1dbee99154cf4f33b526ee6ca22e'
-            '3dc60140c08122a8ec0e7fa7f0937eb8c1288058890ba09478420fc30ce9e30c')
+            'd8b2216b67a2f8f35ad6f07c825839794f7c34456a72caadd9fc110810348d90'
+            '10cb98d64f8bf37b11a60510793c187cc664e63c895d1205781c21fa2e703f32')
 
-pkgver() {
-  echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > $srcdir/$pkgname/version.env
-  source $srcdir/$pkgname/version.env
-  echo $VERSION
-}
-
 check() {
   cd "$srcdir/$pkgname/test"
@@ -41,14 +32,11 @@ package() {
   install -d "$pkgdir/var/lib/$pkgname/cert"
   cp -r "$srcdir/$pkgname/doc"/* "$pkgdir/usr/share/doc/$pkgname/"
   install -Dm644 "$srcdir/$pkgname/README.md" "$pkgdir/usr/share/doc/$pkgname/README.md"
-  install -Dm644 "$srcdir/$pkgname/version.env" "$pkgdir/usr/share/doc/$pkgname/version.env"
 
   sed -i "s/README.md/\/usr\/share\/doc\/$pkgname\/README.md/g" "$srcdir/$pkgname/app/main.py"
   sed -i "s/join(dirname(__file__), 'cert\//join('\/var\/lib\/$pkgname', 'cert\//g" "$srcdir/$pkgname/app/main.py"
   install -Dm755 "$srcdir/$pkgname/app/main.py" "$pkgdir/opt/$pkgname/main.py"
   install -Dm755 "$srcdir/$pkgname/app/orm.py" "$pkgdir/opt/$pkgname/orm.py"
-  install -Dm755 "$srcdir/$pkgname/app/util.py" "$pkgdir/opt/$pkgname/util.py"
   install -Dm644 "$srcdir/$pkgname.default" "$pkgdir/etc/default/$pkgname"
   install -Dm644 "$srcdir/$pkgname.service" "$pkgdir/usr/lib/systemd/system/$pkgname.service"
-  install -Dm644 "$srcdir/$pkgname.tmpfiles" "$pkgdir/usr/lib/tmpfiles.d/$pkgname.conf"
 }
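When the `source=()` array changes as it does in this diff, the `sha256sums=()` array has to be regenerated to match; a sketch using stock pacman tooling (not part of this repo's CI):

```shell
makepkg -g >> PKGBUILD   # append freshly computed sha256sums for the current sources
# or, from pacman-contrib, rewrite the checksum array in place:
updpkgsums
```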
@@ -3,7 +3,6 @@ DEBUG=false
 
 # Where the client can find the DLS server
 ## DLS_URL should be a hostname
-LISTEN_IP="0.0.0.0"
 DLS_URL="localhost.localdomain"
 DLS_PORT=8443
 CORS_ORIGINS="https://$DLS_URL:$DLS_PORT"
@@ -12,7 +11,7 @@ CORS_ORIGINS="https://$DLS_URL:$DLS_PORT"
 LEASE_EXPIRE_DAYS=90
 
 # Database location
-## https://docs.sqlalchemy.org/en/14/core/engines.html
+## See https://dataset.readthedocs.io/en/latest/quickstart.html for details
 DATABASE="sqlite:////var/lib/fastapi-dls/db.sqlite"
 
 # UUIDs for identifying the instance
@@ -22,7 +21,3 @@ INSTANCE_REF="<<instanceref>>"
 # Site-wide signing keys
 INSTANCE_KEY_RSA="/var/lib/fastapi-dls/instance.private.pem"
 INSTANCE_KEY_PUB="/var/lib/fastapi-dls/instance.public.pem"
-
-# TLS certificate
-INSTANCE_SSL_CERT="/var/lib/fastapi-dls/cert/webserver.crt"
-INSTANCE_SSL_KEY="/var/lib/fastapi-dls/cert/webserver.key"
@@ -4,13 +4,12 @@ Documentation=https://git.collinwebdesigns.de/oscar.krause/fastapi-dls
 After=network.target
 
 [Service]
-Type=simple
-AmbientCapabilities=CAP_NET_BIND_SERVICE
+Type=forking
 EnvironmentFile=/etc/default/fastapi-dls
-ExecStart=/usr/bin/uvicorn main:app --proxy-headers --env-file=/etc/default/fastapi-dls --host=${LISTEN_IP} --port=${DLS_PORT} --app-dir=/opt/fastapi-dls --ssl-keyfile=${INSTANCE_SSL_KEY} --ssl-certfile=${INSTANCE_SSL_CERT}
+ExecStart=/usr/bin/python /opt/fastapi-dls/main.py
+WorkingDir=/opt/fastapi-dls
 Restart=on-abort
-User=http
-Group=http
+User=root
 
 [Install]
 WantedBy=multi-user.target
@@ -1,2 +0,0 @@
-d /var/lib/fastapi-dls 0755 http http
-d /var/lib/fastapi-dls/cert 0755 http http
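The two `d` entries above are consumed by systemd-tmpfiles at boot; applying them by hand works the same way (the conf path is assumed from the old PKGBUILD's `install -Dm644 ... tmpfiles.d/$pkgname.conf` line):

```shell
systemd-tmpfiles --create /usr/lib/tmpfiles.d/fastapi-dls.conf
```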
@@ -1,48 +0,0 @@
-<?xml version="1.0"?>
-<Container version="2">
-  <Name>FastAPI-DLS</Name>
-  <Repository>collinwebdesigns/fastapi-dls:latest</Repository>
-  <Registry>https://hub.docker.com/r/collinwebdesigns/fastapi-dls</Registry>
-  <Network>br0</Network>
-  <MyIP></MyIP>
-  <Shell>sh</Shell>
-  <Privileged>false</Privileged>
-  <Support/>
-  <Project/>
-  <Overview>Source:
-https://git.collinwebdesigns.de/oscar.krause/fastapi-dls#docker
-&#xD;
-Make sure you create these certificates before starting the container for the first time:
-```
-# Check https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/tree/main/#docker for more information:
-WORKING_DIR=/mnt/user/appdata/fastapi-dls/cert
-mkdir -p $WORKING_DIR
-cd $WORKING_DIR
-# create instance private and public key for singing JWT's
-openssl genrsa -out $WORKING_DIR/instance.private.pem 2048 &#xD;
-openssl rsa -in $WORKING_DIR/instance.private.pem -outform PEM -pubout -out $WORKING_DIR/instance.public.pem
-# create ssl certificate for integrated webserver (uvicorn) - because clients rely on ssl
-openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout $WORKING_DIR/webserver.key -out $WORKING_DIR/webserver.crt
-```
-  </Overview>
-  <Category/>
-  <WebUI>https://[IP]:[PORT:443]</WebUI>
-  <TemplateURL/>
-  <Icon>https://git.collinwebdesigns.de/uploads/-/system/project/avatar/106/png-transparent-nvidia-grid-logo-business-nvidia-electronics-text-trademark.png?width=64</Icon>
-  <ExtraParams>--restart always</ExtraParams>
-  <PostArgs/>
-  <CPUset/>
-  <DateInstalled>1679161568</DateInstalled>
-  <DonateText/>
-  <DonateLink/>
-  <Requires/>
-  <Config Name="HTTPS Port" Target="" Default="443" Mode="tcp" Description="Same as DLS Port below." Type="Port" Display="always-hide" Required="true" Mask="false">443</Config>
-  <Config Name="App Cert" Target="/app/cert" Default="/mnt/user/appdata/fastapi-dls/cert" Mode="rw" Description="[REQUIRED] Read the description above to make this folder. You do not need to change the path." Type="Path" Display="always-hide" Required="true" Mask="false">/mnt/user/appdata/fastapi-dls/cert</Config>
-  <Config Name="DLS Port" Target="DSL_PORT" Default="443" Mode="" Description="Choose port you want to use. Make sure to change the HTTPS port above to match it." Type="Variable" Display="always-hide" Required="true" Mask="false">443</Config>
-  <Config Name="App database" Target="/app/database" Default="/mnt/user/appdata/fastapi-dls/data" Mode="rw" Description="[REQUIRED] Read the description above to make this folder. You do not need to change the path." Type="Path" Display="always-hide" Required="true" Mask="false">/mnt/user/appdata/fastapi-dls/data</Config>
-  <Config Name="DSL IP" Target="DLS_URL" Default="localhost" Mode="" Description="Put your container's IP (or your host's IP if it's shared)." Type="Variable" Display="always-hide" Required="true" Mask="false"></Config>
-  <Config Name="Time Zone" Target="TZ" Default="" Mode="" Description="Format example: America/New_York. MUST MATCH YOUR CURRENT TIMEZONE AND THE GUEST VMS TIMEZONE! Otherwise you'll get into issues, read the guide above." Type="Variable" Display="always-hide" Required="true" Mask="false"></Config>
-  <Config Name="Database" Target="DATABASE" Default="sqlite:////app/database/db.sqlite" Mode="" Description="Set to sqlite:////app/database/db.sqlite" Type="Variable" Display="advanced-hide" Required="true" Mask="false">sqlite:////app/database/db.sqlite</Config>
-  <Config Name="Debug" Target="DEBUG" Default="true" Mode="" Description="true to enable debugging, false to disable them." Type="Variable" Display="advanced-hide" Required="false" Mask="false">true</Config>
-  <Config Name="Lease" Target="LEASE_EXPIRE_DAYS" Default="90" Mode="" Description="90 days is the maximum value." Type="Variable" Display="advanced" Required="false" Mask="false">90</Config>
-</Container>
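Outside Unraid, the template above corresponds roughly to a plain `docker run`; a sketch assembled only from the template's defaults (the IP and timezone are placeholders you must set yourself, and `br0` assumes such a Docker network already exists):

```shell
docker run -d --restart always --network br0 \
  -e DLS_URL=<container-ip> -e DLS_PORT=443 -e TZ=<your-timezone> \
  -e DATABASE=sqlite:////app/database/db.sqlite -e LEASE_EXPIRE_DAYS=90 \
  -v /mnt/user/appdata/fastapi-dls/cert:/app/cert \
  -v /mnt/user/appdata/fastapi-dls/data:/app/database \
  -p 443:443 \
  collinwebdesigns/fastapi-dls:latest
```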
@@ -1,197 +0,0 @@
-#!/bin/bash
-
-# This script automates the licensing of the vGPU guest driver
-# on Unraid boot. Set the Schedule to: "At Startup of Array".
-#
-# Relies on FastAPI-DLS for the licensing.
-# It assumes FeatureType=1 (vGPU), change it as you see fit in line <114>
-#
-# Requires `eflutils` to be installed in the system for `nvidia-gridd` to run
-# To Install it:
-# 1) You might find it here: https://packages.slackware.com/ (choose the 64bit version of Slackware)
-# 2) Download the package and put it in /boot/extra to be installed on boot
-# 3) a. Reboot to install it, OR
-#    b. Run `upgradepkg --install-new /boot/extra/elfutils*`
-# [i]: Make sure to have only one version of elfutils, otherwise you might run into issues
-
-# Sources and docs:
-# https://docs.nvidia.com/grid/15.0/grid-vgpu-user-guide/index.html#configuring-nls-licensed-client-on-linux
-#
-
-################################################
-#    MAKE SURE YOU CHANGE THESE VARIABLES      #
-################################################
-
-###### CHANGE ME!
-# IP and PORT of FastAPI-DLS
-DLS_IP=192.168.0.123
-DLS_PORT=443
-# Token folder, must be on a filesystem that supports
-# linux filesystem permissions (eg: ext4,xfs,btrfs...)
-TOKEN_PATH=/mnt/user/system/nvidia
-PING=$(which ping)
-
-# Check if the License is applied
-if [[ "$(nvidia-smi -q | grep "Expiry")" == *Expiry* ]]; then
-  echo " [i] Your vGPU Guest drivers are already licensed."
-  echo " [i] $(nvidia-smi -q | grep "Expiry")"
-  echo " [<] Exiting..."
-  exit 0
-fi
-
-# Check if the FastAPI-DLS server is reachable
-# Check if the License is applied
-MAX_RETRIES=30
-for i in $(seq 1 $MAX_RETRIES); do
-  echo -ne "\r [>] Attempt $i to connect to $DLS_IP."
-  if ping -c 1 $DLS_IP >/dev/null 2>&1; then
-    echo -e "\n [*] Connection successful."
-    break
-  fi
-  if [ $i -eq $MAX_RETRIES ]; then
-    echo -e "\n [!] Connection failed after $MAX_RETRIES attempts."
-    echo -e "\n [<] Exiting..."
-    exit 1
-  fi
-  sleep 1
-done
-
-# Check if the token folder exists
-if [ -d "${TOKEN_PATH}" ]; then
-  echo " [*] Token Folder exists. Proceeding..."
-else
-  echo " [!] Token Folder does not exists or not ready yet. Exiting."
-  echo " [!] Token Folder Specified: ${TOKEN_PATH}"
-  exit 1
-fi
-
-# Check if elfutils are installed, otherwise nvidia-gridd service
-# wont start
-if [ "$(grep -R "elfutils" /var/log/packages/* | wc -l)" != 0 ]; then
-  echo " [*] Elfutils is installed, proceeding..."
-else
-  echo " [!] Elfutils is not installed, downloading and installing..."
-  echo " [!] Downloading elfutils to /boot/extra"
-  echo " [i] This script will download elfutils from slackware64-15.0 repository."
-  echo " [i] If you have a different version of Unraid (6.11.5), you might want to"
-  echo " [i] download and install a suitable version manually from the slackware"
-  echo " [i] repository, and put it in /boot/extra to be install on boot."
-  echo " [i] You may also install it by running: "
-  echo " [i] upgradepkg --install-new /path/to/elfutils-*.txz"
-  echo ""
-  echo " [>] Downloading elfutils from slackware64-15.0 repository:"
-  wget -q -nc --show-progress --progress=bar:force:noscroll -P /boot/extra https://slackware.uk/slackware/slackware64-15.0/slackware64/l/elfutils-0.186-x86_64-1.txz 2>/dev/null \
-    || { echo " [!] Error while downloading elfutils, please download it and install it manually."; exit 1; }
-  echo ""
-  if upgradepkg --install-new /boot/extra/elfutils-0.186-x86_64-1.txz
-  then
-    echo " [*] Elfutils installed and will be installed automatically on boot"
-  else
-    echo " [!] Error while installing, check logs..."
-    exit 1
-  fi
-fi
-
-echo " [~] Sleeping for 60 seconds before continuing..."
-echo " [i] The script is waiting until the boot process settles down."
-
-for i in {60..1}; do
-  printf "\r [~] %d seconds remaining" "$i"
-  sleep 1
-done
-
-printf "\n"
-
-create_token () {
-  echo " [>] Creating new token..."
-  if ${PING} -c1 ${DLS_IP} > /dev/null 2>&1
-  then
-    # curl --insecure -L -X GET https://${DLS_IP}:${DLS_PORT}/-/client-token -o ${TOKEN_PATH}/client_configuration_token_"$(date '+%d-%m-%Y-%H-%M-%S')".tok || { echo " [!] Could not get the token, please check the server."; exit 1;}
-    wget -q -nc -4c --no-check-certificate --show-progress --progress=bar:force:noscroll -O "${TOKEN_PATH}"/client_configuration_token_"$(date '+%d-%m-%Y-%H-%M-%S')".tok https://${DLS_IP}:${DLS_PORT}/-/client-token \
-      || { echo " [!] Could not get the token, please check the server."; exit 1;}
-    chmod 744 "${TOKEN_PATH}"/*.tok || { echo " [!] Could not chmod the tokens."; exit 1; }
-    echo ""
-    echo " [*] Token downloaded and stored in ${TOKEN_PATH}."
-  else
-    echo " [!] Could not get token, DLS server unavailable ."
-    exit 1
-  fi
-}
-
-setup_run () {
-  echo " [>] Setting up gridd.conf"
-  cp /etc/nvidia/gridd.conf.template /etc/nvidia/gridd.conf || { echo " [!] Error configuring gridd.conf, did you install the drivers correctly?"; exit 1; }
-  sed -i 's/FeatureType=0/FeatureType=1/g' /etc/nvidia/gridd.conf
-  echo "ClientConfigTokenPath=${TOKEN_PATH}" >> /etc/nvidia/gridd.conf
-  echo " [>] Creating /var/lib/nvidia folder structure"
-  mkdir -p /var/lib/nvidia/GridLicensing
-  echo " [>] Starting nvidia-gridd"
-  if pgrep nvidia-gridd >/dev/null 2>&1; then
-    echo " [!] nvidia-gridd service is running. Closing."
-    sh /usr/lib/nvidia/sysv/nvidia-gridd stop
-    stop_exit_code=$?
-    if [ $stop_exit_code -eq 0 ]; then
-      echo " [*] nvidia-gridd service stopped successfully."
-    else
-      echo " [!] Error while stopping nvidia-gridd service."
-      exit 1
-    fi
-
-    # Kill the service if it does not close
-    if pgrep nvidia-gridd >/dev/null 2>&1; then
-      kill -9 "$(pgrep nvidia-gridd)" || {
-        echo " [!] Error while closing nvidia-gridd service"
-        exit 1
-      }
-    fi
-
-    echo " [*] Restarting nvidia-gridd service."
-    sh /usr/lib/nvidia/sysv/nvidia-gridd start
-
-    if pgrep nvidia-gridd >/dev/null 2>&1; then
-      echo " [*] Service started, PID: $(pgrep nvidia-gridd)"
-    else
-      echo -e " [!] Error while starting nvidia-gridd service. Use strace -f nvidia-gridd to debug.\n [i] Check if elfutils is installed.\n [i] strace is not installed by default."
-      exit 1
-    fi
-  else
-    sh /usr/lib/nvidia/sysv/nvidia-gridd start
-
-    if pgrep nvidia-gridd >/dev/null 2>&1; then
-      echo " [*] Service started, PID: $(pgrep nvidia-gridd)"
-    else
-      echo -e " [!] Error while starting nvidia-gridd service. Use strace -f nvidia-gridd to debug.\n [i] Check if elfutils is installed.\n [i] strace is not installed by default."
-      exit 1
-    fi
-  fi
-}
-
-for token in "${TOKEN_PATH}"/*; do
-  if [ "${token: -4}" == ".tok" ]
-  then
-    echo " [*] Tokens found..."
-    setup_run
-  else
-    echo " [!] No Tokens found..."
-    create_token
-    setup_run
-  fi
-done
-
-while true; do
-  if nvidia-smi -q | grep "Expiry" >/dev/null 2>&1; then
-    echo " [>] vGPU licensed!"
-    echo " [i] $(nvidia-smi -q | grep "Expiry")"
-    break
-  else
-    echo -ne " [>] vGPU not licensed yet... Checking again in 5 seconds\c"
-    for i in {1..5}; do
-      sleep 1
-      echo -ne ".\c"
-    done
-    echo -ne "\r\c"
-  fi
-done
-
-echo " [>] Done..."
-exit 0
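After `setup_run` succeeds, the script has effectively produced a gridd.conf like the following; a sketch assembled from the `cp`/`sed`/`echo` lines above, using the script's default `TOKEN_PATH`:

```shell
# /etc/nvidia/gridd.conf (resulting state, sketch; other template keys unchanged)
FeatureType=1                                  # flipped from 0 by the sed above
ClientConfigTokenPath=/mnt/user/system/nvidia  # appended by the script
```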
259 .gitlab-ci.yml
@@ -1,10 +1,3 @@
-include:
-  - template: Jobs/Code-Quality.gitlab-ci.yml
-  - template: Jobs/Secret-Detection.gitlab-ci.yml
-  - template: Jobs/SAST.gitlab-ci.yml
-  - template: Jobs/Container-Scanning.gitlab-ci.yml
-  - template: Jobs/Dependency-Scanning.gitlab-ci.yml
-
 cache:
   key: one-key-to-rule-them-all
 
@@ -13,98 +6,48 @@ build:docker:
   interruptible: true
   stage: build
   rules:
-    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
-      changes:
-        - app/**/*
-        - Dockerfile
-    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
+    - if: $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
   tags: [ docker ]
   before_script:
-    - docker buildx inspect
-    - docker buildx create --use
+    - echo "COMMIT=${CI_COMMIT_SHA}" >> version.env # COMMIT=`git rev-parse HEAD`
   script:
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-    - IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH/$CI_BUILD_REF_NAME:$CI_BUILD_REF
-    - docker buildx build --progress=plain --platform linux/amd64,linux/arm64 --build-arg VERSION=$CI_BUILD_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE --push .
-    - docker buildx imagetools inspect $IMAGE
-    - echo "CS_IMAGE=$IMAGE" > container_scanning.env
-  artifacts:
-    reports:
-      dotenv: container_scanning.env
+    - docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}
+    - docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}
 
 build:apt:
-  image: debian:bookworm-slim
-  interruptible: true
+  image: debian:bookworm-slim # just to get "python3-jose" working
   stage: build
-  rules:
-    - if: $CI_COMMIT_TAG
-      variables:
-        VERSION: $CI_BUILD_REF_NAME
-    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
-      changes:
-        - app/**/*
-        - .DEBIAN/**/*
-        - .gitlab-ci.yml
-      variables:
-        VERSION: "0.0.1"
-    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
-      variables:
-        VERSION: "0.0.1"
   before_script:
-    - echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > version.env
-    # install build dependencies
     - apt-get update -qq && apt-get install -qq -y build-essential
     # create build directory for .deb sources
     - mkdir build
     # copy install instructions
-    - cp -r .DEBIAN build/DEBIAN
-    - chmod -R 0775 build/DEBIAN
+    - cp -r DEBIAN build/
    # copy app into "/usr/share/fastapi-dls" as "/usr/share/fastapi-dls/app" & copy README.md and version.env
     - mkdir -p build/usr/share/fastapi-dls
     - cp -r app build/usr/share/fastapi-dls
     - cp README.md version.env build/usr/share/fastapi-dls
     # create conf file
     - mkdir -p build/etc/fastapi-dls
-    - cp .DEBIAN/env.default build/etc/fastapi-dls/env
-    # create service file
-    - mkdir -p build/etc/systemd/system
-    - cp .DEBIAN/fastapi-dls.service build/etc/systemd/system/fastapi-dls.service
+    - touch build/etc/fastapi-dls/env
     # cd into "build/"
     - cd build/
   script:
-    # set version based on value in "$CI_BUILD_REF_NAME"
-    - sed -i -E 's/(Version\:\s)0.0/\1'"$VERSION"'/g' DEBIAN/control
-    # build
     - dpkg -b . build.deb
-    - dpkg -I build.deb
   artifacts:
     expire_in: 1 week
     paths:
       - build/build.deb
 
-build:pacman:
+build:pamac:
   image: archlinux:base-devel
-  interruptible: true
   stage: build
   rules:
-    - if: $CI_COMMIT_TAG
-      variables:
-        VERSION: $CI_BUILD_REF_NAME
-    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
-      changes:
-        - app/**/*
-        - .PKGBUILD/**/*
-        - .gitlab-ci.yml
-      variables:
-        VERSION: "0.0.1"
-    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
-      variables:
-        VERSION: "0.0.1"
+    - if: $CI_COMMIT_BRANCH == "archlinux-makepkg"
   before_script:
-    #- echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > version.env
-    # install build dependencies
     - pacman -Syu --noconfirm git
-    # create a build-user because "makepkg" don't like root user
+    # "makepkg" don't likes root user
     - useradd --no-create-home --shell=/bin/false build && usermod -L build
     - 'echo "build ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers'
     - 'echo "root ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers'
@@ -116,19 +59,15 @@ build:pacman:
     # download dependencies
     - source PKGBUILD && pacman -Syu --noconfirm --needed --asdeps "${makedepends[@]}" "${depends[@]}"
     # build
-    - sudo --preserve-env -u build makepkg -s
+    - sudo -u build makepkg -s
   artifacts:
     expire_in: 1 week
     paths:
       - "*.pkg.tar.zst"
 
 test:
-  image: python:3.11-slim-bullseye
+  image: python:3.10-slim-bullseye
   stage: test
-  rules:
-    - if: $CI_COMMIT_BRANCH
-    - if: $CI_COMMIT_TAG
-    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
   variables:
     DATABASE: sqlite:///../app/db.sqlite
   before_script:
@@ -139,46 +78,32 @@ test:
     - openssl rsa -in app/cert/instance.private.pem -outform PEM -pubout -out app/cert/instance.public.pem
     - cd test
   script:
-    - python -m pytest main.py --junitxml=report.xml
-  artifacts:
-    reports:
-      dotenv: version.env
-      junit: ['**/report.xml']
+    - pytest main.py
 
 .test:linux:
   stage: test
-  rules:
-    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
-      changes:
-        - app/**/*
-        - .DEBIAN/**/*
-    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
   needs:
     - job: build:apt
       artifacts: true
   variables:
     DEBIAN_FRONTEND: noninteractive
   before_script:
-    - apt-get update -qq && apt-get install -qq -y jq curl
+    - apt-get update -qq && apt-get install -qq -y jq
   script:
     # test installation
     - apt-get install -q -y ./build/build.deb --fix-missing
-    - openssl req -x509 -newkey rsa:2048 -nodes -out /etc/fastapi-dls/webserver.crt -keyout /etc/fastapi-dls/webserver.key -days 7 -subj "/C=DE/O=GitLab-CI/OU=Test/CN=localhost"
     # copy example config from GitLab-CI-Variables
     #- cat ${EXAMPLE_CONFIG} > /etc/fastapi-dls/env
     # start service in background
-    - cd /usr/share/fastapi-dls/app
-    - uvicorn main:app
-      --host 127.0.0.1 --port 443
+    - uvicorn --host 127.0.0.1 --port 443
       --app-dir /usr/share/fastapi-dls/app
       --ssl-keyfile /etc/fastapi-dls/webserver.key
-      --ssl-certfile /etc/fastapi-dls/webserver.crt
+      --ssl-certfile /opt/fastapi-dls/webserver.crt
       --proxy-headers &
     - FASTAPI_DLS_PID=$!
     - echo "Started service with pid $FASTAPI_DLS_PID"
-    - cat /etc/fastapi-dls/env
     # testing service
-    - if [ "`curl --insecure -s https://127.0.0.1/-/health | jq .status`" != "up" ]; then echo "Success"; else "Error"; fi
+    - if [ "`curl --insecure -s https://127.0.0.1/status | jq .status`" != "up" ]; then echo "Success"; else "Error"; fi
     # cleanup
     - kill $FASTAPI_DLS_PID
     - apt-get purge -qq -y fastapi-dls
@@ -192,101 +117,41 @@ test:ubuntu:
   extends: .test:linux
   image: ubuntu:22.10
 
-test:archlinux:
-  image: archlinux:base
-  rules:
-    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
-      changes:
-        - app/**/*
-        - .PKGBUILD/**/*
-    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
-  needs:
-    - job: build:pacman
-      artifacts: true
-  script:
-    - pacman -Sy
-    - pacman -U --noconfirm *.pkg.tar.zst
-
-code_quality:
-  rules:
-    - if: $CODE_QUALITY_DISABLED
-      when: never
-    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
-
-secret_detection:
-  rules:
-    - if: $SECRET_DETECTION_DISABLED
-      when: never
-    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
-  before_script:
-    - git config --global --add safe.directory $CI_PROJECT_DIR
-
-semgrep-sast:
-  rules:
-    - if: $SAST_DISABLED
-      when: never
-    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
-
-test_coverage:
-  extends: test
-  allow_failure: true
-  rules:
-    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
-  script:
-    - pip install pytest pytest-cov
-    - coverage run -m pytest main.py
-    - coverage report
-    - coverage xml
-  coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
-  artifacts:
-    reports:
-      coverage_report:
-        coverage_format: cobertura
-        path: '**/coverage.xml'
-
-container_scanning:
-  dependencies: [ build:docker ]
-  rules:
-    - if: $CONTAINER_SCANNING_DISABLED
-      when: never
-    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
-
-gemnasium-python-dependency_scanning:
-  rules:
-    - if: $DEPENDENCY_SCANNING_DISABLED
-      when: never
-    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
-
-.deploy:
-  rules:
-    - if: $CI_COMMIT_TAG
 
 deploy:docker:
-  extends: .deploy
   stage: deploy
+  rules:
+    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+    - changes:
+        - Dockerfile
+        - requirements.txt
+        - app/**/*
   before_script:
-    - echo "Building docker image for commit $CI_COMMIT_SHA with version $CI_BUILD_REF_NAME"
+    - echo "COMMIT=${CI_COMMIT_SHA}" >> version.env
+    - source version.env
+    - echo "Building docker image for commit ${COMMIT} with version ${VERSION}"
   script:
-    - echo "========== GitLab-Registry =========="
+    - echo "GitLab-Registry"
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-    - IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH/$CI_BUILD_REF_NAME
-    - docker build . --build-arg VERSION=$CI_BUILD_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:$CI_BUILD_REF_NAME
-    - docker build . --build-arg VERSION=$CI_BUILD_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:latest
-    - docker push $IMAGE:$CI_BUILD_REF_NAME
-    - docker push $IMAGE:latest
-    - echo "========== Docker-Hub =========="
+    - docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${VERSION}
+    - docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:latest
+    - docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${VERSION}
+    - docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:latest
+    - echo "Docker-Hub"
     - docker login -u $PUBLIC_REGISTRY_USER -p $PUBLIC_REGISTRY_TOKEN
-    - IMAGE=$PUBLIC_REGISTRY_USER/$CI_PROJECT_NAME
-    - docker build . --build-arg VERSION=$CI_BUILD_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:$CI_BUILD_REF_NAME
-    - docker build . --build-arg VERSION=$CI_BUILD_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:latest
-    - docker push $IMAGE:$CI_BUILD_REF_NAME
-    - docker push $IMAGE:latest
+    - docker build . --tag $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:${VERSION}
+    - docker build . --tag $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:latest
+    - docker push $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:${VERSION}
+    - docker push $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:latest
 
 deploy:apt:
   # doc: https://git.collinwebdesigns.de/help/user/packages/debian_repository/index.md#install-a-package
-  extends: .deploy
   image: debian:bookworm-slim
   stage: deploy
+  rules:
+    #- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+    - changes:
+        - DEBIAN/**/*
+        - app/**/*
   needs:
     - job: build:apt
       artifacts: true
@@ -321,45 +186,3 @@ deploy:apt:
     # using generic-package-registry until debian-registry is GA
     # https://docs.gitlab.com/ee/user/packages/generic_packages/index.html#publish-a-generic-package-by-using-cicd
     - 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file ${EXPORT_NAME} "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/${PACKAGE_NAME}/${PACKAGE_VERSION}/${EXPORT_NAME}"'
-
-deploy:pacman:
-  extends: .deploy
-  image: archlinux:base-devel
-  stage: deploy
-  needs:
-    - job: build:pacman
-      artifacts: true
-  script:
-    - source .PKGBUILD/PKGBUILD
-    - source version.env
-    # fastapi-dls-1.0-1-any.pkg.tar.zst
-    - BUILD_NAME=${pkgname}-${CI_BUILD_REF_NAME}-${pkgrel}-any.pkg.tar.zst
-    - PACKAGE_NAME=${pkgname}
-    - PACKAGE_VERSION=${CI_BUILD_REF_NAME}
-    - PACKAGE_ARCH=any
-    - EXPORT_NAME=${BUILD_NAME}
-    - 'echo "PACKAGE_NAME: ${PACKAGE_NAME}"'
-    - 'echo "PACKAGE_VERSION: ${PACKAGE_VERSION}"'
-    - 'echo "PACKAGE_ARCH: ${PACKAGE_ARCH}"'
-    - 'echo "EXPORT_NAME: ${EXPORT_NAME}"'
-    - 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file ${EXPORT_NAME} "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/${PACKAGE_NAME}/${PACKAGE_VERSION}/${EXPORT_NAME}"'
-
-release:
-  image: registry.gitlab.com/gitlab-org/release-cli:latest
-  stage: .post
-  needs: [ test ]
-  rules:
-    - if: $CI_COMMIT_TAG
-  script:
-    - echo "Running release-job for $CI_COMMIT_TAG"
-  release:
-    name: $CI_PROJECT_TITLE $CI_COMMIT_TAG
-    description: Release of $CI_PROJECT_TITLE version $CI_COMMIT_TAG
-    tag_name: $CI_COMMIT_TAG
-    ref: $CI_COMMIT_SHA
-    assets:
-      links:
-        - name: 'Package Registry'
-          url: 'https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/packages'
-        - name: 'Container Registry'
-          url: 'https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/container_registry/40'
@@ -1,2 +0,0 @@
-* @oscar.krause
-.PKGBUILD/ @samicrusader
@@ -1,5 +1,5 @@
 Package: fastapi-dls
-Version: 0.0
+Version: 1.0.0
 Architecture: all
 Maintainer: Oscar Krause oscar.krause@collinwebdesigns.de
 Depends: python3, python3-fastapi, python3-uvicorn, python3-dotenv, python3-dateutil, python3-jose, python3-sqlalchemy, python3-pycryptodome, python3-markdown, uvicorn, openssl
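The `Version: 0.0` placeholder on the old side is what the CI's `sed -i -E 's/(Version\:\s)0.0/...'` step rewrites at build time; a local sketch mirroring that job's steps (directory layout as created by `build:apt`, the version string is illustrative):

```shell
sed -i -E 's/(Version\:\s)0.0/\11.2.3/g' build/DEBIAN/control  # stamp an example version
dpkg -b build/ build.deb   # assemble the package
dpkg -I build.deb          # inspect the resulting control metadata
```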
101 DEBIAN/postinst (new file)
@@ -0,0 +1,101 @@
+#!/bin/bash
+
+WORKING_DIR=/usr/share/fastapi-dls
+CONFIG_DIR=/etc/fastapi-dls
+
+echo "> Create config directory ..."
+mkdir -p $CONFIG_DIR
+
+echo "> Install service ..."
+cat <<EOF >/etc/systemd/system/fastapi-dls.service
+[Unit]
+Description=Service for fastapi-dls
+After=network.target
+
+[Service]
+User=www-data
+Group=www-data
+AmbientCapabilities=CAP_NET_BIND_SERVICE
+WorkingDirectory=$WORKING_DIR/app
+EnvironmentFile=$CONFIG_DIR/env
+ExecStart=uvicorn main:app \\
+  --env-file /etc/fastapi-dls/env \\
+  --host \$DLS_URL --port \$DLS_PORT \\
+  --app-dir $WORKING_DIR/app \\
+  --ssl-keyfile /etc/fastapi-dls/webserver.key \\
+  --ssl-certfile /etc/fastapi-dls/webserver.crt \\
+  --proxy-headers
+Restart=always
+KillSignal=SIGQUIT
+Type=simple
+NotifyAccess=all
+
+[Install]
+WantedBy=multi-user.target
+
+EOF
+
+systemctl daemon-reload
+
+if [[ ! -f $CONFIG_DIR/env ]]; then
+  echo "> Writing initial config ..."
+  touch $CONFIG_DIR/env
+  cat <<EOF >$CONFIG_DIR/env
+DLS_URL=127.0.0.1
+DLS_PORT=443
+LEASE_EXPIRE_DAYS=90
+DATABASE=sqlite:///$CONFIG_DIR/db.sqlite
+INSTANCE_KEY_RSA=$CONFIG_DIR/instance.private.pem
+INSTANCE_KEY_PUB=$CONFIG_DIR/instance.public.pem
+
+EOF
+fi
+
+echo "> Create dls-instance keypair ..."
+openssl genrsa -out $CONFIG_DIR/instance.private.pem 2048
+openssl rsa -in $CONFIG_DIR/instance.private.pem -outform PEM -pubout -out $CONFIG_DIR/instance.public.pem
+
+while true; do
+  read -p "> Do you wish to create self-signed webserver certificate? [Y/n]" yn
+  yn=${yn:-y} # ${parameter:-word} If parameter is unset or null, the expansion of word is substituted. Otherwise, the value of parameter is substituted.
+  case $yn in
+    [Yy]*)
+      openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout $CONFIG_DIR/webserver.key -out $CONFIG_DIR/webserver.crt
+      break
+      ;;
+    [Nn]*) break ;;
+    *) echo "Please answer [y] or [n]." ;;
+  esac
+done
+
+if [[ -f $CONFIG_DIR/webserver.key ]]; then
+  echo "> Starting service ..."
+  systemctl start fastapi-dls.service
+
+  if [ -x "$(command -v curl)" ]; then
+    echo "> Testing API ..."
+    source $CONFIG_DIR/env
+    curl --insecure -X GET https://$DLS_URL:$DLS_PORT/status
+  else
+    echo "> Testing API failed, curl not available. Please test manually!"
+  fi
+fi
+
+chown -R www-data:www-data $CONFIG_DIR
+chown -R www-data:www-data $WORKING_DIR
+
+cat <<EOF
+
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
+#                                                                             #
+# fastapi-dls is now installed.                                               #
+#                                                                             #
+# Service should be up and running.                                           #
+# Webservice is listen to https://localhost                                   #
+#                                                                             #
+# Configuration is stored in ${CONFIG_DIR}/env                                #
+#                                                                             #
+#                                                                             #
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
+
+EOF
8 DEBIAN/postrm (new executable file)
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+if [[ -f /etc/systemd/system/fastapi-dls.service ]]; then
+  echo "> Removing service file."
+  rm /etc/systemd/system/fastapi-dls.service
+fi
+
+# todo
@@ -1,3 +1,5 @@
 #!/bin/bash
 
 echo -e "> Starting uninstallation of 'fastapi-dls'!"
+
+# todo
@@ -1,8 +1,4 @@
-FROM python:3.11-alpine
+FROM python:3.10-alpine
 
-ARG VERSION
-ARG COMMIT=""
-RUN echo -e "VERSION=$VERSION\nCOMMIT=$COMMIT" > /version.env
-
 COPY requirements.txt /tmp/requirements.txt
 
@@ -15,7 +11,8 @@ RUN apk update \
     && apk del build-deps
 
 COPY app /app
+COPY version.env /version.env
 COPY README.md /README.md
 
-HEALTHCHECK --start-period=30s --interval=10s --timeout=5s --retries=3 CMD curl --insecure --fail https://localhost/-/health || exit 1
+HEALTHCHECK --start-period=30s --interval=10s --timeout=5s --retries=3 CMD curl --insecure --fail https://localhost/status || exit 1
 CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "443", "--app-dir", "/app", "--proxy-headers", "--ssl-keyfile", "/app/cert/webserver.key", "--ssl-certfile", "/app/cert/webserver.crt"]
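The `ARG VERSION` / `ARG COMMIT` pair on the left-hand Dockerfile is fed by the CI's `--build-arg` flags; a local equivalent for that variant (the tag name is illustrative):

```shell
docker build . \
  --build-arg VERSION=$(git describe --tags --always) \
  --build-arg COMMIT=$(git rev-parse HEAD) \
  --tag fastapi-dls:local
```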
17 FAQ.md
|
|||||||
# FAQ
|
|
||||||
|
|
||||||
## `Failed to acquire license from <ip> (Info: <license> - Error: The allowed time to process response has expired)`
|
|
||||||
|
|
||||||
- Did your timezone settings are correct on fastapi-dls **and your guest**?
|
|
||||||
|
|
||||||
- Did you download the client-token more than an hour ago?
|
|
||||||
|
|
||||||
Please download a new client-token. The guest have to register within an hour after client-token was created.
|
|
||||||
|
|
||||||
|
|
||||||
## `jose.exceptions.JWTError: Signature verification failed.`
|
|
||||||
|
|
||||||
- Did you recreated `instance.public.pem` / `instance.private.pem`?
|
|
||||||
|
|
||||||
Then you have to download a **new** client-token on each of your guests.
|
|
||||||
|
|
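Both FAQ answers above come down to "download a new client-token"; the download itself is the same call the Unraid script uses (IP and port are placeholders, `--insecure` because of the self-signed webserver certificate):

```shell
curl --insecure -L -X GET https://<dls-ip>:<dls-port>/-/client-token \
  -o client_configuration_token_$(date '+%d-%m-%Y-%H-%M-%S').tok
```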
448 README.md
@@ -2,36 +2,47 @@
 
 Minimal Delegated License Service (DLS).
 
-Compatibility tested with official DLS 2.0.1.
-
 This service can be used without internet connection.
 Only the clients need a connection to this service on configured port.
 
-**Official Links**
-
-- https://git.collinwebdesigns.de/oscar.krause/fastapi-dls (Private Git)
-- https://gitea.publichub.eu/oscar.krause/fastapi-dls (Public Git)
-- https://hub.docker.com/r/collinwebdesigns/fastapi-dls (Docker-Hub `collinwebdesigns/fastapi-dls:latest`)
-
-*All other repositories are forks! (which is no bad - just for information and bug reports)*
-
----
-
 [[_TOC_]]
 
+## ToDo's
+
+- Support http mode for using external https proxy (disable uvicorn ssl for using behind proxy)
+
+## Endpoints
+
+### `GET /`
+
+HTML rendered README.md.
+
+### `GET /status`
+
+Status endpoint, used for *healthcheck*. Shows also current version and commit hash.
+
+### `GET /docs`
+
+OpenAPI specifications rendered from `GET /openapi.json`.
+
+### `GET /-/origins`
+
+List registered origins.
+
+### `GET /-/leases`
+
+List current leases.
+
+### `GET /client-token`
+
+Generate client token, (see [installation](#installation)).
+
+### Others
+
+There are some more internal api endpoints for handling authentication and lease process.
+
 # Setup (Service)
 
-**System requirements**
-
-- 256mb ram
-- 4gb hdd
-
-Tested with Ubuntu 22.10 (from Proxmox templates), actually its consuming 100mb ram and 750mb hdd.
-
-**Prepare your system**
-
-- Make sure your timezone is set correct on you fastapi-dls server and your client
-
 ## Docker
 
 Docker-Images are available here:
@@ -39,8 +50,6 @@ Docker-Images are available here:
 - [Docker-Hub](https://hub.docker.com/repository/docker/collinwebdesigns/fastapi-dls): `collinwebdesigns/fastapi-dls:latest`
 - [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/container_registry): `registry.git.collinwebdesigns.de/oscar.krause/fastapi-dls/main:latest`
 
-The images include database drivers for `postgres`, `mysql`, `mariadb` and `sqlite`.
-
 **Run this on the Docker-Host**
 
 ```shell
|
|||||||
|
|
||||||
**Start container**
|
**Start container**
|
||||||
|
|
||||||
To test if everything is set up properly you can start container as following:
|
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
docker volume create dls-db
|
docker volume create dls-db
|
||||||
docker run -e DLS_URL=`hostname -i` -e DLS_PORT=443 -p 443:443 -v $WORKING_DIR:/app/cert -v dls-db:/app/database collinwebdesigns/fastapi-dls:latest
|
docker run -e DLS_URL=`hostname -i` -e DLS_PORT=443 -p 443:443 -v $WORKING_DIR:/app/cert -v dls-db:/app/database collinwebdesigns/fastapi-dls:latest
|
||||||
@ -65,18 +72,14 @@ docker run -e DLS_URL=`hostname -i` -e DLS_PORT=443 -p 443:443 -v $WORKING_DIR:/
|
|||||||
|
|
||||||
**Docker-Compose / Deploy stack**
|
**Docker-Compose / Deploy stack**
|
||||||
|
|
||||||
Goto [`docker-compose.yml`](docker-compose.yml) for more advanced example (with reverse proxy usage).
|
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
version: '3.9'
|
version: '3.9'
|
||||||
|
|
||||||
x-dls-variables: &dls-variables
|
x-dls-variables: &dls-variables
|
||||||
TZ: Europe/Berlin # REQUIRED, set your timezone correctly on fastapi-dls AND YOUR CLIENTS !!!
|
DLS_URL: localhost # REQUIRED
|
||||||
DLS_URL: localhost # REQUIRED, change to your ip or hostname
|
|
||||||
DLS_PORT: 443
|
DLS_PORT: 443
|
||||||
LEASE_EXPIRE_DAYS: 90 # 90 days is maximum
|
LEASE_EXPIRE_DAYS: 90
|
||||||
DATABASE: sqlite:////app/database/db.sqlite
|
DATABASE: sqlite:////app/database/db.sqlite
|
||||||
DEBUG: false
|
|
||||||
|
|
||||||
services:
|
services:
|
||||||
dls:
|
dls:
|
||||||
@ -89,22 +92,15 @@ services:
|
|||||||
volumes:
|
volumes:
|
||||||
- /opt/docker/fastapi-dls/cert:/app/cert
|
- /opt/docker/fastapi-dls/cert:/app/cert
|
||||||
- dls-db:/app/database
|
- dls-db:/app/database
|
||||||
logging: # optional, for those who do not need logs
|
|
||||||
driver: "json-file"
|
|
||||||
options:
|
|
||||||
max-file: 5
|
|
||||||
max-size: 10m
|
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
dls-db:
|
dls-db:
|
||||||
```
|
```
|
||||||
|
|
||||||
## Debian/Ubuntu (manual method using `git clone` and python virtual environment)
|
## Debian/Ubuntu (manual method using `git clone`)
|
||||||
|
|
||||||
Tested on `Debian 11 (bullseye)`, Ubuntu may also work.
|
Tested on `Debian 11 (bullseye)`, Ubuntu may also work.
|
||||||
|
|
||||||
**Make sure you are logged in as root.**
|
|
||||||
|
|
||||||
**Install requirements**
|
**Install requirements**
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
@ -129,7 +125,7 @@ chown -R www-data:www-data $WORKING_DIR
|
|||||||
|
|
||||||
```shell
|
```shell
|
||||||
WORKING_DIR=/opt/fastapi-dls/app/cert
|
WORKING_DIR=/opt/fastapi-dls/app/cert
|
||||||
mkdir -p $WORKING_DIR
|
mkdir $WORKING_DIR
|
||||||
cd $WORKING_DIR
|
cd $WORKING_DIR
|
||||||
# create instance private and public key for singing JWT's
|
# create instance private and public key for singing JWT's
|
||||||
openssl genrsa -out $WORKING_DIR/instance.private.pem 2048
|
openssl genrsa -out $WORKING_DIR/instance.private.pem 2048
|
||||||
@ -145,15 +141,12 @@ This is only to test whether the service starts successfully.
|
|||||||
|
|
||||||
```shell
cd /opt/fastapi-dls/app
sudo -u www-data /opt/fastapi-dls/venv/bin/uvicorn main:app --app-dir=/opt/fastapi-dls/app
# or
su - www-data -c "/opt/fastapi-dls/venv/bin/uvicorn main:app --app-dir=/opt/fastapi-dls/app"
```

**Create config file**

```shell
mkdir /etc/fastapi-dls
cat <<EOF >/etc/fastapi-dls/env
# Adjust DLS_URL as needed (accessing from LAN won't work with 127.0.0.1)
DLS_URL=127.0.0.1
DLS_PORT=443
LEASE_EXPIRE_DAYS=90
DATABASE=sqlite:////opt/fastapi-dls/app/db.sqlite

EOF
```

**Create service**

```shell
cat <<EOF >/etc/systemd/system/fastapi-dls.service
[Unit]
Description=Service for fastapi-dls vGPU licensing service
After=network.target

[Service]
User=www-data
AmbientCapabilities=CAP_NET_BIND_SERVICE
WorkingDirectory=/opt/fastapi-dls/app
EnvironmentFile=/etc/fastapi-dls/env
ExecStart=/opt/fastapi-dls/venv/bin/uvicorn main:app \\
  --env-file /etc/fastapi-dls/env \\
  --host \$DLS_URL --port \$DLS_PORT \\
  --app-dir /opt/fastapi-dls/app \\
  --ssl-keyfile /opt/fastapi-dls/app/cert/webserver.key \\
  --ssl-certfile /opt/fastapi-dls/app/cert/webserver.crt \\
  --proxy-headers
Restart=always
KillSignal=SIGQUIT
Type=simple
NotifyAccess=all

[Install]
WantedBy=multi-user.target

EOF
```

Now you have to run `systemctl daemon-reload`. After that you can start the service
with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.
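
For convenience, the same three commands in order:

```shell
systemctl daemon-reload
systemctl start fastapi-dls.service
systemctl enable fastapi-dls.service
```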

## openSUSE Leap (manual method using `git clone` and python virtual environment)

Tested on `openSUSE Leap 15.4`; openSUSE Tumbleweed may also work.

**Install requirements**

```shell
zypper in -y python310 python3-virtualenv python3-pip
```

**Install FastAPI-DLS**

```shell
BASE_DIR=/opt/fastapi-dls
SERVICE_USER=dls
mkdir -p ${BASE_DIR}
cd ${BASE_DIR}
git clone https://git.collinwebdesigns.de/oscar.krause/fastapi-dls .
python3.10 -m venv venv
source venv/bin/activate
pip install -r requirements.txt
deactivate
useradd -r ${SERVICE_USER} -M -d /opt/fastapi-dls
chown -R ${SERVICE_USER} ${BASE_DIR}
```

**Create keypair and webserver certificate**

```shell
CERT_DIR=${BASE_DIR}/app/cert
SERVICE_USER=dls
mkdir ${CERT_DIR}
cd ${CERT_DIR}
# create instance private and public key for signing JWTs
openssl genrsa -out ${CERT_DIR}/instance.private.pem 2048
openssl rsa -in ${CERT_DIR}/instance.private.pem -outform PEM -pubout -out ${CERT_DIR}/instance.public.pem
# create ssl certificate for the integrated webserver (uvicorn) - because clients rely on ssl
openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout ${CERT_DIR}/webserver.key -out ${CERT_DIR}/webserver.crt
chown -R ${SERVICE_USER} ${CERT_DIR}
```

**Test Service**

This is only to test whether the service starts successfully.

```shell
BASE_DIR=/opt/fastapi-dls
SERVICE_USER=dls
cd ${BASE_DIR}
sudo -u ${SERVICE_USER} ${BASE_DIR}/venv/bin/uvicorn main:app --app-dir=${BASE_DIR}/app
# or
su - ${SERVICE_USER} -c "${BASE_DIR}/venv/bin/uvicorn main:app --app-dir=${BASE_DIR}/app"
```

**Create config file**

```shell
BASE_DIR=/opt/fastapi-dls
mkdir /etc/fastapi-dls
cat <<EOF >/etc/fastapi-dls/env
# Adjust DLS_URL as needed (accessing from LAN won't work with 127.0.0.1)
DLS_URL=127.0.0.1
DLS_PORT=443
LEASE_EXPIRE_DAYS=90
DATABASE=sqlite:///${BASE_DIR}/app/db.sqlite

EOF
```

**Create service**

```shell
BASE_DIR=/opt/fastapi-dls
SERVICE_USER=dls
cat <<EOF >/etc/systemd/system/fastapi-dls.service
[Unit]
Description=Service for fastapi-dls vGPU licensing service
After=network.target

[Service]
User=${SERVICE_USER}
AmbientCapabilities=CAP_NET_BIND_SERVICE
WorkingDirectory=${BASE_DIR}/app
EnvironmentFile=/etc/fastapi-dls/env
ExecStart=${BASE_DIR}/venv/bin/uvicorn main:app \\
  --env-file /etc/fastapi-dls/env \\
  --host \$DLS_URL --port \$DLS_PORT \\
  --app-dir ${BASE_DIR}/app \\
  --ssl-keyfile ${BASE_DIR}/app/cert/webserver.key \\
  --ssl-certfile ${BASE_DIR}/app/cert/webserver.crt \\
  --proxy-headers
Restart=always
KillSignal=SIGQUIT
Type=simple
NotifyAccess=all

[Install]
WantedBy=multi-user.target

EOF
```

Now you have to run `systemctl daemon-reload`. After that you can start the service
with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.

## Debian/Ubuntu (using `dpkg`)

Packages are available here:

- [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/packages)

Successfully tested with:

- Debian 12 (Bookworm) (works, but not recommended because it is currently in *testing* state)
- Ubuntu 22.10 (Kinetic Kudu)

Not working with:

- Debian 11 (Bullseye) and lower (missing `python-jose` dependency)
- Ubuntu 22.04 (Jammy Jellyfish) (not supported as of 15.01.2023 due to a [fastapi - uvicorn version mismatch](https://bugs.launchpad.net/ubuntu/+source/fastapi/+bug/1970557))

**Run this on your server instance**

First go to the [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/packages) and select your
desired package. Then download and install it:

```shell
apt-get update
FILENAME=/opt/fastapi-dls.deb
wget -O $FILENAME <download-url>
dpkg -i $FILENAME
apt-get install -f --fix-missing
```

Start with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.
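
To verify that the unit came up (generic systemd/journald commands):

```shell
systemctl status fastapi-dls.service
journalctl -u fastapi-dls.service -f
```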

## ArchLinux (using `pacman`)

**Shout out to `samicrusader`, who created the build file for ArchLinux!**

Packages are available here:

- [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/packages)

```shell
pacman -Sy
FILENAME=/opt/fastapi-dls.pkg.tar.zst

curl -o $FILENAME <download-url>
# or
wget -O $FILENAME <download-url>

pacman -U --noconfirm $FILENAME
```

Start with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.

## unRAID

1. Download [this xml file](.UNRAID/FastAPI-DLS.xml)
2. Put it in `/boot/config/plugins/dockerMan/templates-user/`
3. Go to the Docker page, scroll down to `Add Container`, click on the Template list and choose `FastAPI-DLS`
4. Open a terminal/ssh session and follow the instructions in the overview description
5. Set up your container `IP`, `Port`, `DLS_URL` and `DLS_PORT`
6. Apply and let it boot up

*Unraid users must also make sure they have Host access to custom networks enabled if unRAID is the vGPU guest.*

Continue [here](#unraid-guest) for docker guest setup.

## Let's Encrypt Certificate (optional)

If you're using an installation via docker, you can use `traefik`. Please refer to their documentation.

For a non-docker installation you can use [`acme.sh`](https://github.com/acmesh-official/acme.sh). After the first
success you have to replace `--issue` with `--renew`.
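
A rough sketch with `acme.sh` in standalone mode (the domain is a placeholder, and the target paths are an assumption; they must match the `--ssl-keyfile`/`--ssl-certfile` paths your service actually uses):

```shell
# first issuance (standalone HTTP-01 challenge)
acme.sh --issue --standalone -d dls.example.org

# copy key and certificate to where fastapi-dls expects them
acme.sh --install-cert -d dls.example.org \
  --key-file /etc/fastapi-dls/webserver.key \
  --fullchain-file /etc/fastapi-dls/webserver.crt \
  --reloadcmd "systemctl restart fastapi-dls.service"

# later renewals: replace --issue with --renew
acme.sh --renew -d dls.example.org
```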

# Configuration

| Variable               | Default                                | Usage                                                                                                   |
|------------------------|----------------------------------------|---------------------------------------------------------------------------------------------------------|
| `DEBUG`                | `false`                                | Toggles `fastapi` debug mode                                                                            |
| `DLS_URL`              | `localhost`                            | Used in client-token to tell guest driver where dls instance is reachable                              |
| `DLS_PORT`             | `443`                                  | Used in client-token to tell guest driver where dls instance is reachable                              |
| `TOKEN_EXPIRE_DAYS`    | `1`                                    | Client auth-token validity (used to authenticate the client against the api, **not the `.tok` file!**) |
| `LEASE_EXPIRE_DAYS`    | `90`                                   | Lease time in days                                                                                      |
| `LEASE_RENEWAL_PERIOD` | `0.15`                                 | The percentage of the lease period that must elapse before a licensed client can renew a license \*1   |
| `DATABASE`             | `sqlite:///db.sqlite`                  | See [official SQLAlchemy docs](https://docs.sqlalchemy.org/en/14/core/engines.html)                    |
| `CORS_ORIGINS`         | `https://{DLS_URL}`                    | Sets `Access-Control-Allow-Origin` header (comma separated string) \*2                                  |
| `SITE_KEY_XID`         | `00000000-0000-0000-0000-000000000000` | Site identification uuid                                                                                |
| `INSTANCE_REF`         | `10000000-0000-0000-0000-000000000001` | Instance identification uuid                                                                            |
| `ALLOTMENT_REF`        | `20000000-0000-0000-0000-000000000001` | Allotment identification uuid                                                                           |
| `INSTANCE_KEY_RSA`     | `<app-dir>/cert/instance.private.pem`  | Site-wide private RSA key for signing JWTs \*3                                                          |
| `INSTANCE_KEY_PUB`     | `<app-dir>/cert/instance.public.pem`   | Site-wide public key \*3                                                                                |

\*1 For example, if the lease period is one day and the renewal period is 20%, the client attempts to renew its license
every 4.8 hours. If network connectivity is lost, the loss of connectivity is detected during license renewal and the
client has 19.2 hours in which to re-establish connectivity before its license expires.

\*2 Always use `https`, since guest-drivers only support secure connections!

\*3 If you recreate the instance keys you need to **recreate the client-token for each guest**!
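
To make the renewal arithmetic from \*1 concrete (plain shell, using `bc`):

```shell
LEASE_HOURS=24          # lease period of one day, as in the example
RENEWAL_PERIOD=0.20     # 20 % renewal period
echo "renew every $(echo "$LEASE_HOURS * $RENEWAL_PERIOD" | bc) hours"            # 4.80
echo "grace window of $(echo "$LEASE_HOURS * (1 - $RENEWAL_PERIOD)" | bc) hours"  # 19.20
```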

# Setup (Client)

Successfully tested with these package versions:

| vGPU Software | vGPU Manager | Linux Driver | Windows Driver | Release Date  |
|---------------|--------------|--------------|----------------|---------------|
| `15.2`        | `525.105.14` | `525.105.17` | `528.89`       | March 2023    |
| `15.1`        | `525.85.07`  | `525.85.05`  | `528.24`       | January 2023  |
| `15.0`        | `525.60.12`  | `525.60.13`  | `527.41`       | December 2022 |
| `14.4`        | `510.108.03` | `510.108.03` | `514.08`       | December 2022 |
| `14.3`        | `510.108.03` | `510.108.03` | `513.91`       | November 2022 |

- https://docs.nvidia.com/grid/index.html

## Linux

Download the *client-token* and place it into `/etc/nvidia/ClientConfigToken`:

```shell
curl --insecure -L -X GET https://<dls-hostname-or-ip>/-/client-token -o /etc/nvidia/ClientConfigToken/client_configuration_token_$(date '+%d-%m-%Y-%H-%M-%S').tok
# or
wget --no-check-certificate -O /etc/nvidia/ClientConfigToken/client_configuration_token_$(date '+%d-%m-%Y-%H-%M-%S').tok https://<dls-hostname-or-ip>/-/client-token
```

Restart the `nvidia-gridd` service:

```shell
service nvidia-gridd restart
```

Check licensing status:

```shell
nvidia-smi -q | grep "License"
```

Output should be something like:

```text
vGPU Software Licensed Product
    License Status                    : Licensed (Expiry: YYYY-M-DD hh:mm:ss GMT)
```

Done. For more information check the [troubleshoot section](#troubleshoot).

## Windows

**Power-Shell** (run as administrator!)

Download the *client-token* and place it into `C:\Program Files\NVIDIA Corporation\vGPU Licensing\ClientConfigToken`:

```shell
curl.exe --insecure -L -X GET https://<dls-hostname-or-ip>/-/client-token -o "C:\Program Files\NVIDIA Corporation\vGPU Licensing\ClientConfigToken\client_configuration_token_$($(Get-Date).tostring('dd-MM-yy-hh-mm-ss')).tok"
```

Restart the `NVDisplay.ContainerLocalSystem` service:

```shell
Restart-Service NVDisplay.ContainerLocalSystem
```

Check licensing status:

```shell
& 'nvidia-smi' -q | Select-String "License"
```

Output should be something like:

```text
vGPU Software Licensed Product
    License Status                    : Licensed (Expiry: YYYY-M-DD hh:mm:ss GMT)
```

Done. For more information check the [troubleshoot section](#troubleshoot).

## unRAID Guest

1. Make sure you create the folder on a Linux filesystem (BTRFS/XFS/EXT4...); `/mnt/user/system/nvidia` is recommended (this is where docker and libvirt preferences are saved, so it's a good place to have it)
2. Edit the script to set your `DLS_IP`, `DLS_PORT` and `TOKEN_PATH` properly (see the sketch after this list)
3. Install the `User Scripts` plugin from *Community Apps* (the Apps page, or search for "User Scripts Unraid" if you're not using CA)
4. Go to `Settings > User Scripts > Add New Script`
5. Give it a name (preferably without spaces)
6. Click on the *gear icon* to the left of the script name, then edit the script
7. Paste the script and save
8. Set the schedule to `At First Array Start Only`
9. Click on Apply
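
A minimal sketch of such a startup script (the variable names follow step 2, the values are hypothetical, and the actual script referenced above may differ):

```shell
#!/bin/bash
DLS_IP=192.168.1.10                 # hypothetical, set the ip of your DLS instance
DLS_PORT=443
TOKEN_PATH=/mnt/user/system/nvidia  # folder created in step 1

# fetch a fresh client-token from the DLS instance on array start
curl --insecure -L -X GET "https://${DLS_IP}:${DLS_PORT}/-/client-token" \
  -o "${TOKEN_PATH}/client_configuration_token_$(date '+%d-%m-%Y-%H-%M-%S').tok"
```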

# Endpoints

### `GET /`

Redirects to `/-/readme`.

### `GET /-/health`

Status endpoint, used for *healthcheck*.

### `GET /-/config`

Shows the current runtime environment variables and their values.

### `GET /-/readme`

HTML-rendered README.md.

### `GET /-/manage`

Shows a very basic UI to delete origins or leases.

### `GET /-/origins?leases=false`

Lists registered origins.

| Query Parameter | Default | Usage                                |
|-----------------|---------|--------------------------------------|
| `leases`        | `false` | Include referenced leases per origin |

### `DELETE /-/origins`

Deletes all origins and their leases.

### `GET /-/leases?origin=false`

Lists current leases.

| Query Parameter | Default | Usage                               |
|-----------------|---------|-------------------------------------|
| `origin`        | `false` | Include referenced origin per lease |

### `DELETE /-/lease/{lease_ref}`

Deletes a lease.

### `GET /-/client-token`

Generates a client token (see [installation](#installation)).

### Others

There are many other internal api endpoints for handling authentication and the lease process.
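
For example, to list origins together with their leases (self-signed certificate assumed, hence `--insecure`):

```shell
curl --insecure "https://<dls-hostname-or-ip>/-/origins?leases=true"
```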

# Troubleshoot

**Please make sure that fastapi-dls and your guests use the same timezone!**

## Linux

Logs are available with `journalctl -u nvidia-gridd -f`.

### `Invalid HTTP request received`

This message can be ignored.

- Ref. https://github.com/encode/uvicorn/issues/441

<details>
  <summary>Log example</summary>

```
WARNING:uvicorn.error:Invalid HTTP request received.
Traceback (most recent call last):
  ...
h11._util.RemoteProtocolError: no request line received
```

</details>

## Windows

### Required cipher on Windows Guests (e.g. managed by domain controller with GPO)

### Error on releasing leases on shutdown (can be ignored and/or fixed with reverse proxy)

The driver wants to release its current leases when Windows shuts down. This endpoint needs to be an http endpoint.
The error message can safely be ignored (since we have no license limitation :P) and looks like this:

<details>
  <summary>Log example</summary>

```
<1>:NLS initialized
<1>:License acquired successfully. (Info: 192.168.178.110, NVIDIA RTX Virtual Workstation; Expiry: 2023-3-30 23:0:22 GMT)
<0>:Failed to return license to 192.168.178.110 (Error: Generic network communication failure)
<0>:End Logging
```

#### Log with nginx as reverse proxy (see [docker-compose.yml](docker-compose.yml))

```
<1>:NLS initialized
<2>:NLS initialized
<1>:Valid GRID license not found. GPU features and performance will be fully degraded. To enable full functionality please configure licensing details.
<1>:License acquired successfully. (Info: 192.168.178.33, NVIDIA RTX Virtual Workstation; Expiry: 2023-1-4 16:48:20 GMT)
<2>:Valid GRID license not found. GPU features and performance will be fully degraded. To enable full functionality please configure licensing details.
<2>:License acquired successfully from local trusted store. (Info: 192.168.178.33, NVIDIA RTX Virtual Workstation; Expiry: 2023-1-4 16:48:20 GMT)
<2>:End Logging
<1>:End Logging
<0>:License returned successfully. (Info: 192.168.178.33)
<0>:End Logging
```

</details>

# Credits

Thanks to the vGPU community and everyone who uses this project and reports bugs.

Special thanks to

- @samicrusader, who created the build file for ArchLinux
- @cyrus, who wrote the section for openSUSE
- @midi, who wrote the section for unRAID

**ROADMAP.md** (present on `main` only; the `archlinux-` branch does not have this file):

# Roadmap

I am planning to implement the following features in the future.


## HA - High Availability

Support Failover-Mode (secondary ip address) as in the official DLS.

**Note**: There is no Load-Balancing / Round-Robin HA mode supported! If you want to use that, consider using
Docker-Swarm with a shared/cluster database (e.g. postgres).

*See the [ha branch](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/tree/ha) for the current status.*


## UI - User Interface

Add a user interface to manage origins and leases.

*See the [ui branch](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/tree/ui) for the current status.*


## Config Database

Instead of using environment variables and configuration files and manually creating certificates, store configs and
certificates in the database (like origins and leases). Also, a startup assistant should be provided to prefill
required attributes and create instance certificates. This is more user-friendly and should improve first setup.

**app/main.py** (as on `main`):

```python
# ...
from os.path import join, dirname
from os import getenv as env

from dotenv import load_dotenv
from fastapi import FastAPI
from fastapi.requests import Request
from json import loads as json_loads
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from calendar import timegm
from jose import jws, jwk, jwt, JWTError
from jose.constants import ALGORITHMS
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from util import load_key, load_file
from orm import Origin, Lease, init as db_init, migrate

load_dotenv('../version.env')

TZ = datetime.now().astimezone().tzinfo

VERSION, COMMIT, DEBUG = env('VERSION', 'unknown'), env('COMMIT', 'unknown'), bool(env('DEBUG', False))

config = dict(openapi_url=None, docs_url=None, redoc_url=None)  # dict(openapi_url='/-/openapi.json', docs_url='/-/docs', redoc_url='/-/redoc')
app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, **config)
db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
db_init(db), migrate(db)

# everything prefixed with "INSTANCE_*" is used as "SERVICE_INSTANCE_*" or "SI_*" in official dls service
DLS_URL = str(env('DLS_URL', 'localhost'))
DLS_PORT = int(env('DLS_PORT', '443'))
SITE_KEY_XID = str(env('SITE_KEY_XID', '00000000-0000-0000-0000-000000000000'))
INSTANCE_REF = str(env('INSTANCE_REF', '10000000-0000-0000-0000-000000000001'))
ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001'))
INSTANCE_KEY_RSA = load_key(str(env('INSTANCE_KEY_RSA', join(dirname(__file__), 'cert/instance.private.pem'))))
INSTANCE_KEY_PUB = load_key(str(env('INSTANCE_KEY_PUB', join(dirname(__file__), 'cert/instance.public.pem'))))
TOKEN_EXPIRE_DELTA = relativedelta(days=int(env('TOKEN_EXPIRE_DAYS', 1)), hours=int(env('TOKEN_EXPIRE_HOURS', 0)))
LEASE_EXPIRE_DELTA = relativedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
LEASE_RENEWAL_PERIOD = float(env('LEASE_RENEWAL_PERIOD', 0.15))
LEASE_RENEWAL_DELTA = timedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
CLIENT_TOKEN_EXPIRE_DELTA = relativedelta(years=12)
CORS_ORIGINS = str(env('CORS_ORIGINS', '')).split(',') if (env('CORS_ORIGINS')) else [f'https://{DLS_URL}']

jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
# ...
app.add_middleware(
    CORSMiddleware,
    allow_origins=CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=['*'],
    allow_headers=['*'],
)

logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)


def __get_token(request: Request) -> dict:
    authorization_header = request.headers.get('authorization')
    token = authorization_header.split(' ')[1]
    return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})


@app.get('/', summary='Index')
async def index():
    return RedirectResponse('/-/readme')


@app.get('/-/', summary='* Index')
async def _index():
    return RedirectResponse('/-/readme')


@app.get('/-/health', summary='* Health')
async def _health():
    return JSONr({'status': 'up'})


@app.get('/-/config', summary='* Config', description='returns environment variables.')
async def _config():
    return JSONr({
        'VERSION': str(VERSION),
        'COMMIT': str(COMMIT),
        'DEBUG': str(DEBUG),
        'DLS_URL': str(DLS_URL),
        'DLS_PORT': str(DLS_PORT),
        'SITE_KEY_XID': str(SITE_KEY_XID),
        'INSTANCE_REF': str(INSTANCE_REF),
        'ALLOTMENT_REF': [str(ALLOTMENT_REF)],
        'TOKEN_EXPIRE_DELTA': str(TOKEN_EXPIRE_DELTA),
        'LEASE_EXPIRE_DELTA': str(LEASE_EXPIRE_DELTA),
        'LEASE_RENEWAL_PERIOD': str(LEASE_RENEWAL_PERIOD),
        'CORS_ORIGINS': str(CORS_ORIGINS),
        'TZ': str(TZ),
    })


@app.get('/-/readme', summary='* Readme')
async def _readme():
    from markdown import markdown
    content = load_file('../README.md').decode('utf-8')
    return HTMLr(markdown(text=content, extensions=['tables', 'fenced_code', 'md_in_html', 'nl2br', 'toc']))


@app.get('/-/manage', summary='* Management UI')
async def _manage(request: Request):
    response = '''
    <!DOCTYPE html>
    <html>
        <head>
            <title>FastAPI-DLS Management</title>
        </head>
        <body>
            <button onclick="deleteOrigins()">delete ALL origins and their leases</button>
            <button onclick="deleteLease()">delete specific lease</button>

            <script>
                function deleteOrigins() {
                    const response = confirm('Are you sure you want to delete all origins and their leases?');

                    if (response) {
                        var xhr = new XMLHttpRequest();
                        xhr.open("DELETE", '/-/origins', true);
                        xhr.send();
                    }
                }
                function deleteLease(lease_ref) {
                    if(lease_ref === undefined)
                        lease_ref = window.prompt("Please enter 'lease_ref' which should be deleted");
                    if(lease_ref === null || lease_ref === "")
                        return
                    var xhr = new XMLHttpRequest();
                    xhr.open("DELETE", `/-/lease/${lease_ref}`, true);
                    xhr.send();
                }
            </script>
        </body>
    </html>
    '''
    return HTMLr(response)


@app.get('/-/origins', summary='* Origins')
async def _origins(request: Request, leases: bool = False):
    session = sessionmaker(bind=db)()
    response = []
    for origin in session.query(Origin).all():
        x = origin.serialize()
        if leases:
            serialize = dict(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA)
            x['leases'] = list(map(lambda _: _.serialize(**serialize), Lease.find_by_origin_ref(db, origin.origin_ref)))
        response.append(x)
    session.close()
    return JSONr(response)


@app.delete('/-/origins', summary='* Origins')
async def _origins_delete(request: Request):
    Origin.delete(db)
    return Response(status_code=201)


@app.get('/-/leases', summary='* Leases')
async def _leases(request: Request, origin: bool = False):
    session = sessionmaker(bind=db)()
    response = []
    for lease in session.query(Lease).all():
        serialize = dict(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA)
        x = lease.serialize(**serialize)
        if origin:
            lease_origin = session.query(Origin).filter(Origin.origin_ref == lease.origin_ref).first()
            if lease_origin is not None:
                x['origin'] = lease_origin.serialize()
        response.append(x)
    session.close()
    return JSONr(response)


@app.delete('/-/lease/{lease_ref}', summary='* Lease')
async def _lease_delete(request: Request, lease_ref: str):
    if Lease.delete(db, lease_ref) == 1:
        return Response(status_code=201)
    return JSONr(status_code=404, content={'status': 404, 'detail': 'lease not found'})

# venv/lib/python3.9/site-packages/nls_core_service_instance/service_instance_token_manager.py
@app.get('/-/client-token', summary='* Client-Token', description='creates a new messenger token for this service instance')
async def _client_token():
    cur_time = datetime.utcnow()
    exp_time = cur_time + CLIENT_TOKEN_EXPIRE_DELTA

    payload = {
        "jti": str(uuid4()),
        # ...
        "nbf": timegm(cur_time.timetuple()),
        "exp": timegm(exp_time.timetuple()),
        "update_mode": "ABSOLUTE",
        "scope_ref_list": [ALLOTMENT_REF],
        "fulfillment_class_ref_list": [],
        "service_instance_configuration": {
            "nls_service_instance_ref": INSTANCE_REF,
            # ...
        },
        # ...
    }

    content = jws.sign(payload, key=jwt_encode_key, headers=None, algorithm=ALGORITHMS.RS256)

    response = StreamingResponse(iter([content]), media_type="text/plain")
    filename = f'client_configuration_token_{datetime.now().strftime("%d-%m-%y-%H-%M-%S")}.tok'
    response.headers["Content-Disposition"] = f'attachment; filename={filename}'

    return response

# venv/lib/python3.9/site-packages/nls_services_auth/test/test_origins_controller.py
# {"candidate_origin_ref":"00112233-4455-6677-8899-aabbccddeeff","environment":{"fingerprint":{"mac_address_list":["ff:ff:ff:ff:ff:ff"]},"hostname":"my-hostname","ip_address_list":["192.168.178.123","fe80::","fe80::1%enp6s18"],"guest_driver_version":"510.85.02","os_platform":"Debian GNU/Linux 11 (bullseye) 11","os_version":"11 (bullseye)"},"registration_pending":false,"update_pending":false}
@app.post('/auth/v1/origin', description='find or create an origin')
async def auth_v1_origin(request: Request):
    j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()

    origin_ref = j.get('candidate_origin_ref')
    logging.info(f'> [ origin ]: {origin_ref}: {j}')

    data = Origin(
        origin_ref=origin_ref,
        hostname=j.get('environment').get('hostname'),
        guest_driver_version=j.get('environment').get('guest_driver_version'),
        os_platform=j.get('environment').get('os_platform'), os_version=j.get('environment').get('os_version'),
    )

    Origin.create_or_update(db, data)

    response = {
        "origin_ref": origin_ref,
        "environment": j.get('environment'),
        "svc_port_set_list": None,
        "node_url_list": None,
        "node_query_order": None,
        # ...
        "sync_timestamp": cur_time.isoformat()
    }

    return JSONr(response)


# venv/lib/python3.9/site-packages/nls_services_auth/test/test_origins_controller.py
# { "environment" : { "guest_driver_version" : "guest_driver_version", "hostname" : "myhost", "ip_address_list" : [ "192.168.1.129" ], "os_version" : "os_version", "os_platform" : "os_platform", "fingerprint" : { "mac_address_list" : [ "e4:b9:7a:e5:7b:ff" ] }, "host_driver_version" : "host_driver_version" }, "origin_ref" : "00112233-4455-6677-8899-aabbccddeeff" }
@app.post('/auth/v1/origin/update', description='update an origin evidence')
async def auth_v1_origin_update(request: Request):
    j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()

    origin_ref = j.get('origin_ref')
    logging.info(f'> [ update ]: {origin_ref}: {j}')

    data = Origin(
        origin_ref=origin_ref,
        hostname=j.get('environment').get('hostname'),
        guest_driver_version=j.get('environment').get('guest_driver_version'),
        os_platform=j.get('environment').get('os_platform'), os_version=j.get('environment').get('os_version'),
    )

    Origin.create_or_update(db, data)

    response = {
        "environment": j.get('environment'),
        "prompts": None,
        "sync_timestamp": cur_time.isoformat()
    }

    return JSONr(response)

# venv/lib/python3.9/site-packages/nls_services_auth/test/test_auth_controller.py
# venv/lib/python3.9/site-packages/nls_core_auth/auth.py - CodeResponse
# {"code_challenge":"...","origin_ref":"00112233-4455-6677-8899-aabbccddeeff"}
@app.post('/auth/v1/code', description='get an authorization code')
async def auth_v1_code(request: Request):
    j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()

    origin_ref = j.get('origin_ref')
    logging.info(f'> [ code ]: {origin_ref}: {j}')

    delta = relativedelta(minutes=15)
    expires = cur_time + delta

    payload = {
        'iat': timegm(cur_time.timetuple()),
        'exp': timegm(expires.timetuple()),
        'challenge': j.get('code_challenge'),
        'origin_ref': j.get('origin_ref'),
        'key_ref': SITE_KEY_XID,
        'kid': SITE_KEY_XID
    }
    # ...
    response = {
        # ...
        "prompts": None
    }

    return JSONr(response)


# venv/lib/python3.9/site-packages/nls_services_auth/test/test_auth_controller.py
# venv/lib/python3.9/site-packages/nls_core_auth/auth.py - TokenResponse
# {"auth_code":"...","code_verifier":"..."}
@app.post('/auth/v1/token', description='exchange auth code and verifier for token')
async def auth_v1_token(request: Request):
    j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()

    try:
        payload = jwt.decode(token=j.get('auth_code'), key=jwt_decode_key)
    except JWTError as e:
        return JSONr(status_code=400, content={'status': 400, 'title': 'invalid token', 'detail': str(e)})

    origin_ref = payload.get('origin_ref')
    logging.info(f'> [ auth ]: {origin_ref}: {j}')

    # validate the code challenge
    challenge = b64enc(sha256(j.get('code_verifier').encode('utf-8')).digest()).rstrip(b'=').decode('utf-8')
    if payload.get('challenge') != challenge:
        return JSONr(status_code=401, content={'status': 401, 'detail': 'expected challenge did not match verifier'})

    access_expires_on = cur_time + TOKEN_EXPIRE_DELTA
    # ...
    response = {
        # ...
        "sync_timestamp": cur_time.isoformat(),
    }

    return JSONr(response)
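
# Aside: the code/token exchange above mirrors the idea of PKCE's "S256" method
# (RFC 7636): the client sends a code_challenge to /auth/v1/code and later proves
# possession by sending the matching code_verifier to /auth/v1/token, where the
# same hash is recomputed. A tiny standalone sketch of the relation (the verifier
# is hypothetical; b64enc/sha256 are presumably imported in the elided header):
#
#     from base64 import b64encode as b64enc
#     from hashlib import sha256
#
#     code_verifier = 'example-verifier'
#     code_challenge = b64enc(sha256(code_verifier.encode('utf-8')).digest()).rstrip(b'=').decode('utf-8')
#     # the server compares this value against the 'challenge' claim
#     # stored inside the signed auth_code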
|
|
||||||
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
|
# {'fulfillment_context': {'fulfillment_class_ref_list': []}, 'lease_proposal_list': [{'license_type_qualifiers': {'count': 1}, 'product': {'name': 'NVIDIA RTX Virtual Workstation'}}], 'proposal_evaluation_mode': 'ALL_OF', 'scope_ref_list': ['00112233-4455-6677-8899-aabbccddeeff']}
|
||||||
@app.post('/leasing/v1/lessor', description='request multiple leases (borrow) for current origin')
|
@app.post('/leasing/v1/lessor')
|
||||||
async def leasing_v1_lessor(request: Request):
|
async def leasing_v1_lessor(request: Request):
|
||||||
j, token, cur_time = json_loads((await request.body()).decode('utf-8')), __get_token(request), datetime.utcnow()
|
j, token, cur_time = json.loads((await request.body()).decode('utf-8')), get_token(request), datetime.utcnow()
|
||||||
|
|
||||||
try:
|
origin_ref = token['origin_ref']
|
||||||
token = __get_token(request)
|
scope_ref_list = j['scope_ref_list']
|
||||||
except JWTError:
|
|
||||||
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
|
|
||||||
|
|
||||||
origin_ref = token.get('origin_ref')
|
|
||||||
scope_ref_list = j.get('scope_ref_list')
|
|
||||||
logging.info(f'> [ create ]: {origin_ref}: create leases for scope_ref_list {scope_ref_list}')
|
logging.info(f'> [ create ]: {origin_ref}: create leases for scope_ref_list {scope_ref_list}')
|
||||||
|
|
||||||
lease_result_list = []
|
lease_result_list = []
|
||||||
for scope_ref in scope_ref_list:
|
for scope_ref in scope_ref_list:
|
||||||
# if scope_ref not in [ALLOTMENT_REF]:
|
|
||||||
# return JSONr(status_code=500, detail=f'no service instances found for scopes: ["{scope_ref}"]')
|
|
||||||
|
|
||||||
lease_ref = str(uuid4())
|
|
||||||
expires = cur_time + LEASE_EXPIRE_DELTA
|
expires = cur_time + LEASE_EXPIRE_DELTA
|
||||||
lease_result_list.append({
|
lease_result_list.append({
|
||||||
"ordinal": 0,
|
"ordinal": 0,
|
||||||
# https://docs.nvidia.com/license-system/latest/nvidia-license-system-user-guide/index.html
|
# https://docs.nvidia.com/license-system/latest/nvidia-license-system-user-guide/index.html
|
||||||
"lease": {
|
"lease": {
|
||||||
"ref": lease_ref,
|
"ref": scope_ref,
|
||||||
"created": cur_time.isoformat(),
|
"created": cur_time.isoformat(),
|
||||||
"expires": expires.isoformat(),
|
"expires": expires.isoformat(),
|
||||||
"recommended_lease_renewal": LEASE_RENEWAL_PERIOD,
|
# The percentage of the lease period that must elapse before a licensed client can renew a license
|
||||||
|
"recommended_lease_renewal": 0.15,
|
||||||
"offline_lease": "true",
|
"offline_lease": "true",
|
||||||
"license_type": "CONCURRENT_COUNTED_SINGLE"
|
"license_type": "CONCURRENT_COUNTED_SINGLE"
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
data = Lease(origin_ref=origin_ref, lease_ref=lease_ref, lease_created=cur_time, lease_expires=expires)
|
data = Lease(origin_ref=origin_ref, lease_ref=scope_ref, lease_created=cur_time, lease_expires=expires)
|
||||||
Lease.create_or_update(db, data)
|
Lease.create_or_update(db, data)
|
||||||
|
|
||||||
response = {
|
response = {
|
||||||
@ -414,16 +304,16 @@ async def leasing_v1_lessor(request: Request):
|
|||||||
"prompts": None
|
"prompts": None
|
||||||
}
|
}
|
||||||
|
|
||||||
return JSONr(response)
|
return JSONResponse(response)
|
||||||
|
|
||||||
|
|
||||||
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
|
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
|
||||||
# venv/lib/python3.9/site-packages/nls_dal_service_instance_dls/schema/service_instance/V1_0_21__product_mapping.sql
|
# venv/lib/python3.9/site-packages/nls_dal_service_instance_dls/schema/service_instance/V1_0_21__product_mapping.sql
|
||||||
@app.get('/leasing/v1/lessor/leases', description='get active leases for current origin')
|
@app.get('/leasing/v1/lessor/leases')
|
||||||
async def leasing_v1_lessor_lease(request: Request):
|
async def leasing_v1_lessor_lease(request: Request):
|
||||||
token, cur_time = __get_token(request), datetime.utcnow()
|
token, cur_time = get_token(request), datetime.utcnow()
|
||||||
|
|
||||||
origin_ref = token.get('origin_ref')
|
origin_ref = token['origin_ref']
|
||||||
|
|
||||||
active_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
|
active_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
|
||||||
logging.info(f'> [ leases ]: {origin_ref}: found {len(active_lease_list)} active leases')
|
logging.info(f'> [ leases ]: {origin_ref}: found {len(active_lease_list)} active leases')
|
||||||
@ -434,27 +324,26 @@ async def leasing_v1_lessor_lease(request: Request):
|
|||||||
"prompts": None
|
"prompts": None
|
||||||
}
|
}
|
||||||
|
|
||||||
return JSONr(response)
|
return JSONResponse(response)
|
||||||
|
|
||||||
|
|
||||||
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_single_controller.py
|
|
||||||
# venv/lib/python3.9/site-packages/nls_core_lease/lease_single.py
|
# venv/lib/python3.9/site-packages/nls_core_lease/lease_single.py
|
||||||
@app.put('/leasing/v1/lease/{lease_ref}', description='renew a lease')
|
@app.put('/leasing/v1/lease/{lease_ref}')
|
||||||
async def leasing_v1_lease_renew(request: Request, lease_ref: str):
|
async def leasing_v1_lease_renew(request: Request, lease_ref: str):
|
||||||
token, cur_time = __get_token(request), datetime.utcnow()
|
token, cur_time = get_token(request), datetime.utcnow()
|
||||||
|
|
||||||
origin_ref = token.get('origin_ref')
|
origin_ref = token['origin_ref']
|
||||||
logging.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')
|
logging.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')
|
||||||
|
|
||||||
entity = Lease.find_by_origin_ref_and_lease_ref(db, origin_ref, lease_ref)
|
entity = Lease.find_by_origin_ref_and_lease_ref(db, origin_ref, lease_ref)
|
||||||
if entity is None:
|
if entity is None:
|
||||||
return JSONr(status_code=404, content={'status': 404, 'detail': 'requested lease not available'})
|
raise HTTPException(status_code=404, detail='requested lease not available')
|
||||||
|
|
||||||
expires = cur_time + LEASE_EXPIRE_DELTA
|
expires = cur_time + LEASE_EXPIRE_DELTA
|
||||||
response = {
|
response = {
|
||||||
"lease_ref": lease_ref,
|
"lease_ref": lease_ref,
|
||||||
"expires": expires.isoformat(),
|
"expires": expires.isoformat(),
|
||||||
"recommended_lease_renewal": LEASE_RENEWAL_PERIOD,
|
"recommended_lease_renewal": 0.16,
|
||||||
"offline_lease": True,
|
"offline_lease": True,
|
||||||
"prompts": None,
|
"prompts": None,
|
||||||
"sync_timestamp": cur_time.isoformat(),
|
"sync_timestamp": cur_time.isoformat(),
|
||||||
@ -462,41 +351,14 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
|
|||||||
|
|
||||||
Lease.renew(db, entity, expires, cur_time)
|
Lease.renew(db, entity, expires, cur_time)
|
||||||
|
|
||||||
return JSONr(response)
|
return JSONResponse(response)
|
||||||
|
|
||||||
|
|
||||||

# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_single_controller.py
@app.delete('/leasing/v1/lease/{lease_ref}', description='release (return) a lease')
async def leasing_v1_lease_delete(request: Request, lease_ref: str):
    token, cur_time = __get_token(request), datetime.utcnow()

    origin_ref = token.get('origin_ref')
    logging.info(f'> [ return ]: {origin_ref}: return {lease_ref}')

    entity = Lease.find_by_lease_ref(db, lease_ref)
    if entity is None:
        return JSONr(status_code=404, content={'status': 404, 'detail': 'requested lease not available'})
    if entity.origin_ref != origin_ref:
        return JSONr(status_code=403, content={'status': 403, 'detail': 'access or operation forbidden'})

    if Lease.delete(db, lease_ref) == 0:
        return JSONr(status_code=404, content={'status': 404, 'detail': 'lease not found'})

    response = {
        "lease_ref": lease_ref,
        "prompts": None,
        "sync_timestamp": cur_time.isoformat(),
    }

    return JSONr(response)


# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
@app.delete('/leasing/v1/lessor/leases', description='release all leases')
async def leasing_v1_lessor_lease_remove(request: Request):
    token, cur_time = __get_token(request), datetime.utcnow()

    origin_ref = token.get('origin_ref')

    released_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
    deletions = Lease.cleanup(db, origin_ref)
    # ...
    response = {
        # ...
        "prompts": None
    }

    return JSONr(response)


@app.post('/leasing/v1/lessor/shutdown', description='shutdown all leases')
async def leasing_v1_lessor_shutdown(request: Request):
    j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()

    token = j.get('token')
    token = jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
    origin_ref = token.get('origin_ref')

    released_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
    deletions = Lease.cleanup(db, origin_ref)
    logging.info(f'> [ shutdown ]: {origin_ref}: removed {deletions} leases')

    response = {
        "released_lease_list": released_lease_list,
        "release_failure_list": None,
        "sync_timestamp": cur_time.isoformat(),
        "prompts": None
    }

    return JSONr(response)


@app.on_event('startup')
async def app_on_startup():
    logger.info(f'''
    Using timezone: {str(TZ)}. Make sure this is correct and matches your clients!

    Your clients renew their license every {str(Lease.calculate_renewal(LEASE_RENEWAL_PERIOD, LEASE_RENEWAL_DELTA))}.
    If the renewal fails, the license is valid for {str(LEASE_RENEWAL_DELTA)}.

    Your client-token file (.tok) is valid for {str(CLIENT_TOKEN_EXPIRE_DELTA)}.
    ''')


if __name__ == '__main__':
    # ...
```
app/orm.py (148 lines changed)
@@ -1,9 +1,9 @@
-from datetime import datetime, timedelta
+import datetime
-from dateutil.relativedelta import relativedelta

-from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text
+from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, UniqueConstraint, update, and_, delete, inspect
+from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.engine import Engine
-from sqlalchemy.orm import sessionmaker, declarative_base
+from sqlalchemy.orm import sessionmaker

Base = declarative_base()
@@ -13,7 +13,6 @@ class Origin(Base):

    origin_ref = Column(CHAR(length=36), primary_key=True, unique=True, index=True)  # uuid4
-    # service_instance_xid = Column(CHAR(length=36), nullable=False, index=True)  # uuid4 # not necessary, we only support one service_instance_xid ('INSTANCE_REF')
    hostname = Column(VARCHAR(length=256), nullable=True)
    guest_driver_version = Column(VARCHAR(length=10), nullable=True)
    os_platform = Column(VARCHAR(length=256), nullable=True)

@@ -22,16 +21,6 @@ class Origin(Base):
    def __repr__(self):
        return f'Origin(origin_ref={self.origin_ref}, hostname={self.hostname})'

-    def serialize(self) -> dict:
-        return {
-            'origin_ref': self.origin_ref,
-            # 'service_instance_xid': self.service_instance_xid,
-            'hostname': self.hostname,
-            'guest_driver_version': self.guest_driver_version,
-            'os_platform': self.os_platform,
-            'os_version': self.os_version,
-        }
-
    @staticmethod
    def create_statement(engine: Engine):
        from sqlalchemy.schema import CreateTable
@@ -39,41 +28,29 @@ class Origin(Base):

    @staticmethod
    def create_or_update(engine: Engine, origin: "Origin"):
-        session = sessionmaker(bind=engine)()
+        session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
        entity = session.query(Origin).filter(Origin.origin_ref == origin.origin_ref).first()
+        print(entity)
        if entity is None:
            session.add(origin)
        else:
-            x = dict(
+            values = dict(
                hostname=origin.hostname,
                guest_driver_version=origin.guest_driver_version,
                os_platform=origin.os_platform,
-                os_version=origin.os_version
+                os_version=origin.os_version,
            )
-            session.execute(update(Origin).where(Origin.origin_ref == origin.origin_ref).values(**x))
+            session.execute(update(Origin).where(Origin.origin_ref == origin.origin_ref).values(**values))
-        session.commit()
        session.flush()
        session.close()

-    @staticmethod
-    def delete(engine: Engine, origin_refs: [str] = None) -> int:
-        session = sessionmaker(bind=engine)()
-        if origin_refs is None:
-            deletions = session.query(Origin).delete()
-        else:
-            # filter must use Column.in_(); a plain `in` test would not build valid SQL
-            deletions = session.query(Origin).filter(Origin.origin_ref.in_(origin_refs)).delete()
-        session.commit()
-        session.close()
-        return deletions
class Lease(Base):
    __tablename__ = "lease"

+    origin_ref = Column(CHAR(length=36), ForeignKey(Origin.origin_ref), primary_key=True, nullable=False, index=True)  # uuid4
    lease_ref = Column(CHAR(length=36), primary_key=True, nullable=False, index=True)  # uuid4
-    origin_ref = Column(CHAR(length=36), ForeignKey(Origin.origin_ref, ondelete='CASCADE'), nullable=False, index=True)  # uuid4
-    # scope_ref = Column(CHAR(length=36), nullable=False, index=True)  # uuid4 # not necessary, we only support one scope_ref ('ALLOTMENT_REF')
    lease_created = Column(DATETIME(), nullable=False)
    lease_expires = Column(DATETIME(), nullable=False)
    lease_updated = Column(DATETIME(), nullable=False)
@@ -81,20 +58,6 @@ class Lease(Base):
    def __repr__(self):
        return f'Lease(origin_ref={self.origin_ref}, lease_ref={self.lease_ref}, expires={self.lease_expires})'

-    def serialize(self, renewal_period: float, renewal_delta: timedelta) -> dict:
-        lease_renewal = int(Lease.calculate_renewal(renewal_period, renewal_delta).total_seconds())
-        lease_renewal = self.lease_updated + relativedelta(seconds=lease_renewal)
-
-        return {
-            'lease_ref': self.lease_ref,
-            'origin_ref': self.origin_ref,
-            # 'scope_ref': self.scope_ref,
-            'lease_created': self.lease_created.isoformat(),
-            'lease_expires': self.lease_expires.isoformat(),
-            'lease_updated': self.lease_updated.isoformat(),
-            'lease_renewal': lease_renewal.isoformat(),
-        }
-
    @staticmethod
    def create_statement(engine: Engine):
        from sqlalchemy.schema import CreateTable
@@ -102,86 +65,46 @@ class Lease(Base):

    @staticmethod
    def create_or_update(engine: Engine, lease: "Lease"):
-        session = sessionmaker(bind=engine)()
+        session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
-        entity = session.query(Lease).filter(Lease.lease_ref == lease.lease_ref).first()
+        entity = session.query(Lease).filter(and_(Lease.origin_ref == lease.origin_ref, Lease.lease_ref == lease.lease_ref)).first()
        if entity is None:
            if lease.lease_updated is None:
                lease.lease_updated = lease.lease_created
            session.add(lease)
        else:
-            x = dict(origin_ref=lease.origin_ref, lease_expires=lease.lease_expires, lease_updated=lease.lease_updated)
+            values = dict(lease_expires=lease.lease_expires, lease_updated=lease.lease_updated)
-            session.execute(update(Lease).where(Lease.lease_ref == lease.lease_ref).values(**x))
+            session.execute(update(Lease).where(and_(Lease.origin_ref == lease.origin_ref, Lease.lease_ref == lease.lease_ref)).values(**values))
-        session.commit()
        session.flush()
        session.close()

    @staticmethod
    def find_by_origin_ref(engine: Engine, origin_ref: str) -> ["Lease"]:
-        session = sessionmaker(bind=engine)()
+        session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
        entities = session.query(Lease).filter(Lease.origin_ref == origin_ref).all()
        session.close()
        return entities

-    @staticmethod
-    def find_by_lease_ref(engine: Engine, lease_ref: str) -> "Lease":
-        session = sessionmaker(bind=engine)()
-        entity = session.query(Lease).filter(Lease.lease_ref == lease_ref).first()
-        session.close()
-        return entity

    @staticmethod
    def find_by_origin_ref_and_lease_ref(engine: Engine, origin_ref: str, lease_ref: str) -> "Lease":
-        session = sessionmaker(bind=engine)()
+        session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
        entity = session.query(Lease).filter(and_(Lease.origin_ref == origin_ref, Lease.lease_ref == lease_ref)).first()
        session.close()
        return entity

    @staticmethod
-    def renew(engine: Engine, lease: "Lease", lease_expires: datetime, lease_updated: datetime):
+    def renew(engine: Engine, lease: "Lease", lease_expires: datetime.datetime, lease_updated: datetime.datetime):
-        session = sessionmaker(bind=engine)()
+        session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
-        x = dict(lease_expires=lease_expires, lease_updated=lease_updated)
+        values = dict(lease_expires=lease.lease_expires, lease_updated=lease.lease_updated)
-        session.execute(update(Lease).where(and_(Lease.origin_ref == lease.origin_ref, Lease.lease_ref == lease.lease_ref)).values(**x))
+        session.execute(update(Lease).where(and_(Lease.origin_ref == lease.origin_ref, Lease.lease_ref == lease.lease_ref)).values(**values))
-        session.commit()
        session.close()

    @staticmethod
    def cleanup(engine: Engine, origin_ref: str) -> int:
-        session = sessionmaker(bind=engine)()
+        session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
        deletions = session.query(Lease).filter(Lease.origin_ref == origin_ref).delete()
-        session.commit()
        session.close()
        return deletions

-    @staticmethod
-    def delete(engine: Engine, lease_ref: str) -> int:
-        session = sessionmaker(bind=engine)()
-        deletions = session.query(Lease).filter(Lease.lease_ref == lease_ref).delete()
-        session.commit()
-        session.close()
-        return deletions
-    @staticmethod
-    def calculate_renewal(renewal_period: float, delta: timedelta) -> timedelta:
-        """
-        import datetime
-        LEASE_RENEWAL_PERIOD = 0.2  # 20%
-        delta = datetime.timedelta(days=1)
-        renew = delta.total_seconds() * LEASE_RENEWAL_PERIOD
-        renew = datetime.timedelta(seconds=renew)
-        expires = delta - renew  # 19.2 hours
-
-        import datetime
-        LEASE_RENEWAL_PERIOD = 0.15  # 15%
-        delta = datetime.timedelta(days=90)
-        renew = delta.total_seconds() * LEASE_RENEWAL_PERIOD
-        renew = datetime.timedelta(seconds=renew)
-        expires = delta - renew  # 76 days, 12:00:00 hours
-        """
-        renew = delta.total_seconds() * renewal_period
-        renew = timedelta(seconds=renew)
-        return renew
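The docstring's numbers can be reproduced standalone; a quick sanity check of the renewal arithmetic (same formula, no ORM involved):

# Standalone check of the renewal window math from the docstring above.
from datetime import timedelta

def calculate_renewal(renewal_period: float, delta: timedelta) -> timedelta:
    return timedelta(seconds=delta.total_seconds() * renewal_period)

print(calculate_renewal(0.20, timedelta(days=1)))   # 4:48:00  -> renew every 4.8 hours
print(calculate_renewal(0.15, timedelta(days=90)))  # 13 days, 12:00:00 -> 76.5 days remain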
def init(engine: Engine):
    tables = [Origin, Lease]

@@ -189,32 +112,5 @@ def init(engine: Engine):
    session = sessionmaker(bind=engine)()
    for table in tables:
        if not db.dialect.has_table(engine.connect(), table.__tablename__):
-            session.execute(text(str(table.create_statement(engine))))
+            session.execute(str(table.create_statement(engine)))
-    session.commit()
    session.close()

-def migrate(engine: Engine):
-    db = inspect(engine)
-
-    def upgrade_1_0_to_1_1():
-        x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
-        x = next(_ for _ in x if _['name'] == 'origin_ref')
-        if x['primary_key'] > 0:
-            print('Found old database schema with "origin_ref" as primary-key in "lease" table. Dropping table!')
-            print('  Your leases are recreated on next renewal!')
-            print('  If an error message appears on the client, you can ignore it.')
-            Lease.__table__.drop(bind=engine)
-            init(engine)
-
-    # def upgrade_1_2_to_1_3():
-    #     x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
-    #     x = next((_ for _ in x if _['name'] == 'scope_ref'), None)
-    #     if x is None:
-    #         Lease.scope_ref.compile()
-    #         column_name = Lease.scope_ref.name
-    #         column_type = Lease.scope_ref.type.compile(engine.dialect)
-    #         engine.execute(f'ALTER TABLE "{Lease.__tablename__}" ADD COLUMN "{column_name}" {column_type}')
-
-    upgrade_1_0_to_1_1()
-    # upgrade_1_2_to_1_3()
app/util.py (29 lines changed)
@@ -1,28 +1,21 @@
+try:
+    # Crypto | Cryptodome on Debian
+    from Crypto.PublicKey import RSA
+    from Crypto.PublicKey.RSA import RsaKey
+except ModuleNotFoundError:
+    from Cryptodome.PublicKey import RSA
+    from Cryptodome.PublicKey.RSA import RsaKey

def load_file(filename) -> bytes:
    with open(filename, 'rb') as file:
        content = file.read()
    return content

-def load_key(filename) -> "RsaKey":
+def load_key(filename) -> RsaKey:
-    try:
-        # Crypto | Cryptodome on Debian
-        from Crypto.PublicKey import RSA
-        from Crypto.PublicKey.RSA import RsaKey
-    except ModuleNotFoundError:
-        from Cryptodome.PublicKey import RSA
-        from Cryptodome.PublicKey.RSA import RsaKey
-
    return RSA.import_key(extern_key=load_file(filename), passphrase=None)

-def generate_key() -> "RsaKey":
+def generate_key() -> RsaKey:
-    try:
-        # Crypto | Cryptodome on Debian
-        from Crypto.PublicKey import RSA
-        from Crypto.PublicKey.RSA import RsaKey
-    except ModuleNotFoundError:
-        from Cryptodome.PublicKey import RSA
-        from Cryptodome.PublicKey.RSA import RsaKey
-
    return RSA.generate(bits=2048)
@@ -1,26 +0,0 @@
# Database structure

## `request_routing.service_instance`

| xid                                    | org_name                 |
|----------------------------------------|--------------------------|
| `10000000-0000-0000-0000-000000000000` | `lic-000000000000000000` |

- `xid` is used as `SERVICE_INSTANCE_XID`

## `request_routing.license_allotment_service_instance`

| xid                                    | service_instance_xid                   | license_allotment_xid                  |
|----------------------------------------|----------------------------------------|----------------------------------------|
| `90000000-0000-0000-0000-000000000001` | `10000000-0000-0000-0000-000000000000` | `80000000-0000-0000-0000-000000000001` |

- `xid` is only a primary key and is never used as a foreign key or reference
- `license_allotment_xid` must be used to fetch `xid`s from `request_routing.license_allotment_reference`

## `request_routing.license_allotment_reference`

| xid                                    | license_allotment_xid                  |
|----------------------------------------|----------------------------------------|
| `20000000-0000-0000-0000-000000000001` | `80000000-0000-0000-0000-000000000001` |

- `xid` is used as `scope_ref_list` on token request
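Read together, the three tables chain one UUID to the next. A small sketch of that chain using the sample rows above (plain dicts, just to make the lookup order explicit):

# service_instance -> license_allotment_service_instance -> license_allotment_reference;
# the final xid is what clients send as scope_ref_list on the token request.
service_instance = {
    'xid': '10000000-0000-0000-0000-000000000000',   # SERVICE_INSTANCE_XID
    'org_name': 'lic-000000000000000000',
}
allotment_link = {
    'xid': '90000000-0000-0000-0000-000000000001',   # primary key only, never referenced
    'service_instance_xid': service_instance['xid'],
    'license_allotment_xid': '80000000-0000-0000-0000-000000000001',
}
allotment_reference = {
    'xid': '20000000-0000-0000-0000-000000000001',
    'license_allotment_xid': allotment_link['license_allotment_xid'],
}

scope_ref_list = [allotment_reference['xid']]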
@@ -33,9 +33,6 @@ nvidia-gridd[2986]: License acquired successfully. (Info: license.nvidia.space,

Most variables and configs are stored in `/var/lib/docker/volumes/configurations/_data`.

-Files can be modified with `docker cp <container-id>:/venv/... /opt/localfile/...` and copied back the same way.
-(You may need to fix permissions with `docker exec -u 0 <container-id> chown nonroot:nonroot /venv/...`.)

## Dive / Docker image inspector

- `dive dls:appliance`
@@ -1,120 +0,0 @@
version: '3.9'

x-dls-variables: &dls-variables
  DLS_URL: localhost # REQUIRED, change to your ip or hostname
  DLS_PORT: 443 # must match nginx listen & exposed port
  LEASE_EXPIRE_DAYS: 90
  DATABASE: sqlite:////app/database/db.sqlite
  DEBUG: false

services:
  dls:
    image: collinwebdesigns/fastapi-dls:latest
    restart: always
    environment:
      <<: *dls-variables
    volumes:
      - /etc/timezone:/etc/timezone:ro
      - /opt/docker/fastapi-dls/cert:/app/cert # instance.private.pem, instance.public.pem
      - db:/app/database
    entrypoint: ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--app-dir", "/app", "--proxy-headers"]
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8000/-/health"]
      interval: 10s
      timeout: 5s
      retries: 3
      start_period: 30s
  proxy:
    image: nginx
    ports:
      # these are the ports nginx (!) listens on
      - "80:80" # for "/leasing/v1/lessor/shutdown" used by windows guests, can't be changed!
      - "443:443" # first part must match "DLS_PORT"
    volumes:
      - /etc/timezone:/etc/timezone:ro
      - /opt/docker/fastapi-dls/cert:/opt/cert
    healthcheck:
      test: ["CMD", "curl", "--insecure", "--fail", "https://localhost/-/health"]
      interval: 10s
      timeout: 5s
      retries: 3
      start_period: 30s
    command: |
      bash -c "bash -s <<\"EOF\"
      cat > /etc/nginx/nginx.conf <<\"EON\"
      daemon off;
      user root;
      worker_processes auto;

      events {
        worker_connections 1024;
      }

      http {
        gzip on;
        gzip_disable "msie6";
        include /etc/nginx/mime.types;

        upstream dls-backend {
          server dls:8000; # must match dls listen port
        }

        server {
          listen 443 ssl http2 default_server;
          listen [::]:443 ssl http2 default_server;

          root /var/www/html;
          index index.html;
          server_name _;

          ssl_certificate "/opt/cert/webserver.crt";
          ssl_certificate_key "/opt/cert/webserver.key";
          ssl_session_cache shared:SSL:1m;
          ssl_session_timeout 10m;
          ssl_protocols TLSv1.3 TLSv1.2;
          # ssl_ciphers "ECDHE-ECDSA-CHACHA20-POLY1305";
          # ssl_ciphers PROFILE=SYSTEM;
          ssl_prefer_server_ciphers on;

          location / {
            proxy_set_header Host $$http_host;
            proxy_set_header X-Real-IP $$remote_addr;
            proxy_set_header X-Forwarded-For $$proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $$scheme;
            proxy_pass http://dls-backend$$request_uri;
          }

          location = /-/health {
            access_log off;
            add_header 'Content-Type' 'application/json';
            return 200 '{\"status\":\"up\",\"service\":\"nginx\"}';
          }
        }

        server {
          listen 80;
          listen [::]:80;

          root /var/www/html;
          index index.html;
          server_name _;

          location /leasing/v1/lessor/shutdown {
            proxy_set_header Host $$http_host;
            proxy_set_header X-Real-IP $$remote_addr;
            proxy_set_header X-Forwarded-For $$proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $$scheme;
            proxy_pass http://dls-backend/leasing/v1/lessor/shutdown;
          }

          location / {
            return 301 https://$$host$$request_uri;
          }
        }
      }
      EON
      nginx
      EOF"

volumes:
  db:
@@ -1,8 +1,8 @@
-fastapi==0.95.1
+fastapi==0.88.0
-uvicorn[standard]==0.22.0
+uvicorn[standard]==0.20.0
python-jose==3.3.0
-pycryptodome==3.17
+pycryptodome==3.16.0
python-dateutil==2.8.2
-sqlalchemy==2.0.12
+sqlalchemy==1.4.45
-markdown==3.4.3
+markdown==3.4.1
-python-dotenv==1.0.0
+python-dotenv==0.21.0
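These pins line up with the app/orm.py hunks above: SQLAlchemy 2.0 removed both sessionmaker(autocommit=True) and raw-string Session.execute(), which is why the newer side wraps DDL in text(). A standalone sketch of the difference, using an in-memory SQLite engine:

# SQLAlchemy 2.0 only executes explicit constructs; raw SQL strings must be
# wrapped in text(). Under 1.4 the raw string still worked, with a warning.
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

engine = create_engine('sqlite://')
session = sessionmaker(bind=engine)()

session.execute(text('SELECT 1'))   # works on 1.4 and 2.0
# session.execute('SELECT 1')       # 1.4: deprecation warning; 2.0: raises
session.close()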
test/main.py (126 lines changed)
@@ -3,7 +3,7 @@ from hashlib import sha256
from calendar import timegm
from datetime import datetime
from os.path import dirname, join
-from uuid import uuid4, UUID
+from uuid import uuid4

from dateutil.relativedelta import relativedelta
from jose import jwt, jwk

@@ -16,11 +16,12 @@ sys.path.append('../')
sys.path.append('../app')

from app import main
-from app.util import load_key
+from app.util import generate_key, load_key

client = TestClient(main.app)

-ORIGIN_REF, ALLOTMENT_REF, SECRET = str(uuid4()), '20000000-0000-0000-0000-000000000001', 'HelloWorld'
+ORIGIN_REF, LEASE_REF = str(uuid4()), str(uuid4())
+SECRET = "HelloWorld"

# INSTANCE_KEY_RSA = generate_key()
# INSTANCE_KEY_PUB = INSTANCE_KEY_RSA.public_key()

@@ -32,59 +33,22 @@ jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), al
jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)

-def __bearer_token(origin_ref: str) -> str:
-    token = jwt.encode({"origin_ref": origin_ref}, key=jwt_encode_key, algorithm=ALGORITHMS.RS256)
-    token = f'Bearer {token}'
-    return token

def test_index():
    response = client.get('/')
    assert response.status_code == 200

-def test_health():
+def test_status():
-    response = client.get('/-/health')
+    response = client.get('/status')
    assert response.status_code == 200
-    assert response.json().get('status') == 'up'
+    assert response.json()['status'] == 'up'

-def test_config():
-    response = client.get('/-/config')
-    assert response.status_code == 200

-def test_readme():
-    response = client.get('/-/readme')
-    assert response.status_code == 200

-def test_manage():
-    response = client.get('/-/manage')
-    assert response.status_code == 200

def test_client_token():
-    response = client.get('/-/client-token')
+    response = client.get('/client-token')
    assert response.status_code == 200

-def test_origins():
-    pass

-def test_origins_delete():
-    pass

-def test_leases():
-    pass

-def test_lease_delete():
-    pass

def test_auth_v1_origin():
    payload = {
        "registration_pending": False,

@@ -103,7 +67,7 @@ def test_auth_v1_origin():

    response = client.post('/auth/v1/origin', json=payload)
    assert response.status_code == 200
-    assert response.json().get('origin_ref') == ORIGIN_REF
+    assert response.json()['origin_ref'] == ORIGIN_REF

def auth_v1_origin_update():

@@ -124,7 +88,7 @@ def auth_v1_origin_update():

    response = client.post('/auth/v1/origin/update', json=payload)
    assert response.status_code == 200
-    assert response.json().get('origin_ref') == ORIGIN_REF
+    assert response.json()['origin_ref'] == ORIGIN_REF

def test_auth_v1_code():

@@ -136,8 +100,8 @@ def test_auth_v1_code():
    response = client.post('/auth/v1/code', json=payload)
    assert response.status_code == 200

-    payload = jwt.get_unverified_claims(token=response.json().get('auth_code'))
+    payload = jwt.get_unverified_claims(token=response.json()['auth_code'])
-    assert payload.get('origin_ref') == ORIGIN_REF
+    assert payload['origin_ref'] == ORIGIN_REF

def test_auth_v1_token():

@@ -161,9 +125,9 @@ def test_auth_v1_token():
    response = client.post('/auth/v1/token', json=payload)
    assert response.status_code == 200

-    token = response.json().get('auth_token')
+    token = response.json()['auth_token']
    payload = jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
-    assert payload.get('origin_ref') == ORIGIN_REF
+    assert payload['origin_ref'] == ORIGIN_REF

def test_leasing_v1_lessor():

@@ -176,67 +140,45 @@ def test_leasing_v1_lessor():
            'product': {'name': 'NVIDIA RTX Virtual Workstation'}
        }],
        'proposal_evaluation_mode': 'ALL_OF',
-        'scope_ref_list': [ALLOTMENT_REF]
+        'scope_ref_list': [LEASE_REF]
    }

-    response = client.post('/leasing/v1/lessor', json=payload, headers={'authorization': __bearer_token(ORIGIN_REF)})
+    bearer_token = jwt.encode({"origin_ref": ORIGIN_REF}, key=jwt_encode_key, algorithm=ALGORITHMS.RS256)
+    bearer_token = f'Bearer {bearer_token}'
+    response = client.post('/leasing/v1/lessor', json=payload, headers={'authorization': bearer_token})
    assert response.status_code == 200

-    lease_result_list = response.json().get('lease_result_list')
+    lease_result_list = response.json()['lease_result_list']
    assert len(lease_result_list) == 1
-    assert len(lease_result_list[0]['lease']['ref']) == 36
-    assert str(UUID(lease_result_list[0]['lease']['ref'])) == lease_result_list[0]['lease']['ref']
+    assert lease_result_list[0]['lease']['ref'] == LEASE_REF
-
-    return lease_result_list[0]['lease']['ref']

def test_leasing_v1_lessor_lease():
-    response = client.get('/leasing/v1/lessor/leases', headers={'authorization': __bearer_token(ORIGIN_REF)})
+    bearer_token = jwt.encode({"origin_ref": ORIGIN_REF}, key=jwt_encode_key, algorithm=ALGORITHMS.RS256)
+    bearer_token = f'Bearer {bearer_token}'
+    response = client.get('/leasing/v1/lessor/leases', headers={'authorization': bearer_token})
    assert response.status_code == 200

-    active_lease_list = response.json().get('active_lease_list')
+    active_lease_list = response.json()['active_lease_list']
    assert len(active_lease_list) == 1
-    assert len(active_lease_list[0]) == 36
-    assert str(UUID(active_lease_list[0])) == active_lease_list[0]
+    assert active_lease_list[0] == LEASE_REF

def test_leasing_v1_lease_renew():
-    response = client.get('/leasing/v1/lessor/leases', headers={'authorization': __bearer_token(ORIGIN_REF)})
-    active_lease_list = response.json().get('active_lease_list')
-    active_lease_ref = active_lease_list[0]
-
-    ###
-
-    response = client.put(f'/leasing/v1/lease/{active_lease_ref}', headers={'authorization': __bearer_token(ORIGIN_REF)})
+    bearer_token = jwt.encode({"origin_ref": ORIGIN_REF}, key=jwt_encode_key, algorithm=ALGORITHMS.RS256)
+    bearer_token = f'Bearer {bearer_token}'
+    response = client.put(f'/leasing/v1/lease/{LEASE_REF}', headers={'authorization': bearer_token})
    assert response.status_code == 200

-    lease_ref = response.json().get('lease_ref')
-    assert len(lease_ref) == 36
-    assert lease_ref == active_lease_ref
+    assert response.json()['lease_ref'] == LEASE_REF

-def test_leasing_v1_lease_delete():
-    response = client.get('/leasing/v1/lessor/leases', headers={'authorization': __bearer_token(ORIGIN_REF)})
-    active_lease_list = response.json().get('active_lease_list')
-    active_lease_ref = active_lease_list[0]
-
-    ###
-
-    response = client.delete(f'/leasing/v1/lease/{active_lease_ref}', headers={'authorization': __bearer_token(ORIGIN_REF)})
-    assert response.status_code == 200
-
-    lease_ref = response.json().get('lease_ref')
-    assert len(lease_ref) == 36
-    assert lease_ref == active_lease_ref

def test_leasing_v1_lessor_lease_remove():
-    lease_ref = test_leasing_v1_lessor()
-    response = client.delete('/leasing/v1/lessor/leases', headers={'authorization': __bearer_token(ORIGIN_REF)})
+    bearer_token = jwt.encode({"origin_ref": ORIGIN_REF}, key=jwt_encode_key, algorithm=ALGORITHMS.RS256)
+    bearer_token = f'Bearer {bearer_token}'
+    response = client.delete('/leasing/v1/lessor/leases', headers={'authorization': bearer_token})
    assert response.status_code == 200

-    released_lease_list = response.json().get('released_lease_list')
+    released_lease_list = response.json()['released_lease_list']
    assert len(released_lease_list) == 1
-    assert len(released_lease_list[0]) == 36
-    assert released_lease_list[0] == lease_ref
+    assert released_lease_list[0] == LEASE_REF
version.env (new file, 1 line)
@@ -0,0 +1 @@
+VERSION=1.0.0