diff --git a/.gitignore b/.gitignore
index 937eb28..3421248 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,3 +3,4 @@ venv/
 .idea/
 app/*.sqlite*
 app/cert/*.*
+.pytest_cache
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index c24c913..9c02650 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,7 +1,34 @@
 cache:
   key: one-key-to-rule-them-all

-build:
+build:debian:
+  # debian:bullseye-slim
+  image: debian:bookworm-slim  # just to get "python3-jose" working
+  stage: build
+  before_script:
+    - apt-get update -qq && apt-get install -qq -y build-essential
+    - chmod 0755 -R .
+    # create build directory for .deb sources
+    - mkdir build
+    # copy install instructions
+    - cp -r DEBIAN build/
+    # copy app into "/usr/share/fastapi-dls" as "/usr/share/fastapi-dls/app" & copy README.md and version.env
+    - mkdir -p build/usr/share/fastapi-dls
+    - cp -r app build/usr/share/fastapi-dls
+    - cp README.md version.env build/usr/share/fastapi-dls
+    # create conf file
+    - mkdir -p build/etc/fastapi-dls
+    - touch build/etc/fastapi-dls/env
+    # cd into "build/"
+    - cd build/
+  script:
+    - dpkg -b . build.deb
+  artifacts:
+    expire_in: 1 week
+    paths:
+      - build/build.deb
+
+build:docker:
   image: docker:dind
   interruptible: true
   stage: build
@@ -16,11 +43,51 @@ build:
     - docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}

 test:
+  image: python:3.10-slim-bullseye
   stage: test
+  variables:
+    DATABASE: sqlite:///../app/db.sqlite
+  before_script:
+    - pip install -r requirements.txt
+    - pip install pytest httpx
+    - mkdir -p app/cert
+    - openssl genrsa -out app/cert/instance.private.pem 2048
+    - openssl rsa -in app/cert/instance.private.pem -outform PEM -pubout -out app/cert/instance.public.pem
+    - cd test
   script:
-    - echo "Nothing to do ..."
+    - pytest main.py

-deploy:
+test:debian:
+  image: debian:bookworm-slim
+  stage: test
+  variables:
+    DEBIAN_FRONTEND: noninteractive
+  needs:
+    - job: build:debian
+      artifacts: true
+  before_script:
+    - apt-get update -qq && apt-get install -qq -y jq
+  script:
+    # test installation
+    - apt-get install -q -y ./build/build.deb --fix-missing
+    # copy example config from GitLab-CI-Variables
+    #- cat ${EXAMPLE_CONFIG} > /etc/fastapi-dls/env
+    # start service in background
+    - uvicorn --host 127.0.0.1 --port 443
+      --app-dir /usr/share/fastapi-dls/app
+      --ssl-keyfile /etc/fastapi-dls/webserver.key
+      --ssl-certfile /etc/fastapi-dls/webserver.crt
+      --proxy-headers &
+    - FASTAPI_DLS_PID=$!
+    - echo "Started service with pid $FASTAPI_DLS_PID"
+    # testing service
+    - if [ "`curl --insecure -s https://127.0.0.1/status | jq -r .status`" = "up" ]; then echo "Success"; else echo "Error"; fi
+    # cleanup
+    - kill $FASTAPI_DLS_PID
+    - apt-get purge -qq -y fastapi-dls
+    - apt-get autoremove -qq -y && apt-get clean -qq
+
+deploy:docker:
   stage: deploy
   rules:
     - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
@@ -41,3 +108,44 @@ deploy:
     - docker build . --tag $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:latest
     - docker push $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:${VERSION}
     - docker push $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:latest
+
+deploy:debian:
+  # doc: https://git.collinwebdesigns.de/help/user/packages/debian_repository/index.md#install-a-package
+  image: debian:bookworm-slim
+  stage: deploy
+#  rules:
+#    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+  needs:
+    - job: build:debian
+      artifacts: true
+  before_script:
+    - apt-get update -qq && apt-get install -qq -y curl lsb-release
+    # create distribution initial
+    - CODENAME=`lsb_release -cs`
+    # create repo if not exists
+    - 'if [ "`curl -s -o /dev/null -w "%{http_code}" --header "JOB-TOKEN: $CI_JOB_TOKEN" -s ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/debian_distributions/${CODENAME}/key.asc`" != "200" ]; then curl --request POST --header "JOB-TOKEN: $CI_JOB_TOKEN" "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/debian_distributions?codename=${CODENAME}"; fi'
+  script:
+    # Naming format: <name>_<version>-<release>_<arch>.deb
+    # Version is the version number of the app being packaged
+    # Release number is the version number of the *packaging* itself.
+    # The release number might increment if the package maintainer
+    # updated the packaging, while the version number of the application
+    # being packaged did not change.
+    - BUILD_NAME=build/build.deb  # inherited by build-stage
+    - PACKAGE_NAME=`dpkg -I ${BUILD_NAME} | grep "Package:" | awk '{ print $2 }'`
+    - PACKAGE_VERSION=`dpkg -I ${BUILD_NAME} | grep "Version:" | awk '{ print $2 }'`
+    - PACKAGE_ARCH=amd64
+    #- EXPORT_NAME="${PACKAGE_NAME}_${PACKAGE_VERSION}-0_${PACKAGE_ARCH}.deb"
+    - EXPORT_NAME="${PACKAGE_NAME}_${PACKAGE_VERSION}_${PACKAGE_ARCH}.deb"
+    - mv ${BUILD_NAME} ${EXPORT_NAME}
+    - 'echo "PACKAGE_NAME: ${PACKAGE_NAME}"'
+    - 'echo "PACKAGE_VERSION: ${PACKAGE_VERSION}"'
+    - 'echo "PACKAGE_ARCH: ${PACKAGE_ARCH}"'
+    - 'echo "EXPORT_NAME: ${EXPORT_NAME}"'
+    # https://docs.gitlab.com/14.3/ee/user/packages/debian_repository/index.html
+    - URL="${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/debian/${EXPORT_NAME}"
+    - 'echo "URL: ${URL}"'
+    #- 'curl --request PUT --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file ${EXPORT_NAME} ${URL}'
+    # using generic-package-registry until debian-registry is GA
+    # https://docs.gitlab.com/ee/user/packages/generic_packages/index.html#publish-a-generic-package-by-using-cicd
+    - 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file ${EXPORT_NAME} "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/${PACKAGE_NAME}/${PACKAGE_VERSION}/${EXPORT_NAME}"'
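Note (reviewer sketch, not part of the patch): the `test:debian` job's smoke test boils down to "does `/status` report `up`". The same check can be expressed with `httpx`, which the `test` job already installs; the snippet assumes a packaged instance is already listening on `https://127.0.0.1:443` with its self-signed certificate.

```python
# Illustrative only: the curl + jq status check from .gitlab-ci.yml, expressed with httpx.
# Assumes fastapi-dls is already running on https://127.0.0.1:443 with a self-signed certificate.
import httpx

response = httpx.get('https://127.0.0.1/status', verify=False)  # skip TLS verification (self-signed)
assert response.status_code == 200
assert response.json()['status'] == 'up'
```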
diff --git a/DEBIAN/conffiles b/DEBIAN/conffiles
new file mode 100644
index 0000000..008d731
--- /dev/null
+++ b/DEBIAN/conffiles
@@ -0,0 +1 @@
+/etc/fastapi-dls/env
diff --git a/DEBIAN/control b/DEBIAN/control
new file mode 100644
index 0000000..01db5e9
--- /dev/null
+++ b/DEBIAN/control
@@ -0,0 +1,9 @@
+Package: fastapi-dls
+Version: 1.0.0
+Architecture: all
+Maintainer: Oscar Krause <oscar.krause@collinwebdesigns.de>
+Depends: python3, python3-fastapi, python3-uvicorn, python3-dotenv, python3-dateutil, python3-jose, python3-sqlalchemy, python3-pycryptodome, python3-markdown, uvicorn, openssl
+Recommends: curl
+Installed-Size: 10240
+Homepage: https://git.collinwebdesigns.de/oscar.krause/fastapi-dls
+Description: Minimal Delegated License Service (DLS).
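Note (reviewer sketch, not part of the patch): `Depends` lists `python3-pycryptodome`, whose modules are importable as `Cryptodome` on Debian rather than `Crypto` — that is what the `Crypto | Cryptodome on Debian` fallback added to `app/main.py` below accounts for. A minimal, self-contained illustration of that fallback (the key generation only makes the snippet runnable):

```python
# Sketch of the import fallback used in app/main.py.
try:
    from Crypto.PublicKey import RSA        # pip install pycryptodome
except ModuleNotFoundError:
    from Cryptodome.PublicKey import RSA    # Debian: apt-get install python3-pycryptodome

key = RSA.generate(2048)
print(key.public_key().export_key().decode('utf-8').splitlines()[0])  # -----BEGIN PUBLIC KEY-----
```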
diff --git a/DEBIAN/postinst b/DEBIAN/postinst
new file mode 100644
index 0000000..2311b35
--- /dev/null
+++ b/DEBIAN/postinst
@@ -0,0 +1,101 @@
+#!/bin/bash
+
+WORKING_DIR=/usr/share/fastapi-dls
+CONFIG_DIR=/etc/fastapi-dls
+
+echo "> Create config directory ..."
+mkdir -p $CONFIG_DIR
+
+echo "> Install service ..."
+cat <<EOF >/etc/systemd/system/fastapi-dls.service
+[Unit]
+Description=Service for fastapi-dls
+After=network.target
+
+[Service]
+User=www-data
+Group=www-data
+AmbientCapabilities=CAP_NET_BIND_SERVICE
+WorkingDirectory=$WORKING_DIR/app
+EnvironmentFile=$CONFIG_DIR/env
+ExecStart=uvicorn main:app \\
+  --env-file /etc/fastapi-dls/env \\
+  --host \$DLS_URL --port \$DLS_PORT \\
+  --app-dir $WORKING_DIR/app \\
+  --ssl-keyfile /etc/fastapi-dls/webserver.key \\
+  --ssl-certfile /etc/fastapi-dls/webserver.crt \\
+  --proxy-headers
+Restart=always
+KillSignal=SIGQUIT
+Type=simple
+NotifyAccess=all
+
+[Install]
+WantedBy=multi-user.target
+
+EOF
+
+systemctl daemon-reload
+
+if [[ ! -f $CONFIG_DIR/env ]]; then
+  echo "> Writing initial config ..."
+  touch $CONFIG_DIR/env
+  cat <<EOF >$CONFIG_DIR/env
+DLS_URL=127.0.0.1
+DLS_PORT=443
+LEASE_EXPIRE_DAYS=90
+DATABASE=sqlite:///$CONFIG_DIR/db.sqlite
+INSTANCE_KEY_RSA=$CONFIG_DIR/instance.private.pem
+INSTANCE_KEY_PUB=$CONFIG_DIR/instance.public.pem
+
+EOF
+fi
+
+echo "> Create dls-instance keypair ..."
+openssl genrsa -out $CONFIG_DIR/instance.private.pem 2048
+openssl rsa -in $CONFIG_DIR/instance.private.pem -outform PEM -pubout -out $CONFIG_DIR/instance.public.pem
+
+while true; do
+  read -p "> Do you wish to create self-signed webserver certificate? [Y/n]" yn
+  yn=${yn:-y}  # ${parameter:-word} If parameter is unset or null, the expansion of word is substituted. Otherwise, the value of parameter is substituted.
+  case $yn in
+    [Yy]*)
+      openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout $CONFIG_DIR/webserver.key -out $CONFIG_DIR/webserver.crt
+      break
+      ;;
+    [Nn]*) break ;;
+    *) echo "Please answer [y] or [n]." ;;
+  esac
+done
+
+if [[ -f $CONFIG_DIR/webserver.key ]]; then
+  echo "> Starting service ..."
+  systemctl start fastapi-dls.service
+
+  if [ -x "$(command -v curl)" ]; then
+    echo "> Testing API ..."
+    source $CONFIG_DIR/env
+    curl --insecure -X GET https://$DLS_URL:$DLS_PORT/status
+  else
+    echo "> Testing API failed, curl not available. Please test manually!"
+  fi
+fi
+
+chown -R www-data:www-data $CONFIG_DIR
+chown -R www-data:www-data $WORKING_DIR
+
+cat <<EOF
+
+  fastapi-dls is now installed.
+
+EOF
diff --git a/DEBIAN/postrm b/DEBIAN/postrm
new file mode 100755
--- /dev/null
+++ b/DEBIAN/postrm
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+if [[ -f /etc/systemd/system/fastapi-dls.service ]]; then
+  echo "> Removing service file."
+  rm /etc/systemd/system/fastapi-dls.service
+fi
+
+# todo
diff --git a/DEBIAN/prerm b/DEBIAN/prerm
new file mode 100755
index 0000000..296c995
--- /dev/null
+++ b/DEBIAN/prerm
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+echo -e "> Starting uninstallation of 'fastapi-dls'!"
+
+# todo
diff --git a/README.md b/README.md
index e729b89..095f975 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,13 @@ Minimal Delegated License Service (DLS).
 This service can be used without internet connection.
 Only the clients need a connection to this service on configured port.

+[[_TOC_]]
+
+## ToDo's
+
+- migrate from `fastapi` to `flask`
+- Support http mode for use behind an external https proxy (disable uvicorn ssl when running behind a proxy)
+
 ## Endpoints

 ### `GET /`
@@ -35,14 +42,14 @@ Generate client token, (see [installation](#installation)).

 There are some more internal api endpoints for handling authentication and lease process.

-# Setup
+# Setup (Service)

 ## Docker

 Docker-Images are available here:

 - [Docker-Hub](https://hub.docker.com/repository/docker/collinwebdesigns/fastapi-dls): `collinwebdesigns/fastapi-dls:latest`
-- GitLab-Registry: `registry.git.collinwebdesigns.de/oscar.krause/fastapi-dls/main:latest`
+- [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/container_registry): `registry.git.collinwebdesigns.de/oscar.krause/fastapi-dls/main:latest`

 **Run this on the Docker-Host**

@@ -91,7 +98,7 @@ volumes:
   dls-db:
 ```

-## Debian
+## Debian/Ubuntu (manual method using `git clone`)

 Tested on `Debian 11 (bullseye)`, Ubuntu may also work.

@@ -112,6 +119,7 @@ python3 -m venv venv
 source venv/bin/activate
 pip install -r requirements.txt
 deactivate
+chown -R www-data:www-data $WORKING_DIR
 ```

 **Create keypair and webserver certificate**
@@ -125,29 +133,28 @@ openssl genrsa -out $WORKING_DIR/instance.private.pem 2048
 openssl rsa -in $WORKING_DIR/instance.private.pem -outform PEM -pubout -out $WORKING_DIR/instance.public.pem
 # create ssl certificate for integrated webserver (uvicorn) - because clients rely on ssl
 openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout $WORKING_DIR/webserver.key -out $WORKING_DIR/webserver.crt
+chown -R www-data:www-data $WORKING_DIR
 ```

 **Test Service**

+This is only to test whether the service starts successfully.
+
 ```shell
 cd /opt/fastapi-dls/app
-/opt/fastapi-dls/venv/bin/uvicorn main:app \
-  --host 127.0.0.1 --port 443 \
-  --app-dir /opt/fastapi-dls/app \
-  --ssl-keyfile /opt/fastapi-dls/app/cert/webserver.key \
-  --ssl-certfile /opt/fastapi-dls/app/cert/webserver.crt \
-  --proxy-headers
+su - www-data -c "/opt/fastapi-dls/venv/bin/uvicorn main:app --app-dir=/opt/fastapi-dls/app"
 ```

 **Create config file**

 ```shell
-cat <<EOF > /etc/fastapi-dls.env
+cat <<EOF >/etc/fastapi-dls/env
 DLS_URL=127.0.0.1
 DLS_PORT=443
 LEASE_EXPIRE_DAYS=90
 DATABASE=sqlite:////opt/fastapi-dls/app/db.sqlite
-EOF
+
+EOF
 ```

 **Create service**

@@ -161,43 +168,97 @@ After=network.target
 [Service]
 User=www-data
 Group=www-data
+AmbientCapabilities=CAP_NET_BIND_SERVICE
 WorkingDirectory=/opt/fastapi-dls/app
-ExecStart=/opt/fastapi-dls/venv/bin/uvicorn \
-  --host $DLS_URL --port $DLS_PORT \
-  --app-dir /opt/fastapi-dls/app \
-  --ssl-keyfile /opt/fastapi-dls/app/cert/webserver.key \
-  --ssl-certfile /opt/fastapi-dls/app/cert/webserver.crt \
+EnvironmentFile=/etc/fastapi-dls/env
+ExecStart=/opt/fastapi-dls/venv/bin/uvicorn main:app \\
+  --env-file /etc/fastapi-dls/env \\
+  --host \$DLS_URL --port \$DLS_PORT \\
+  --app-dir /opt/fastapi-dls/app \\
+  --ssl-keyfile /opt/fastapi-dls/app/cert/webserver.key \\
+  --ssl-certfile /opt/fastapi-dls/app/cert/webserver.crt \\
   --proxy-headers
-EnvironmentFile=/etc/fastapi-dls.env
 Restart=always
 KillSignal=SIGQUIT
-Type=notify
-StandardError=syslog
+Type=simple
 NotifyAccess=all

 [Install]
 WantedBy=multi-user.target
+
 EOF
 ```

 Now you have to run `systemctl daemon-reload`. After that you can start service
-with `systemctl start fastapi-dls.service`.
+with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.
+
+## Debian/Ubuntu (using `dpkg`)
+
+Packages are available here:
+
+- [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/packages)
+
+Successfully tested with:
+
+- Debian 12 (Bookworm)
+- Ubuntu 22.10 (Kinetic Kudu)
+
+**Run this on your server instance**
+
+First go to [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/packages) and select your
+version. Then you have to copy the download link of the `fastapi-dls_X.Y.Z_amd64.deb` asset.
+
+```shell
+apt-get update
+FILENAME=/opt/fastapi-dls.deb
+wget -O $FILENAME <download-link>
+dpkg -i $FILENAME
+apt-get install -f --fix-missing
+```
+
+Start with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.
+
+## Let's Encrypt Certificate
+
+If you're using the Docker installation, you can use `traefik`. Please refer to its documentation.
+
+Note that port 80 must be accessible, and you have to install `socat` if you're using `standalone` mode.
+
+```shell
+acme.sh --issue -d example.com \
+  --cert-file /etc/fastapi-dls/webserver.donotuse.crt \
+  --key-file /etc/fastapi-dls/webserver.key \
+  --fullchain-file /etc/fastapi-dls/webserver.crt \
+  --reloadcmd "systemctl restart fastapi-dls.service"
+```
+
+After the first successful issuance, replace `--issue` with `--renew`.

 # Configuration

-| Variable            | Default               | Usage                                                                                  |
-|---------------------|-----------------------|----------------------------------------------------------------------------------------|
-| `DEBUG`             | `false`               | Toggles `fastapi` debug mode                                                           |
-| `DLS_URL`           | `localhost`           | Used in client-token to tell guest driver where dls instance is reachable              |
-| `DLS_PORT`          | `443`                 | Used in client-token to tell guest driver where dls instance is reachable              |
-| `LEASE_EXPIRE_DAYS` | `90`                  | Lease time in days                                                                     |
-| `DATABASE`          | `sqlite:///db.sqlite` | See [official dataset docs](https://dataset.readthedocs.io/en/latest/quickstart.html)  |
-| `CORS_ORIGINS`      | `https://{DLS_URL}`   | Sets `Access-Control-Allow-Origin` header (comma separated string)                     |
+| Variable            | Default                                | Usage                                                                                  |
+|---------------------|----------------------------------------|----------------------------------------------------------------------------------------|
+| `DEBUG`             | `false`                                | Toggles `fastapi` debug mode                                                           |
+| `DLS_URL`           | `localhost`                            | Used in client-token to tell guest driver where dls instance is reachable              |
+| `DLS_PORT`          | `443`                                  | Used in client-token to tell guest driver where dls instance is reachable              |
+| `LEASE_EXPIRE_DAYS` | `90`                                   | Lease time in days                                                                     |
+| `DATABASE`          | `sqlite:///db.sqlite`                  | See [official dataset docs](https://dataset.readthedocs.io/en/latest/quickstart.html)  |
+| `CORS_ORIGINS`      | `https://{DLS_URL}`                    | Sets `Access-Control-Allow-Origin` header (comma separated string)                     |
+| `SITE_KEY_XID`      | `00000000-0000-0000-0000-000000000000` | Site identification uuid                                                               |
+| `INSTANCE_REF`      | `00000000-0000-0000-0000-000000000000` | Instance identification uuid                                                           |
+| `INSTANCE_KEY_RSA`  | `/cert/instance.private.pem`           | Site-wide private RSA key for signing JWTs                                             |
+| `INSTANCE_KEY_PUB`  | `/cert/instance.public.pem`            | Site-wide public key                                                                   |

-# Installation
+# Setup (Client)

 **The token file has to be copied! It's not enough to C&P file contents, because there can be special characters.**

+Successfully tested with these package versions:
+
+- `14.3` (Linux-Host: `510.108.03`, Linux-Guest: `510.108.03`, Windows-Guest: `513.91`)
+- `14.4` (Linux-Host: `510.108.03`, Linux-Guest: `510.108.03`, Windows-Guest: `514.08`)
+- `15.0` (Linux-Host: `525.60.12`, Linux-Guest: `525.60.13`, Windows-Guest: `527.41`)
+
 ## Linux

 ```shell
@@ -225,10 +286,39 @@ Logs are available in `C:\Users\Public\Documents\Nvidia\LoggingLog.NVDisplay.Con

 ## Linux

-Currently, there are no known issues.
+### `uvicorn.error:Invalid HTTP request received.`
+
+This message can be ignored.
+
+- Ref. https://github.com/encode/uvicorn/issues/441
+
+```
+WARNING:uvicorn.error:Invalid HTTP request received.
+Traceback (most recent call last):
+  File "/usr/lib/python3/dist-packages/uvicorn/protocols/http/h11_impl.py", line 129, in handle_events
+    event = self.conn.next_event()
+  File "/usr/lib/python3/dist-packages/h11/_connection.py", line 485, in next_event
+    exc._reraise_as_remote_protocol_error()
+  File "/usr/lib/python3/dist-packages/h11/_util.py", line 77, in _reraise_as_remote_protocol_error
+    raise self
+  File "/usr/lib/python3/dist-packages/h11/_connection.py", line 467, in next_event
+    event = self._extract_next_receive_event()
+  File "/usr/lib/python3/dist-packages/h11/_connection.py", line 409, in _extract_next_receive_event
+    event = self._reader(self._receive_buffer)
+  File "/usr/lib/python3/dist-packages/h11/_readers.py", line 84, in maybe_read_from_IDLE_client
+    raise LocalProtocolError("no request line received")
+h11._util.RemoteProtocolError: no request line received
+```

 ## Windows

+### Required cipher on Windows Guests (e.g. managed by domain controller with GPO)
+
+It is required to enable `SHA1` (`TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA_P521`)
+in the [Windows cipher suite](https://learn.microsoft.com/en-us/windows-server/security/tls/manage-tls).
+
+### Multiple Display Container LS Instances
+
 On Windows on some machines there are running two or more instances of `NVIDIA Display Container LS`. This causes a
 problem on licensing flow. As you can see in the logs below, there are two lines with `NLS initialized`, each prefixed
 with `<1>` and `<2>`. So it is possible, that *daemon 1* fetches a valid license through dls-service, and *daemon 2*
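Note (reviewer sketch, not part of the patch): the new `INSTANCE_REF` variable documented above ends up inside the client token as `nls_service_instance_ref`. For debugging a running instance, the token can be fetched and its claims inspected without verifying the signature; this assumes an instance on `https://127.0.0.1` and uses `python-jose`, which the project already depends on.

```python
# Sketch: fetch /client-token from a running instance and inspect its (unverified) claims.
import httpx
from jose import jwt

token = httpx.get('https://127.0.0.1/client-token', verify=False).text
claims = jwt.get_unverified_claims(token)
print(claims['service_instance_configuration']['nls_service_instance_ref'])  # == INSTANCE_REF
```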
diff --git a/app/main.py b/app/main.py
index 08c0310..b21c8c1 100644
--- a/app/main.py
+++ b/app/main.py
@@ -17,9 +17,17 @@ from jose import jws, jwk, jwt
 from jose.constants import ALGORITHMS
 from starlette.middleware.cors import CORSMiddleware
 from starlette.responses import StreamingResponse, JSONResponse, HTMLResponse
-import dataset
-from Crypto.PublicKey import RSA
-from Crypto.PublicKey.RSA import RsaKey
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+
+try:
+    # Crypto | Cryptodome on Debian
+    from Crypto.PublicKey import RSA
+    from Crypto.PublicKey.RSA import RsaKey
+except ModuleNotFoundError:
+    from Cryptodome.PublicKey import RSA
+    from Cryptodome.PublicKey.RSA import RsaKey
+from orm import Origin, Lease, init as db_init

 logger = logging.getLogger()
 load_dotenv('../version.env')
@@ -45,21 +53,22 @@ __details = dict(
     version=VERSION,
 )

-app, db = FastAPI(**__details), dataset.connect(str(getenv('DATABASE', 'sqlite:///db.sqlite')))
-
-TOKEN_EXPIRE_DELTA = relativedelta(hours=1)  # days=1
-LEASE_EXPIRE_DELTA = relativedelta(days=int(getenv('LEASE_EXPIRE_DAYS', 90)))
+app, db = FastAPI(**__details), create_engine(str(getenv('DATABASE', 'sqlite:///db.sqlite')))
+db_init(db)

 DLS_URL = str(getenv('DLS_URL', 'localhost'))
 DLS_PORT = int(getenv('DLS_PORT', '443'))
-SITE_KEY_XID = getenv('SITE_KEY_XID', '00000000-0000-0000-0000-000000000000')
-INSTANCE_KEY_RSA = load_key(join(dirname(__file__), 'cert/instance.private.pem'))
-INSTANCE_KEY_PUB = load_key(join(dirname(__file__), 'cert/instance.public.pem'))
+SITE_KEY_XID = str(getenv('SITE_KEY_XID', '00000000-0000-0000-0000-000000000000'))
+INSTANCE_REF = str(getenv('INSTANCE_REF', '00000000-0000-0000-0000-000000000000'))
+INSTANCE_KEY_RSA = load_key(str(getenv('INSTANCE_KEY_RSA', join(dirname(__file__), 'cert/instance.private.pem'))))
+INSTANCE_KEY_PUB = load_key(str(getenv('INSTANCE_KEY_PUB', join(dirname(__file__), 'cert/instance.public.pem'))))
+TOKEN_EXPIRE_DELTA = relativedelta(hours=1)  # days=1
+LEASE_EXPIRE_DELTA = relativedelta(days=int(getenv('LEASE_EXPIRE_DAYS', 90)))

 CORS_ORIGINS = getenv('CORS_ORIGINS').split(',') if (getenv('CORS_ORIGINS')) else f'https://{DLS_URL}'  # todo: prevent static https

 jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
-jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS512)
+jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)

 app.debug = DEBUG
 app.add_middleware(
@@ -93,13 +102,17 @@ async def status(request: Request):

 @app.get('/-/origins')
 async def _origins(request: Request):
-    response = list(map(lambda x: jsonable_encoder(x), db['origin'].all()))
+    session = sessionmaker(bind=db)()
+    response = list(map(lambda x: jsonable_encoder(x), session.query(Origin).all()))
+    session.close()
     return JSONResponse(response)


 @app.get('/-/leases')
 async def _leases(request: Request):
-    response = list(map(lambda x: jsonable_encoder(x), db['lease'].all()))
+    session = sessionmaker(bind=db)()
+    response = list(map(lambda x: jsonable_encoder(x), session.query(Lease).all()))
+    session.close()
     return JSONResponse(response)


@@ -109,15 +122,6 @@ async def client_token():
     cur_time = datetime.utcnow()
     exp_time = cur_time + relativedelta(years=12)

-    service_instance_public_key_configuration = {
-        "service_instance_public_key_me": {
-            "mod": hex(INSTANCE_KEY_PUB.public_key().n)[2:],
-            "exp": INSTANCE_KEY_PUB.public_key().e,
-        },
-        "service_instance_public_key_pem": INSTANCE_KEY_PUB.export_key().decode('utf-8'),
-        "key_retention_mode": "LATEST_ONLY"
-    }
-
     payload = {
         "jti": str(uuid4()),
         "iss": "NLS Service Instance",
@@ -129,7 +133,7 @@ async def client_token():
         "scope_ref_list": [str(uuid4())],
         "fulfillment_class_ref_list": [],
         "service_instance_configuration": {
-            "nls_service_instance_ref": "00000000-0000-0000-0000-000000000000",
+            "nls_service_instance_ref": INSTANCE_REF,
             "svc_port_set_list": [
                 {
                     "idx": 0,
@@ -139,7 +143,14 @@ async def client_token():
             ],
             "node_url_list": [{"idx": 0, "url": DLS_URL, "url_qr": DLS_URL, "svc_port_set_idx": 0}]
         },
-        "service_instance_public_key_configuration": service_instance_public_key_configuration,
+        "service_instance_public_key_configuration": {
+            "service_instance_public_key_me": {
+                "mod": hex(INSTANCE_KEY_PUB.public_key().n)[2:],
+                "exp": int(INSTANCE_KEY_PUB.public_key().e),
+            },
+            "service_instance_public_key_pem": INSTANCE_KEY_PUB.export_key().decode('utf-8'),
+            "key_retention_mode": "LATEST_ONLY"
+        },
     }

     content = jws.sign(payload, key=jwt_encode_key, headers=None, algorithm=ALGORITHMS.RS256)
@@ -155,21 +166,20 @@ async def client_token():
 # {"candidate_origin_ref":"00112233-4455-6677-8899-aabbccddeeff","environment":{"fingerprint":{"mac_address_list":["ff:ff:ff:ff:ff:ff"]},"hostname":"my-hostname","ip_address_list":["192.168.178.123","fe80::","fe80::1%enp6s18"],"guest_driver_version":"510.85.02","os_platform":"Debian GNU/Linux 11 (bullseye) 11","os_version":"11 (bullseye)"},"registration_pending":false,"update_pending":false}
 @app.post('/auth/v1/origin')
 async def auth_v1_origin(request: Request):
-    j = json.loads((await request.body()).decode('utf-8'))
+    j, cur_time = json.loads((await request.body()).decode('utf-8')), datetime.utcnow()

     origin_ref = j['candidate_origin_ref']
     logging.info(f'> [ origin ]: {origin_ref}: {j}')

-    data = dict(
+    data = Origin(
         origin_ref=origin_ref,
         hostname=j['environment']['hostname'],
         guest_driver_version=j['environment']['guest_driver_version'],
         os_platform=j['environment']['os_platform'],
         os_version=j['environment']['os_version'],
     )
-    db['origin'].upsert(data, ['origin_ref'])
+    Origin.create_or_update(db, data)

-    cur_time = datetime.utcnow()
     response = {
         "origin_ref": origin_ref,
         "environment": j['environment'],
@@ -183,17 +193,43 @@ async def auth_v1_origin(request: Request):
     return JSONResponse(response)


+# venv/lib/python3.9/site-packages/nls_services_auth/test/test_origins_controller.py
+# { "environment" : { "guest_driver_version" : "guest_driver_version", "hostname" : "myhost", "ip_address_list" : [ "192.168.1.129" ], "os_version" : "os_version", "os_platform" : "os_platform", "fingerprint" : { "mac_address_list" : [ "e4:b9:7a:e5:7b:ff" ] }, "host_driver_version" : "host_driver_version" }, "origin_ref" : "00112233-4455-6677-8899-aabbccddeeff" }
+@app.post('/auth/v1/origin/update')
+async def auth_v1_origin_update(request: Request):
+    j, cur_time = json.loads((await request.body()).decode('utf-8')), datetime.utcnow()
+
+    origin_ref = j['origin_ref']
+    logging.info(f'> [ update ]: {origin_ref}: {j}')
+
+    data = Origin(
+        origin_ref=origin_ref,
+        hostname=j['environment']['hostname'],
+        guest_driver_version=j['environment']['guest_driver_version'],
+        os_platform=j['environment']['os_platform'], os_version=j['environment']['os_version'],
+    )
+
+    Origin.create_or_update(db, data)
+
+    response = {
+        "environment": j['environment'],
+        "prompts": None,
+        "sync_timestamp": cur_time.isoformat()
+    }
+
+    return JSONResponse(response)
+
+
 # venv/lib/python3.9/site-packages/nls_services_auth/test/test_auth_controller.py
 # venv/lib/python3.9/site-packages/nls_core_auth/auth.py - CodeResponse
 # {"code_challenge":"...","origin_ref":"00112233-4455-6677-8899-aabbccddeeff"}
 @app.post('/auth/v1/code')
 async def auth_v1_code(request: Request):
-    j = json.loads((await request.body()).decode('utf-8'))
+    j, cur_time = json.loads((await request.body()).decode('utf-8')), datetime.utcnow()

     origin_ref = j['origin_ref']
     logging.info(f'> [ code ]: {origin_ref}: {j}')

-    cur_time = datetime.utcnow()
     delta = relativedelta(minutes=15)
     expires = cur_time + delta

@@ -222,7 +258,7 @@ async def auth_v1_code(request: Request):
 # {"auth_code":"...","code_verifier":"..."}
 @app.post('/auth/v1/token')
 async def auth_v1_token(request: Request):
-    j = json.loads((await request.body()).decode('utf-8'))
+    j, cur_time = json.loads((await request.body()).decode('utf-8')), datetime.utcnow()

     payload = jwt.decode(token=j['auth_code'], key=jwt_decode_key)
     origin_ref = payload['origin_ref']
@@ -232,7 +268,6 @@ async def auth_v1_token(request: Request):
     if payload['challenge'] != b64enc(sha256(j['code_verifier'].encode('utf-8')).digest()).rstrip(b'=').decode('utf-8'):
         raise HTTPException(status_code=401, detail='expected challenge did not match verifier')

-    cur_time = datetime.utcnow()
     access_expires_on = cur_time + TOKEN_EXPIRE_DELTA

     new_payload = {
@@ -260,13 +295,12 @@ async def auth_v1_token(request: Request):
 # {'fulfillment_context': {'fulfillment_class_ref_list': []}, 'lease_proposal_list': [{'license_type_qualifiers': {'count': 1}, 'product': {'name': 'NVIDIA RTX Virtual Workstation'}}], 'proposal_evaluation_mode': 'ALL_OF', 'scope_ref_list': ['00112233-4455-6677-8899-aabbccddeeff']}
 @app.post('/leasing/v1/lessor')
 async def leasing_v1_lessor(request: Request):
-    j, token = json.loads((await request.body()).decode('utf-8')), get_token(request)
+    j, token, cur_time = json.loads((await request.body()).decode('utf-8')), get_token(request), datetime.utcnow()

     origin_ref = token['origin_ref']
     scope_ref_list = j['scope_ref_list']
     logging.info(f'> [ create ]: {origin_ref}: create leases for scope_ref_list {scope_ref_list}')

-    cur_time = datetime.utcnow()
     lease_result_list = []
     for scope_ref in scope_ref_list:
         expires = cur_time + LEASE_EXPIRE_DELTA
@@ -284,8 +318,8 @@ async def leasing_v1_lessor(request: Request):
             }
         })

-        data = dict(origin_ref=origin_ref, lease_ref=scope_ref, lease_created=cur_time, lease_expires=expires)
-        db['lease'].insert_ignore(data, ['origin_ref', 'lease_ref'])  # todo: handle update
+        data = Lease(origin_ref=origin_ref, lease_ref=scope_ref, lease_created=cur_time, lease_expires=expires)
+        Lease.create_or_update(db, data)

     response = {
         "lease_result_list": lease_result_list,
@@ -301,14 +335,13 @@ async def leasing_v1_lessor(request: Request):
 # venv/lib/python3.9/site-packages/nls_dal_service_instance_dls/schema/service_instance/V1_0_21__product_mapping.sql
 @app.get('/leasing/v1/lessor/leases')
 async def leasing_v1_lessor_lease(request: Request):
-    token = get_token(request)
+    token, cur_time = get_token(request), datetime.utcnow()

     origin_ref = token['origin_ref']

-    active_lease_list = list(map(lambda x: x['lease_ref'], db['lease'].find(origin_ref=origin_ref)))
+    active_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
     logging.info(f'> [ leases ]: {origin_ref}: found {len(active_lease_list)} active leases')

-    cur_time = datetime.utcnow()
     response = {
         "active_lease_list": active_lease_list,
         "sync_timestamp": cur_time.isoformat(),
@@ -321,15 +354,15 @@ async def leasing_v1_lessor_lease(request: Request):
 # venv/lib/python3.9/site-packages/nls_core_lease/lease_single.py
 @app.put('/leasing/v1/lease/{lease_ref}')
 async def leasing_v1_lease_renew(request: Request, lease_ref: str):
-    token = get_token(request)
+    token, cur_time = get_token(request), datetime.utcnow()

     origin_ref = token['origin_ref']
     logging.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')

-    if db['lease'].count(origin_ref=origin_ref, lease_ref=lease_ref) == 0:
+    entity = Lease.find_by_origin_ref_and_lease_ref(db, origin_ref, lease_ref)
+    if entity is None:
         raise HTTPException(status_code=404, detail='requested lease not available')

-    cur_time = datetime.utcnow()
     expires = cur_time + LEASE_EXPIRE_DELTA
     response = {
         "lease_ref": lease_ref,
@@ -340,29 +373,28 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
         "sync_timestamp": cur_time.isoformat(),
     }

-    data = dict(origin_ref=origin_ref, lease_ref=lease_ref, lease_expires=expires, lease_last_update=cur_time)
-    db['lease'].update(data, ['origin_ref', 'lease_ref'])
+    Lease.renew(db, entity, expires, cur_time)

     return JSONResponse(response)


 @app.delete('/leasing/v1/lessor/leases')
 async def leasing_v1_lessor_lease_remove(request: Request):
-    token = get_token(request)
+    token, cur_time = get_token(request), datetime.utcnow()

     origin_ref = token['origin_ref']

-    released_lease_list = list(map(lambda x: x['lease_ref'], db['lease'].find(origin_ref=origin_ref)))
-    deletions = db['lease'].delete(origin_ref=origin_ref)
+    released_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
+    deletions = Lease.cleanup(db, origin_ref)
     logging.info(f'> [ remove ]: {origin_ref}: removed {deletions} leases')

-    cur_time = datetime.utcnow()
     response = {
         "released_lease_list": released_lease_list,
         "release_failure_list": None,
         "sync_timestamp": cur_time.isoformat(),
         "prompts": None
     }
+
     return JSONResponse(response)
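Note (reviewer sketch, not part of the patch): besides the ORM switch, this file also changes `jwt_decode_key` to RS256 — the decode key has to be constructed for the same algorithm the tokens are signed with, otherwise verification fails. A throwaway round trip illustrating that pairing (not project code):

```python
# Sketch: sign/verify round trip with matching RS256 keys, mirroring how app/main.py builds its keys.
from Crypto.PublicKey import RSA
from jose import jws, jwk
from jose.constants import ALGORITHMS

key = RSA.generate(2048)
encode_key = jwk.construct(key.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
decode_key = jwk.construct(key.public_key().export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)

token = jws.sign({'payload': 'example'}, key=encode_key, headers=None, algorithm=ALGORITHMS.RS256)
print(jws.verify(token, decode_key, algorithms=ALGORITHMS.RS256))  # prints the verified payload bytes
```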
diff --git a/app/orm.py b/app/orm.py
new file mode 100644
index 0000000..697c720
--- /dev/null
+++ b/app/orm.py
@@ -0,0 +1,116 @@
+import datetime
+
+from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, UniqueConstraint, update, and_, delete, inspect
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.engine import Engine
+from sqlalchemy.orm import sessionmaker
+
+Base = declarative_base()
+
+
+class Origin(Base):
+    __tablename__ = "origin"
+
+    origin_ref = Column(CHAR(length=36), primary_key=True, unique=True, index=True)  # uuid4
+
+    hostname = Column(VARCHAR(length=256), nullable=True)
+    guest_driver_version = Column(VARCHAR(length=10), nullable=True)
+    os_platform = Column(VARCHAR(length=256), nullable=True)
+    os_version = Column(VARCHAR(length=256), nullable=True)
+
+    def __repr__(self):
+        return f'Origin(origin_ref={self.origin_ref}, hostname={self.hostname})'
+
+    @staticmethod
+    def create_statement(engine: Engine):
+        from sqlalchemy.schema import CreateTable
+        return CreateTable(Origin.__table__).compile(engine)
+
+    @staticmethod
+    def create_or_update(engine: Engine, origin: "Origin"):
+        session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
+        entity = session.query(Origin).filter(Origin.origin_ref == origin.origin_ref).first()
+        print(entity)
+        if entity is None:
+            session.add(origin)
+        else:
+            values = dict(
+                hostname=origin.hostname,
+                guest_driver_version=origin.guest_driver_version,
+                os_platform=origin.os_platform,
+                os_version=origin.os_version,
+            )
+            session.execute(update(Origin).where(Origin.origin_ref == origin.origin_ref).values(**values))
+        session.flush()
+        session.close()
+
+
+class Lease(Base):
+    __tablename__ = "lease"
+
+    origin_ref = Column(CHAR(length=36), ForeignKey(Origin.origin_ref), primary_key=True, nullable=False, index=True)  # uuid4
+    lease_ref = Column(CHAR(length=36), primary_key=True, nullable=False, index=True)  # uuid4
+
+    lease_created = Column(DATETIME(), nullable=False)
+    lease_expires = Column(DATETIME(), nullable=False)
+    lease_updated = Column(DATETIME(), nullable=False)
+
+    def __repr__(self):
+        return f'Lease(origin_ref={self.origin_ref}, lease_ref={self.lease_ref}, expires={self.lease_expires})'
+
+    @staticmethod
+    def create_statement(engine: Engine):
+        from sqlalchemy.schema import CreateTable
+        return CreateTable(Lease.__table__).compile(engine)
+
+    @staticmethod
+    def create_or_update(engine: Engine, lease: "Lease"):
+        session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
+        entity = session.query(Lease).filter(and_(Lease.origin_ref == lease.origin_ref, Lease.lease_ref == lease.lease_ref)).first()
+        if entity is None:
+            if lease.lease_updated is None:
+                lease.lease_updated = lease.lease_created
+            session.add(lease)
+        else:
+            values = dict(lease_expires=lease.lease_expires, lease_updated=lease.lease_updated)
+            session.execute(update(Lease).where(and_(Lease.origin_ref == lease.origin_ref, Lease.lease_ref == lease.lease_ref)).values(**values))
+        session.flush()
+        session.close()
+
+    @staticmethod
+    def find_by_origin_ref(engine: Engine, origin_ref: str) -> ["Lease"]:
+        session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
+        entities = session.query(Lease).filter(Lease.origin_ref == origin_ref).all()
+        session.close()
+        return entities
+
+    @staticmethod
+    def find_by_origin_ref_and_lease_ref(engine: Engine, origin_ref: str, lease_ref: str) -> "Lease":
+        session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
+        entity = session.query(Lease).filter(and_(Lease.origin_ref == origin_ref, Lease.lease_ref == lease_ref)).first()
+        session.close()
+        return entity
+
+    @staticmethod
+    def renew(engine: Engine, lease: "Lease", lease_expires: datetime.datetime, lease_updated: datetime.datetime):
+        session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
+        values = dict(lease_expires=lease_expires, lease_updated=lease_updated)
+        session.execute(update(Lease).where(and_(Lease.origin_ref == lease.origin_ref, Lease.lease_ref == lease.lease_ref)).values(**values))
+        session.close()
+
+    @staticmethod
+    def cleanup(engine: Engine, origin_ref: str) -> int:
+        session = sessionmaker(autocommit=True, autoflush=True, bind=engine)()
+        deletions = session.query(Lease).filter(Lease.origin_ref == origin_ref).delete()
+        session.close()
+        return deletions
+
+
+def init(engine: Engine):
+    tables = [Origin, Lease]
+    db = inspect(engine)
+    session = sessionmaker(bind=engine)()
+    for table in tables:
+        if not db.dialect.has_table(engine.connect(), table.__tablename__):
+            session.execute(str(table.create_statement(engine)))
+    session.close()
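Note (reviewer sketch, not part of the patch): a quick way to exercise the new ORM helpers outside the API. The SQLite path is arbitrary; run the snippet from `app/` (like `main.py` does) so that `orm` is importable.

```python
# Sketch: using app/orm.py directly against a throwaway SQLite database.
from datetime import datetime, timedelta
from uuid import uuid4

from sqlalchemy import create_engine

from orm import Origin, Lease, init

engine = create_engine('sqlite:////tmp/fastapi-dls-orm-demo.sqlite')
init(engine)  # creates the "origin" and "lease" tables if they do not exist yet

origin_ref, lease_ref = str(uuid4()), str(uuid4())
Origin.create_or_update(engine, Origin(origin_ref=origin_ref, hostname='demo-host'))

now = datetime.utcnow()
lease = Lease(origin_ref=origin_ref, lease_ref=lease_ref, lease_created=now, lease_expires=now + timedelta(days=90))
Lease.create_or_update(engine, lease)                 # insert; lease_updated defaults to lease_created

print(Lease.find_by_origin_ref(engine, origin_ref))   # [Lease(origin_ref=..., lease_ref=..., expires=...)]
print(Lease.cleanup(engine, origin_ref))              # 1
```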
diff --git a/requirements.txt b/requirements.txt
index 413b6d1..cceb6a7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,6 +3,6 @@ uvicorn[standard]==0.20.0
 python-jose==3.3.0
 pycryptodome==3.16.0
 python-dateutil==2.8.2
-dataset==1.5.2
+sqlalchemy==1.4.45
 markdown==3.4.1
 python-dotenv==0.21.0
diff --git a/test/main.py b/test/main.py
new file mode 100644
index 0000000..22c1c6b
--- /dev/null
+++ b/test/main.py
@@ -0,0 +1,85 @@
+from uuid import uuid4
+
+from jose import jwt
+from starlette.testclient import TestClient
+import sys
+
+# add relative path to use packages as they were in the app/ dir
+sys.path.append('../')
+sys.path.append('../app')
+
+from app import main
+
+client = TestClient(main.app)
+
+ORIGIN_REF = str(uuid4())
+
+
+def test_index():
+    response = client.get('/')
+    assert response.status_code == 200
+
+
+def test_status():
+    response = client.get('/status')
+    assert response.status_code == 200
+    assert response.json()['status'] == 'up'
+
+
+def test_client_token():
+    response = client.get('/client-token')
+    assert response.status_code == 200
+
+
+def test_auth_v1_origin():
+    payload = {
+        "registration_pending": False,
+        "environment": {
+            "guest_driver_version": "guest_driver_version",
+            "hostname": "myhost",
+            "ip_address_list": ["192.168.1.123"],
+            "os_version": "os_version",
+            "os_platform": "os_platform",
+            "fingerprint": {"mac_address_list": ["ff:ff:ff:ff:ff:ff"]},
+            "host_driver_version": "host_driver_version"
+        },
+        "update_pending": False,
+        "candidate_origin_ref": ORIGIN_REF,
+    }
+
+    response = client.post('/auth/v1/origin', json=payload)
+    assert response.status_code == 200
+    assert response.json()['origin_ref'] == ORIGIN_REF
+
+
+def test_auth_v1_code():
+    payload = {
+        "code_challenge": "0wmaiAMAlTIDyz4Fgt2/j0tXnGv72TYbbLs4ISRCZlY",
+        "origin_ref": ORIGIN_REF,
+    }
+
+    response = client.post('/auth/v1/code', json=payload)
+    assert response.status_code == 200
+
+    payload = jwt.get_unverified_claims(token=response.json()['auth_code'])
+    assert payload['origin_ref'] == ORIGIN_REF
+
+
+def test_auth_v1_token():
+    pass
+
+
+def test_leasing_v1_lessor():
+    pass
+
+
+def test_leasing_v1_lessor_lease():
+    pass
+
+
+def test_leasing_v1_lease_renew():
+    pass
+
+
+def test_leasing_v1_lessor_lease_remove():
+    pass
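Note (reviewer sketch, not part of the patch): the remaining tests are stubs. `test_auth_v1_token` could be completed by driving the code/verifier flow, since `auth_v1_token` recomputes the challenge as unpadded base64 of `sha256(code_verifier)`. This assumes `b64enc` in `app/main.py` is the standard `base64.b64encode` and that the `auth_code` JWT carries the submitted challenge (which the verifier check implies); `client`, `ORIGIN_REF` and `uuid4` come from the test module above.

```python
# Sketch: one possible body for the test_auth_v1_token stub in test/main.py.
from base64 import b64encode
from hashlib import sha256


def test_auth_v1_token_sketch():
    code_verifier = str(uuid4())
    challenge = b64encode(sha256(code_verifier.encode('utf-8')).digest()).rstrip(b'=').decode('utf-8')

    response = client.post('/auth/v1/code', json={'code_challenge': challenge, 'origin_ref': ORIGIN_REF})
    assert response.status_code == 200

    payload = {'auth_code': response.json()['auth_code'], 'code_verifier': code_verifier}
    response = client.post('/auth/v1/token', json=payload)
    assert response.status_code == 200
```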
diff --git a/version.env b/version.env
index f7fc9a2..624bade 100644
--- a/version.env
+++ b/version.env
@@ -1 +1 @@
-VERSION=0.6
+VERSION=1.0.0