forked from kompetenzinventar/ki-backend
Compare commits
119 Commits
SHA1
---
06caf796cd
54326129e4
32bb81b69b
fc8a06e562
b8b713c620
186ab9af43
0d77c8d637
72fda5df51
739ce0382b
4c345ecc5f
5882eeaf2e
0512bc0dc3
319bf9b2f5
717454e205
496550a959
7ebf13edf3
5409bdeaee
4064a65e63
afe22e13c1
e4d0a89053
78bdd796e0
c9f83e80b3
5369de67d8
54c5246f0a
9a95baf106
338380ed93
3d128d771c
6b7517fc7a
5ceb361da3
4379a06f26
c43f375e4b
621a90b8ab
033dee7836
ca81e8bf70
d507a20a93
3f2c23c386
b46ac5e379
fa4429b6ef
7a0f2434db
8c3fe3fe7d
56ade6de68
28cf714217
9ff56f6676
2412df4960
469ef511d6
47d2c94b79
384dd82454
b0dcfacd25
86edb246bf
9424e21edc
f02efab07a
08f1104942
8dde142f38
d1b1636aa2
3fcd1fa20e
8394400e96
843050f923
812913ffe2
dee80c7e14
de60ec0d46
6c6fcea81c
166e0d40c6
c9b5ab62ed
c0005100b4
d614039cdd
316236a7e5
c15faabd6d
53eab74e60
82d908193c
6620a6819a
be6fec18ab
9ebae03550
763a6efc9f
6f04d23e6c
155ddc556c
a5a85e6032
6a3458a596
97be8f4667
776803fc96
702f4968f6
c1285153ef
fdc81844b5
b804c22a93
9a7a9379e2
5b707ad294
c05f040313
68bf505cd0
2ac03d0c26
6b46ea5516
4798263c27
274f984994
37f57eadea
d7f4acf251
c65ef4a95c
f3840f18b7
da46d01765
0f9f807256
66294cd52f
d4a5c8f5eb
5e4d6d464d
6e77647eb9
f7278bf7ea
cf1a5a532c
0fd04d4797
a5bd954bb5
881c3d3038
d60acd169b
f1ecbadf05
67cb8c9152
f7e058d387
695c88e159
1360b4c738
689a5ba33e
19aebcc327
0fcd407006
dea781cc29
be9bc8b5cc
4fab7d7cda
f131ee335c

.dockerignore (new file, 13 lines)
@@ -0,0 +1,13 @@
+# SPDX-FileCopyrightText: WTF Kooperative eG <https://wtf-eg.de/>
+#
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+*
+!Pipfile
+!Pipfile.lock
+!data/
+!ki/
+!LICENSES/
+!migrations/
+!app.py
+!run_prod.py

.drone.yml (142 lines changed)
@@ -4,31 +4,125 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 kind: pipeline
 type: docker
-name: default
-
-steps:
-  - name: qa
-    image: registry.wtf-eg.net/ki-backend-builder:1.0.0
-    commands:
-      - pipenv install --dev
-      - pipenv run flake8
-      - pipenv run reuse lint
-      - pipenv run python -m unittest discover ki
-  - name: docker-publish
-    image: plugins/docker
-    settings:
-      registry: registry.wtf-eg.net
-      repo: registry.wtf-eg.net/ki-backend
-      target: ki-backend
-      auto_tag: true
-      username:
-        from_secret: "docker_username"
-      password:
-        from_secret: "docker_password"
-    when:
-      branch:
-        - main
-
-image_pull_secrets:
-  - dockerconfig
+name: qa
+
+trigger:
+  event:
+    - push
+    - pull_request
+  branch:
+    - main
+
+steps:
+  - name: install-lint-test
+    image: python:3.8.20-alpine@sha256:3d93b1f77efce339aa77db726656872517b0d67837989aa7c4b35bd5ae7e81ba
+    env:
+      PYROOT: '/pyroot'
+      PYTHONUSERBASE: '/pyroot'
+    commands:
+      - apk add --no-cache gcc g++ musl-dev python3-dev
+      - pip3 install pipenv
+      - pipenv verify
+      - pipenv install --dev
+      - pipenv run flake8
+      - pipenv run reuse lint
+      - pipenv run python -m unittest discover ki
+  - name: docker-dry-run
+    image: plugins/docker:20.18.4@sha256:a8d3d86853c721492213264815f1d00d3ed13f42f5c1855a02f47fa4d5f1e042
+    settings:
+      registry: git.wtf-eg.de
+      repo: git.wtf-eg.de/kompetenzinventar/backend
+      target: ki-backend
+      dry_run: true
+    when:
+      event:
+        - pull_request
+
+---
+kind: pipeline
+type: docker
+name: build
+
+trigger:
+  event:
+    - push
+  branch:
+    - main
+
+depends_on:
+  - qa
+
+steps:
+  - name: docker-publish
+    image: plugins/docker:20.18.4@sha256:a8d3d86853c721492213264815f1d00d3ed13f42f5c1855a02f47fa4d5f1e042
+    settings:
+      registry: git.wtf-eg.de
+      repo: git.wtf-eg.de/kompetenzinventar/backend
+      target: ki-backend
+      auto_tag: true
+      username:
+        from_secret: "docker_username"
+      password:
+        from_secret: "docker_password"
+
+---
+kind: pipeline
+type: docker
+name: deploy
+
+trigger:
+  event:
+    - push
+  branch:
+    - main
+
+depends_on:
+  - build
+
+steps:
+  - name: deploy-dev
+    image: appleboy/drone-ssh:1.7.5@sha256:995677e073454912f26d4c0fdd2f9df2e1f5a30d6603d3f2ece667311b6babb3
+    settings:
+      host:
+        - dev01.wtf-eg.net
+      username: drone_deployment
+      key:
+        from_secret: "dev01_deployment_key"
+      command_timeout: 2m
+      script:
+        - echo "Executing forced command..."
+
+---
+kind: pipeline
+type: docker
+name: tag-release
+
+trigger:
+  event:
+    - tag
+
+steps:
+  - name: install-lint-test
+    image: python:3.8.20-alpine@sha256:3d93b1f77efce339aa77db726656872517b0d67837989aa7c4b35bd5ae7e81ba
+    env:
+      PYROOT: '/pyroot'
+      PYTHONUSERBASE: '/pyroot'
+    commands:
+      - apk add --no-cache gcc g++ musl-dev python3-dev
+      - pip3 install pipenv
+      - pipenv install --dev
+      - pipenv run flake8
+      - pipenv run reuse lint
+      - pipenv run python -m unittest discover ki
+  - name: docker-publish
+    image: plugins/docker:20.18.4@sha256:a8d3d86853c721492213264815f1d00d3ed13f42f5c1855a02f47fa4d5f1e042
+    settings:
+      registry: git.wtf-eg.de
+      repo: git.wtf-eg.de/kompetenzinventar/backend
+      target: ki-backend
+      auto_tag: true
+      username:
+        from_secret: "docker_username"
+      password:
+        from_secret: "docker_password"

.python-version (new file, 1 line)
@@ -0,0 +1 @@
+3.8.20

.reuse/dep5 (12 lines removed)
@@ -1,12 +0,0 @@
-Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
-Upstream-Name: Kompetenzinventar
-Upstream-Contact: Michael Weimann <mail@michael-weimann.eu>
-Source: https://git.wtf-eg.de/kompetenzinventar/ki-backend
-
-Files: data/imgs/flags/*
-Copyright: 2013 Panayiotis Lipiridis <https://flagicons.lipis.dev/>
-License: MIT
-
-Files: Pipfile.lock migrations/*
-Copyright: WTF Kooperative eG <https://wtf-eg.de/>
-License: AGPL-3.0-or-later

.yapfignore (new file, 5 lines)
@@ -0,0 +1,5 @@
+# SPDX-FileCopyrightText: WTF Kooperative eG <https://wtf-eg.de/>
+#
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+migrations/*.py

Dockerfile (22 lines changed)
@@ -2,7 +2,17 @@
 #
 # SPDX-License-Identifier: AGPL-3.0-or-later
 
-FROM registry.wtf-eg.net/ki-backend-builder:1.0.0 as builder
+FROM python:3.8.20-alpine@sha256:3d93b1f77efce339aa77db726656872517b0d67837989aa7c4b35bd5ae7e81ba AS builder
 
+ENV PYROOT=/pyroot
+ENV PYTHONUSERBASE=$PYROOT
+
+RUN apk add --no-cache \
+    gcc \
+    g++ \
+    musl-dev \
+    python3-dev && \
+    pip3 install pipenv
+
 COPY Pipfile* ./
 
@@ -10,7 +20,10 @@ RUN PIP_USER=1 PIP_IGNORE_INSTALLED=1 pipenv install --system --deploy --ignore-
 RUN pip3 uninstall --yes pipenv
 
 
-FROM registry.wtf-eg.net/ki-backend-base:1.0.0 as ki-backend
+FROM python:3.8.20-alpine@sha256:3d93b1f77efce339aa77db726656872517b0d67837989aa7c4b35bd5ae7e81ba AS ki-backend
+
+ENV PYROOT=/pyroot
+ENV PYTHONUSERBASE=$PYROOT
 
 # Install six explicitly. Otherwise Python complains about it missing.
 RUN pip3 install six
@@ -22,4 +35,9 @@ WORKDIR /app
 
 COPY . .
 
+LABEL org.opencontainers.image.source=https://git.wtf-eg.de/kompetenzinventar/ki-backend.git
+LABEL org.opencontainers.image.url=https://git.wtf-eg.de/kompetenzinventar/ki-backend
+LABEL org.opencontainers.image.documentation=https://git.wtf-eg.de/kompetenzinventar/ki-backend#docker
+LABEL org.opencontainers.image.vendor="WTF Kooperative eG"
+
 CMD ["python3", "run_prod.py"]

Pipfile (29 lines changed)
@@ -8,22 +8,23 @@ verify_ssl = true
 name = "pypi"
 
 [packages]
-flask = "~=2.0.1"
-python-dotenv = "~=0.17.1"
-flask-migrate = "~=3.0.1"
-flask-sqlalchemy = "~=2.5.1"
-sqlalchemy = "~=1.4.18"
-waitress = "~=2.0.0"
-pyyaml = "~=5.4.1"
-flask-cors = "~=3.0.10"
-ldap3 = "~=2.9"
-pymysql = "~=1.0.2"
+flask = "==2.3.3"
+python-dotenv = "==1.0.1"
+flask-migrate = "==4.0.7"
+flask-sqlalchemy = "==2.5.1"
+sqlalchemy = "==1.4.54"
+waitress = "==2.1.2"
+pyyaml = "==6.0.2"
+flask-cors = "==5.0.0"
+ldap3 = "==2.9.1"
+pymysql = "==1.1.1"
+werkzeug = "==2.3.8"
 
 [dev-packages]
-flake8 = "~=3.9.2"
-yapf = "~=0.31.0"
-pre-commit = "~=2.13.0"
-reuse = "~=0.13.0"
+flake8 = "==7.1.1"
+yapf = "==0.40.2"
+pre-commit = "==2.21.0"
+reuse = "==4.0.3"
 
 [requires]
 python_version = "3.8"

Pipfile.lock (generated, 959 lines changed)
File diff suppressed because it is too large.

README.md (45 lines changed)
@@ -9,6 +9,32 @@ SPDX-License-Identifier: AGPL-3.0-or-later
 [![Build Status](https://drone.wtf-eg.de/api/badges/kompetenzinventar/ki-backend/status.svg?ref=refs/heads/main)](https://drone.wtf-eg.de/kompetenzinventar/ki-backend)
 [![REUSE status](https://api.reuse.software/badge/git.wtf-eg.de/kompetenzinventar/ki-backend)](https://api.reuse.software/info/git.wtf-eg.de/kompetenzinventar/ki-backend)
 
+## Über
+
+Dieses Repo enthält das Backend des Projekts Kompentenzinventar - einer Webapplikation zur Erfassung von Userprofilen für die WTF eG.
+
+Implementiert ist das Backend mit Flask.
+
+### Mitmachen
+
+Du kannst gerne bei der Entwicklung des Kompetenzinventars mitmachen.
+
+- Fehler oder fehlende Funktionen erfassen. Bitte direkt über die [Issues](https://git.wtf-eg.de/kompetenzinventar/ki-backend/issues) in Gitea.
+- Dokumentation oder Implementierung verbessern. Bitte forke hierzu das Projekt, branche von `main` ab und erstelle dann einen [Pull Request](https://git.wtf-eg.de/kompetenzinventar/ki-backend/pulls).
+
+### Kommunikation
+
+Folgende Kanäle gibt es für die Kommunikation über das Kompetenzinventar:
+
+- Die [Issues](https://git.wtf-eg.de/kompetenzinventar/ki-backend/issues) im WTF Gitea.
+- Den Bereich [AG Entwicklung](https://forum.wtf-eg.de/c/interna/ag-entwicklung/21) im WTF Forum.
+- Einen Raum in Matrix. Zutritt per Einladung, frlan lädt ein, eine einfache PN im Forum reicht.
+
+### Repos
+
+* **[ki-backend](https://git.wtf-eg.de/kompetenzinventar/ki-backend)** (dieses Repo) enthält das Backend
+* [ki-frontend](https://git.wtf-eg.de/kompetenzinventar/ki-frontend) enthält das Frontend
+* Weitere Repositories befinden sich in der Gitea Organisation [Kompetenzinventar](https://git.wtf-eg.de/kompetenzinventar).
+
 ## Entwicklung
 
 ### Abhängigkeiten
@@ -196,25 +222,6 @@ docker-compose up
 
 Dann http://localhost:13337 aufrufen.
 
-### Workaround, falls der Zugriff auf registry.wtf-eg.net nicht möglich ist
-
-Voraussetzung:
-
-[ki-backend-docker](https://git.wtf-eg.de/kompetenzinventar/ki-backend-docker) muss parallel zum `ki-backend` ausgecheckt sein.
-
-```
-cd ki-backend-docker
-docker build . --target base -t ki-backend-base
-docker build . --target builder -t ki-backend-builder
-```
-
-Ändern der 2 Einträge im `Dockerfile` des `ki-backend`:
-
-- registry.wtf-eg.net/ki-backend-builder:1.0.0 -> ki-backend-builder
-- registry.wtf-eg.net/ki-backend-base:1.0.0 -> ki-backend-base
-
-Danach sollte `docker-compose up` funktionieren.
-
 ## Lizenzen
 
 Dieses Projekt erfüllt die [REUSE](https://reuse.software/) Spezifikation.

REUSE.toml (new file, 24 lines)
@@ -0,0 +1,24 @@
+# SPDX-FileCopyrightText: NONE
+# SPDX-License-Identifier: CC0-1.0
+
+version = 1
+SPDX-PackageName = "Kompetenzinventar Backend"
+SPDX-PackageDownloadLocation = "https://git.wtf-eg.de/kompetenzinventar/ki-backend"
+
+[[annotations]]
+path = "data/imgs/flags/**"
+precedence = "aggregate"
+SPDX-FileCopyrightText = "2013 Panayiotis Lipiridis <https://flagicons.lipis.dev/>"
+SPDX-License-Identifier = "MIT"
+
+[[annotations]]
+path = ["Pipfile.lock", "migrations/**"]
+precedence = "aggregate"
+SPDX-FileCopyrightText = "WTF Kooperative eG <https://wtf-eg.de/>"
+SPDX-License-Identifier = "AGPL-3.0-or-later"
+
+[[annotations]]
+path = ["renovate.json", ".python-version"]
+precedence = "aggregate"
+SPDX-FileCopyrightText = "WTF Kooperative eG <https://wtf-eg.de/>"
+SPDX-License-Identifier = "AGPL-3.0-or-later"

app.py (15 lines changed)
@@ -8,22 +8,23 @@ import os
 from dotenv import load_dotenv, find_dotenv
 from flask import Flask
 from flask_cors import CORS
-from flask.logging import default_handler
 from flask_migrate import Migrate
 from flask_sqlalchemy import SQLAlchemy
-from ldap3.utils.log import logger as ldap3_logger
 from ldap3.utils.log import set_library_log_detail_level, BASIC
 
 load_dotenv(find_dotenv())
 
 app = Flask(__name__)
 
+# Configure logging
 loglevel = os.getenv("KI_LOGLEVEL", logging.WARNING)
 loglevel = int(loglevel)
 app.logger.setLevel(loglevel)
-logging.basicConfig(level=loglevel)
-set_library_log_detail_level(BASIC)
-ldap3_logger.addHandler(default_handler)
+app.logger.propagate = False  # do not forward messages to the root logger
+logging.basicConfig(level=loglevel,
+                    format='[%(asctime)s] %(levelname)s [%(name)s] %(message)s')  # configure root logger as fallback
+logging.getLogger('werkzeug').propagate = False  # werkzeug has its own ColorStreamHandler
+set_library_log_detail_level(BASIC)  # ldap3 has different verbosity levels internally
 
 app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("SQLALCHEMY_DATABASE_URI")
 app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
@@ -40,6 +41,6 @@ CORS(app)
 db = SQLAlchemy(app)
 migrate = Migrate(app, db, compare_type=True)
 
-logging.debug("Hello from KI")
+app.logger.info("Hello from KI")
 
 from ki import module  # noqa

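The logging rework above stops duplicate output (Flask's `app.logger` no longer propagates to the root logger, and werkzeug keeps its own handler) while the root logger gets a timestamped format as a fallback for everything else. A condensed sketch of the resulting setup, limited to the pieces visible in the diff (the final log call is only for illustration):

```python
import logging
import os

from flask import Flask
from ldap3.utils.log import set_library_log_detail_level, BASIC

app = Flask(__name__)

loglevel = int(os.getenv("KI_LOGLEVEL", logging.WARNING))
app.logger.setLevel(loglevel)
app.logger.propagate = False  # Flask's own handler already prints these records

# Root logger is the fallback for every other library (SQLAlchemy, alembic, ...).
logging.basicConfig(level=loglevel,
                    format='[%(asctime)s] %(levelname)s [%(name)s] %(message)s')

logging.getLogger('werkzeug').propagate = False  # werkzeug installs its own stream handler
set_library_log_detail_level(BASIC)  # ldap3 verbosity is controlled separately

app.logger.info("Hello from KI")  # emitted once, through the Flask handler
```
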
data/imgs/flags/ca.svg (new file, 5 lines, 242 B)
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<svg xmlns="http://www.w3.org/2000/svg" width="810" height="540">
+  <rect width="810" height="540" fill="#FCDD09"/>
+  <path stroke="#DA121A" stroke-width="60" d="M0,90H810m0,120H0m0,120H810m0,120H0"/>
+</svg>

@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 
 import csv
-import logging
 
 from app import app, db
 from ki.models import Address, Contact, ContactType, Language, Skill, Profile, ProfileLanguage, ProfileSearchtopic, \
@@ -13,7 +12,7 @@ from ki.models import Address, Contact, ContactType, Language, Skill, Profile, P
 def seed_contacttypes():
     contacttypes_seed_file_path = app.config["KI_DATA_DIR"] + "/seed_data/contacttypes.csv"
 
-    logging.info("importing contacttypes")
+    app.logger.info("importing contacttypes")
 
     with open(contacttypes_seed_file_path) as file:
         csv_reader = csv.DictReader(file)
@@ -26,22 +25,30 @@
             db.session.add(ContactType(id=int(contacttype["id"]), name=contacttype["name"]))
 
 
-def seed_user(nickname,
-              visible=False,
-              skills=[],
-              languages=[],
+def seed_user(auth_id,
+              nickname=None,
+              pronouns="",
+              visible=True,
               volunteerwork="",
              availability_status=False,
              freetext="",
              availability_text="",
-              availability_hours_per_week=42):
+              availability_hours_per_week=42,
+              skills=[],
+              searchtopics=[],
+              languages=[],
+              address=None,
+              contacts=[]):
+    if not nickname:
+        nickname = auth_id
+
     app.logger.info(f"seeding {nickname} \\o/")
 
-    user = User(auth_id=nickname)
+    user = User(auth_id=auth_id)
     db.session.add(user)
 
     profile = Profile(nickname=nickname,
-                      pronouns="",
+                      pronouns=pronouns,
                       volunteerwork=volunteerwork,
                       availability_status=availability_status,
                       availability_text=availability_text,
@@ -54,10 +61,29 @@
         skill = ProfileSkill(profile=profile, skill_id=skill_data[0], level=skill_data[1])
         db.session.add(skill)
 
+    for skill_id in searchtopics:
+        searchtopic = ProfileSearchtopic(profile=profile, skill_id=skill_id)
+        db.session.add(searchtopic)
+
     for language_data in languages:
         language = ProfileLanguage(profile=profile, language_id=language_data[0], level=language_data[1])
         db.session.add(language)
 
+    if address:
+        _address = Address(name=address[0],
+                           street=address[1],
+                           house_number=address[2],
+                           additional=address[3],
+                           postcode=address[4],
+                           city=address[5],
+                           country=address[6],
+                           profile=profile)
+        db.session.add(_address)
+
+    for contact_data in contacts:
+        contact = Contact(profile=profile, contacttype_id=contact_data[0], content=contact_data[1])
+        db.session.add(contact)
+
     db.session.add(profile)
 
 
@@ -93,75 +119,40 @@ def seed(dev: bool)
             db.session.add(Language(id=iso["639-1"], name=iso["Sprache"]))
 
     if dev:
-        app.logger.info("seeding peter :)")
-
-        peter = User(auth_id="peter")
-        db.session.add(peter)
-
-        peters_profile = Profile(nickname="peternichtlustig",
-                                 pronouns="Herr Dr. Dr.",
-                                 volunteerwork="Gartenverein",
-                                 availability_status=True,
-                                 availability_hours_per_week=42,
-                                 availability_text="Immer",
-                                 freetext="Ich mag Kaffee",
-                                 user=peter)
-        db.session.add(peters_profile)
-
-        matrix_contact = Contact(profile=peters_profile, contacttype_id=4, content="@peter:wtf-eg.de")
-        db.session.add(matrix_contact)
-
-        email_contact = Contact(profile=peters_profile, contacttype_id=1, content="peter@wtf-eg.de")
-        db.session.add(email_contact)
-
-        peters_address = Address(name="Peter Nichtlustig",
-                                 street="Waldweg",
-                                 house_number="23i",
-                                 additional="Hinterhaus",
-                                 postcode="13337",
-                                 city="Bielefeld",
-                                 country="Deutschland",
-                                 profile=peters_profile)
-        db.session.add(peters_address)
-
-        peters_python_skill = ProfileSkill(profile=peters_profile, skill_id=3, level=3)
-        db.session.add(peters_python_skill)
-
-        peters_php_skill = ProfileSkill(profile=peters_profile, skill_id=1, level=5)
-        db.session.add(peters_php_skill)
-
-        peters_python_searchtopic = ProfileSearchtopic(profile=peters_profile, skill_id=3)
-        db.session.add(peters_python_searchtopic)
-
-        peters_php_searchtopic = ProfileSearchtopic(profile=peters_profile, skill_id=1)
-        db.session.add(peters_php_searchtopic)
-
-        peter_de = ProfileLanguage(profile=peters_profile, language_id="de", level=5)
-        db.session.add(peter_de)
-
-        peter_fr = ProfileLanguage(profile=peters_profile, language_id="fr", level=3)
-        db.session.add(peter_fr)
-
-        seed_user("klaus")
+        seed_user("klaus", visible=False)
+
+        for i in range(1, 20):
+            seed_user(f"babsi{i}")
+
+        seed_user("peter",
+                  nickname="peternichtlustig",
+                  visible=False,
+                  pronouns="Herr Dr. Dr.",
+                  volunteerwork="Gartenverein",
+                  availability_status=True,
+                  availability_hours_per_week=42,
+                  availability_text="Immer",
+                  freetext="Ich mag Kaffee",
+                  skills=[(3, 3), (1, 5)],
+                  searchtopics=[3, 1],
+                  languages=[("de", 5), ("fr", 3)],
+                  address=("Peter Nichtlustig", "Waldweg", "23i", "Hinterhaus", "13337", "Bielefeld", "Deutschland"),
+                  contacts=[(4, "@peter:wtf-eg.de"), (1, "peter@wtf-eg.de")])
 
         seed_user("dirtydieter",
-                  visible=True,
                   volunteerwork="Müll sammeln",
                   availability_status=True,
                   availability_hours_per_week=24,
                   availability_text="Nur Nachts!",
                   freetext="1001010010111!!!",
-                  skills=[(Skill.skill_id_php, 5)])
+                  skills=[(1, 5)],
+                  address=("Friedrich Witzig", "", "", "", "", "", ""))
 
-        all_skills = Skill.query.all()
-        all_profile_skills = []
-        for skill in all_skills:
-            all_profile_skills.append((skill.id, 3))
-
-        seed_user("jutta", visible=True, languages=[("fr", 5)], skills=all_profile_skills)
-        seed_user("giesela", visible=True, skills=[(Skill.skill_id_mysql, 3), (Skill.skill_id_postgresql, 5)])
-        seed_user("bertha", visible=False, skills=[(Skill.skill_id_sqlite, 3), (Skill.skill_id_postgresql, 5)])
-
-        seed_user("monique", visible=True, languages=[("fr", 4)])
+        all_skills = [(skill.id, 3) for skill in Skill.query.all()]
+        seed_user("jutta", languages=[("fr", 5)], skills=all_skills)
+        seed_user("giesela", skills=[(9, 3), (10, 5)])
+        seed_user("bertha", visible=False, skills=[(11, 3), (10, 5)])
+        seed_user("monique", languages=[("fr", 4)])
 
         db.session.commit()

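For orientation, the reworked `seed_user()` now takes plain tuples instead of pre-built model objects: `skills` and `languages` are `(id, level)` pairs, `searchtopics` are skill IDs, `contacts` are `(contacttype_id, content)` pairs, and `address` is a 7-tuple in `Address` column order. A hypothetical call under the new signature (the values are illustrative and not part of the actual seed data):

```python
# Illustrative only: skill 1 = PHP, skill 3 = Python, contacttype 1 = E-Mail
# (IDs as used elsewhere in the seed and test data).
seed_user("example",
          nickname="example_nickname",  # defaults to auth_id when omitted
          visible=True,
          skills=[(1, 5), (3, 3)],
          searchtopics=[1],
          languages=[("de", 5)],
          address=("Example Person", "Beispielweg", "1", "", "12345", "Beispielstadt", "Deutschland"),
          contacts=[(1, "example@wtf-eg.de")])
```
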
@@ -4,11 +4,14 @@
 
 from flask import make_response, request
 
-from ki.models import Profile, ProfileSkill, Skill, ProfileLanguage, Language
+from ki.models import Profile, ProfileSkill, Skill, ProfileLanguage, Language, Address
 
 
 def find_profiles():
-    page = int(request.args.get("page", 1))
+    try:
+        page = int(request.args.get("page", 1))
+    except ValueError:
+        page = 1
 
     if page < 1:
         return make_response({"messages": {"page": "Die angefragte Seite muss mindestens 1 sein"}}, 400)
@@ -19,27 +22,35 @@
         return make_response({"messages": {"page_size": "Die maximale Anzahl Einträge pro Seite beträgt 100"}}, 400)
 
     query = Profile.query.distinct(Profile.id) \
+        .order_by(Profile.nickname) \
         .filter(Profile.visible.is_(True)) \
         .join(Profile.skills, isouter=True).join(ProfileSkill.skill, isouter=True) \
-        .join(Profile.languages, isouter=True).join(ProfileLanguage.language, isouter=True)
+        .join(Profile.languages, isouter=True).join(ProfileLanguage.language, isouter=True) \
+        .join(Address, isouter=True)
 
     if "search" in request.args:
         terms = request.args["search"].split(" ")
         for term in terms:
             query = query.filter(
-                Profile.nickname.like(f"%{term}%") | Skill.name.like(f"%{term}%") | Language.name.like(f"%{term}%"))
+                Profile.nickname.like(f"%{term}%") |  # noqa: W504
+                Skill.name.like(f"%{term}%") |  # noqa: W504
+                Language.name.like(f"%{term}%") |  # noqa: W504
+                Address.name.like(f"%{term}%"))
 
     if "nickname" in request.args:
         nickname = request.args.get("nickname")
         query = query.filter(Profile.nickname.like(f"%{nickname}%"))
 
-    count = query.count()
+    paginated_result = query.paginate(page=page, per_page=page_size)
 
-    offset = (page - 1) * page_size
-    db_profiles = query.limit(page_size).offset(offset).all()
     api_profiles = []
 
-    for db_profile in db_profiles:
+    for db_profile in paginated_result.items:
         api_profiles.append(db_profile.to_dict())
 
-    return make_response({"total": count, "profiles": api_profiles})
+    return make_response({
+        "total": paginated_result.total,
+        "pages": paginated_result.pages,
+        "page": paginated_result.page,
+        "profiles": api_profiles
+    })

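The switch from a manual `count()` plus `LIMIT`/`OFFSET` to Flask-SQLAlchemy's `paginate()` is what provides the new `page` and `pages` response fields. A minimal sketch of the pattern in isolation (query shortened; the attribute names follow the `Pagination` object used above):

```python
page = 2
page_size = 20

# paginate() runs both the count and the windowed query and wraps the result.
pagination = Profile.query.order_by(Profile.nickname).paginate(page=page, per_page=page_size)

response_body = {
    "total": pagination.total,    # matching rows across all pages
    "pages": pagination.pages,    # number of pages at this page size
    "page": pagination.page,      # page that was actually returned
    "profiles": [profile.to_dict() for profile in pagination.items],
}
```
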
@@ -151,11 +151,11 @@ def update_profile(user_id: int):
     profile.freetext = request.json.get("freetext", "")
     profile.visible = request.json.get("visible", False)
 
-    update_address(profile, request.json.get("address", {}))
-    update_contacts(profile, request.json.get("contacts", {}))
-    update_skills(profile, request.json.get("skills", {}))
-    update_searchtopics(profile, request.json.get("searchtopics"))
-    update_languages(profile, request.json.get("languages", {}))
+    update_address(profile, request.json.get("address"))
+    update_contacts(profile, request.json.get("contacts", []))
+    update_skills(profile, request.json.get("skills", []))
+    update_searchtopics(profile, request.json.get("searchtopics", []))
+    update_languages(profile, request.json.get("languages", []))
 
     db.session.commit()
 

@@ -139,13 +139,6 @@ class Address(db.Model):
 
 
 class Skill(db.Model):
-    skill_id_php = 1
-    skill_id_python = 3
-    skill_id_sqlalchemy = 7
-    skill_id_mysql = 9
-    skill_id_postgresql = 10
-    skill_id_sqlite = 11
-
     __tablename__ = "skill"
 
     id = Column(Integer, primary_key=True)

@@ -17,6 +17,7 @@ content_type_png = "image/png"
 
 
 def token_auth(func):
     @wraps(func)
     def _token_auth(*args, **kwargs):
         auth_header = request.headers.get("Authorization")

@@ -8,6 +8,7 @@ from ki.test.ApiTest import ApiTest
 
 class TestContactTypesEndpoint(ApiTest):
 
     def test_skills_options(self):
         response = self.client.options("/contacttypes")
         self.assertEqual(response.status_code, 200)

@@ -8,6 +8,7 @@ from ki.test.ApiTest import ApiTest
 
 class TestFindProfilesEndpoint(ApiTest):
 
     def test_find_profiles_options(self):
         response = self.client.options("/users/profiles")
         self.assertEqual(response.status_code, 200)
@@ -20,25 +21,33 @@ class TestFindProfilesEndpoint(ApiTest):
         response = self.client.get("/users/profiles?nickname=horsthorsthorst",
                                    headers={"Authorization": "Bearer " + token})
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.json, {"total": 0, "profiles": []})
+        self.assertEqual(response.json, {"total": 0, "page": 1, "pages": 0, "profiles": []})
 
     def test_find_sql_specialchars(self):
         token = self.login("peter", "geheim")["token"]
 
         response = self.client.get("/users/profiles?nickname=%22%27%25", headers={"Authorization": "Bearer " + token})
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.json, {"total": 0, "profiles": []})
+        self.assertEqual(response.json, {"total": 0, "page": 1, "pages": 0, "profiles": []})
 
-    def test_find_all(self):
+    def test_find_all_page1(self):
         token = self.login("peter", "geheim")["token"]
 
         response = self.client.get("/users/profiles", headers={"Authorization": "Bearer " + token})
         self.assertEqual(response.status_code, 200)
-        self.assertDictContainsSubset({"total": 4}, response.json)
-        self.assertDictContainsSubset({"nickname": "dirtydieter"}, response.json["profiles"][0])
+        self.assertDictContainsSubset({"total": 23, "page": 1, "pages": 2}, response.json)
+        self.assertDictContainsSubset({"nickname": "babsi1"}, response.json["profiles"][0])
+        self.assertDictContainsSubset({"nickname": "dirtydieter"}, response.json["profiles"][19])
+
+    def test_find_all_page2(self):
+        token = self.login("peter", "geheim")["token"]
+
+        response = self.client.get("/users/profiles?page=2", headers={"Authorization": "Bearer " + token})
+        self.assertEqual(response.status_code, 200)
+        self.assertDictContainsSubset({"total": 23, "page": 2, "pages": 2}, response.json)
+        self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][0])
         self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][1])
-        self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][2])
-        self.assertDictContainsSubset({"nickname": "monique"}, response.json["profiles"][3])
+        self.assertDictContainsSubset({"nickname": "monique"}, response.json["profiles"][2])
 
     def test_find_dieter(self):
         token = self.login("peter", "geheim")["token"]
@@ -62,8 +71,8 @@ class TestFindProfilesEndpoint(ApiTest):
         response = self.client.get("/users/profiles?search=sql", headers={"Authorization": "Bearer " + token})
         self.assertEqual(response.status_code, 200)
         self.assertDictContainsSubset({"total": 2}, response.json)
-        self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][0])
-        self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][1])
+        self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][0])
+        self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][1])
 
     def test_find_postgres(self):
         token = self.login("peter", "geheim")["token"]
@@ -71,8 +80,8 @@ class TestFindProfilesEndpoint(ApiTest):
         response = self.client.get("/users/profiles?search=post", headers={"Authorization": "Bearer " + token})
         self.assertEqual(response.status_code, 200)
         self.assertDictContainsSubset({"total": 2}, response.json)
-        self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][0])
-        self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][1])
+        self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][0])
+        self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][1])
 
     def test_find_php_franzosen(self):
         token = self.login("peter", "geheim")["token"]
@@ -91,6 +100,14 @@ class TestFindProfilesEndpoint(ApiTest):
         self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][0])
         self.assertDictContainsSubset({"nickname": "monique"}, response.json["profiles"][1])
 
+    def test_find_dieter_by_name(self):
+        token = self.login("peter", "geheim")["token"]
+
+        response = self.client.get("/users/profiles?search=friedrich", headers={"Authorization": "Bearer " + token})
+        self.assertEqual(response.status_code, 200)
+        self.assertDictContainsSubset({"total": 1}, response.json)
+        self.assertDictContainsSubset({"nickname": "dirtydieter"}, response.json["profiles"][0])
+
 
 if __name__ == "main":
     unittest.main()

@@ -8,6 +8,7 @@ from ki.test.ApiTest import ApiTest
 
 class TestLanguagesEndpoint(ApiTest):
 
     def test_skills_options(self):
         response = self.client.options("/languages")
         self.assertEqual(response.status_code, 200)

@@ -10,6 +10,7 @@ from ki.test.ApiTest import ApiTest
 
 class TestLoginEndpoint(ApiTest):
 
     def test_login(self):
         response1_data = self.login("peter", "geheim")
         response2_data = self.login("peter", "geheim")

@@ -20,7 +20,8 @@ class TestProfileEndpoint(ApiTest):
         self.assertEqual(login_response.status_code, 200)
         self.assertIn("token", login_response.json)
 
-        response = self.client.post("/users/1/profile",
+        babsi = User.query.filter(User.auth_id == "babsi1").first()
+        response = self.client.post(f"/users/{babsi.id}/profile",
                                     data=json.dumps({}),
                                     content_type="application/json",
                                     headers={"Authorization": "Bearer " + login_response.json["token"]})
@@ -102,14 +103,15 @@ class TestProfileEndpoint(ApiTest):
                 "level": 2
             }]
         }
-        response = self.client.post("/users/1/profile",
+        peter = User.query.filter(User.auth_id == "peter").first()
+        response = self.client.post(f"/users/{peter.id}/profile",
                                     data=json.dumps(data),
                                     content_type="application/json",
                                     headers={"Authorization": "Bearer " + token})
 
         self.assertEqual(response.status_code, 200)
         with app.app_context():
-            user = User.query.filter(User.id == 1).first()
+            user = User.query.filter(User.id == peter.id).first()
             profile = user.profile
             self.assertEqual("Hebbert", profile.nickname)
             self.assertEqual("Monsieur", profile.pronouns)
@@ -183,7 +185,8 @@ class TestProfileEndpoint(ApiTest):
     def test_get_visible_proifle(self):
         token = self.login("peter", "geheim")["token"]
 
-        response = self.client.get("/users/3/profile", headers={"Authorization": f"Bearer {token}"})
+        babsi = User.query.filter(User.auth_id == "babsi1").first()
+        response = self.client.get(f"/users/{babsi.id}/profile", headers={"Authorization": f"Bearer {token}"})
 
         self.assertEqual(response.status_code, 200)
 
@@ -194,14 +197,15 @@ class TestProfileEndpoint(ApiTest):
         self.assertEqual(login_response.status_code, 200)
         self.assertIn("token", login_response.json)
 
-        response = self.client.get("/users/1/profile",
+        peter = User.query.filter(User.auth_id == "peter").first()
+        response = self.client.get(f"/users/{peter.id}/profile",
                                    headers={"Authorization": "Bearer " + login_response.json["token"]})
-
+        profile_id = peter.profile.id
         self.assertEqual(response.status_code, 200)
         self.assertDictEqual(
             response.json, {
                 "profile": {
-                    "user_id": 1,
+                    "user_id": peter.id,
                     "nickname": "peternichtlustig",
                     "pronouns": "Herr Dr. Dr.",
                     "availability_status": True,
@@ -218,12 +222,12 @@ class TestProfileEndpoint(ApiTest):
                         "id": 1,
                         "name": "Peter Nichtlustig",
                         "postcode": "13337",
-                        "profile_id": 1,
+                        "profile_id": profile_id,
                         "street": "Waldweg"
                     },
                     "contacts": [{
                         "id": 1,
-                        "profile_id": 1,
+                        "profile_id": profile_id,
                         "contacttype": {
                             "id": 4,
                             "name": "Matrix"
@@ -231,7 +235,7 @@ class TestProfileEndpoint(ApiTest):
                         "content": "@peter:wtf-eg.de"
                     }, {
                         "id": 2,
-                        "profile_id": 1,
+                        "profile_id": profile_id,
                         "contacttype": {
                             "id": 1,
                             "name": "E-Mail"
@@ -239,7 +243,7 @@ class TestProfileEndpoint(ApiTest):
                         "content": "peter@wtf-eg.de"
                     }],
                     "skills": [{
-                        "profile_id": 1,
+                        "profile_id": profile_id,
                         "skill": {
                             "id": 1,
                             "name": "PHP",
@@ -247,7 +251,7 @@ class TestProfileEndpoint(ApiTest):
                         },
                         "level": 5
                     }, {
-                        "profile_id": 1,
+                        "profile_id": profile_id,
                         "skill": {
                             "id": 3,
                             "name": "Python",
@@ -256,14 +260,14 @@ class TestProfileEndpoint(ApiTest):
                         "level": 3
                     }],
                     "searchtopics": [{
-                        "profile_id": 1,
+                        "profile_id": profile_id,
                         "skill": {
                             "id": 1,
                             "name": "PHP",
                             "icon_url": "/skills/1/icon"
                         }
                     }, {
-                        "profile_id": 1,
+                        "profile_id": profile_id,
                         "skill": {
                             "id": 3,
                             "name": "Python",
@@ -271,7 +275,7 @@ class TestProfileEndpoint(ApiTest):
                         }
                     }],
                     "languages": [{
-                        "profile_id": 1,
+                        "profile_id": profile_id,
                         "language": {
                             "id": "de",
                             "name": "Deutsch",
@@ -279,7 +283,7 @@ class TestProfileEndpoint(ApiTest):
                         },
                         "level": 5
                     }, {
-                        "profile_id": 1,
+                        "profile_id": profile_id,
                         "language": {
                             "id": "fr",
                             "name": "Französisch",

@@ -8,6 +8,7 @@ from ki.test.ApiTest import ApiTest
 
 class TestSkillsEndpoint(ApiTest):
 
     def test_skills_options(self):
         response = self.client.options("/skills")
         self.assertEqual(response.status_code, 200)

@@ -1,50 +0,0 @@
-# A generic, single database configuration.
-
-[alembic]
-# template used to generate migration files
-# file_template = %%(rev)s_%%(slug)s
-
-# set to 'true' to run the environment during
-# the 'revision' command, regardless of autogenerate
-# revision_environment = false
-
-
-# Logging configuration
-[loggers]
-keys = root,sqlalchemy,alembic,flask_migrate
-
-[handlers]
-keys = console
-
-[formatters]
-keys = generic
-
-[logger_root]
-level = WARN
-handlers = console
-qualname =
-
-[logger_sqlalchemy]
-level = WARN
-handlers =
-qualname = sqlalchemy.engine
-
-[logger_alembic]
-level = INFO
-handlers =
-qualname = alembic
-
-[logger_flask_migrate]
-level = INFO
-handlers =
-qualname = flask_migrate
-
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-level = NOTSET
-formatter = generic
-
-[formatter_generic]
-format = %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %H:%M:%S

@@ -11,9 +11,6 @@ from alembic import context
 # access to the values within the .ini file in use.
 config = context.config
 
-# Interpret the config file for Python logging.
-# This line sets up loggers basically.
-fileConfig(config.config_file_name)
 logger = logging.getLogger('alembic.env')
 
 # add your model's MetaData object here

renovate.json (new file, 18 lines)
@@ -0,0 +1,18 @@
+{
+  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+  "extends": [
+    "config:best-practices",
+    ":disableDependencyDashboard",
+    ":maintainLockFilesMonthly",
+    ":pinVersions",
+    ":separateMultipleMajorReleases"
+  ],
+  "packageRules": [
+    {
+      "matchDepNames": ["python"],
+      "groupName": "Python",
+      "separateMinorPatch": true,
+      "separateMultipleMinor": true
+    }
+  ]
+}

@@ -11,4 +11,4 @@ with app.app_context():
     config = migrate.get_config()
     command.upgrade(config, "head")
 
-serve(app, host="0.0.0.0", port=5000)
+serve(app, host="0.0.0.0", port=5000, threads=20)
