Compare commits


No commits in common. "main" and "fix/flags" have entirely different histories.

28 changed files with 718 additions and 966 deletions

View File

@ -1,13 +0,0 @@
# SPDX-FileCopyrightText: WTF Kooperative eG <https://wtf-eg.de/>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
*
!Pipfile
!Pipfile.lock
!data/
!ki/
!LICENSES/
!migrations/
!app.py
!run_prod.py

View File

@ -4,125 +4,31 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
kind: pipeline
type: docker
name: qa
trigger:
event:
- push
- pull_request
branch:
- main
name: default
steps:
- name: install-lint-test
image: python:3.9.21-alpine@sha256:f2f6a5627a879693b8c23e04df0b1a6aae3e09c165fa2a08f5c64b2b54c58d3c
env:
PYROOT: '/pyroot'
PYTHONUSERBASE: '/pyroot'
commands:
- apk add --no-cache gcc g++ musl-dev python3-dev
- pip3 install pipenv
- pipenv verify
- pipenv install --dev
- pipenv run flake8
- pipenv run reuse lint
- SQLALCHEMY_DATABASE_URI=sqlite:// pipenv run python -m unittest discover ki
- name: docker-dry-run
image: plugins/docker:20.18.6@sha256:59c993e3c4e6c097a0e2d274419aac0d7d8e929773f0ba1af44078e54389834f
settings:
registry: git.wtf-eg.de
repo: git.wtf-eg.de/kompetenzinventar/backend
target: ki-backend
dry_run: true
when:
event:
- pull_request
- name: qa
image: registry.wtf-eg.net/ki-backend-builder:1.0.0
commands:
- pipenv install --dev
- pipenv run flake8
- pipenv run reuse lint
- pipenv run python -m unittest discover ki
---
kind: pipeline
type: docker
name: build
- name: docker-publish
image: plugins/docker
settings:
registry: registry.wtf-eg.net
repo: registry.wtf-eg.net/ki-backend
target: ki-backend
auto_tag: true
username:
from_secret: "docker_username"
password:
from_secret: "docker_password"
when:
branch:
- main
trigger:
event:
- push
branch:
- main
depends_on:
- qa
steps:
- name: docker-publish
image: plugins/docker:20.18.6@sha256:59c993e3c4e6c097a0e2d274419aac0d7d8e929773f0ba1af44078e54389834f
settings:
registry: git.wtf-eg.de
repo: git.wtf-eg.de/kompetenzinventar/backend
target: ki-backend
auto_tag: true
username:
from_secret: "docker_username"
password:
from_secret: "docker_password"
---
kind: pipeline
type: docker
name: deploy
trigger:
event:
- push
branch:
- main
depends_on:
- build
steps:
- name: deploy-dev
image: appleboy/drone-ssh:1.7.5@sha256:995677e073454912f26d4c0fdd2f9df2e1f5a30d6603d3f2ece667311b6babb3
settings:
host:
- dev01.wtf-eg.net
username: drone_deployment
key:
from_secret: "dev01_deployment_key"
command_timeout: 2m
script:
- echo "Executing forced command..."
---
kind: pipeline
type: docker
name: tag-release
trigger:
event:
- tag
steps:
- name: install-lint-test
image: python:3.9.21-alpine@sha256:f2f6a5627a879693b8c23e04df0b1a6aae3e09c165fa2a08f5c64b2b54c58d3c
env:
PYROOT: '/pyroot'
PYTHONUSERBASE: '/pyroot'
commands:
- apk add --no-cache gcc g++ musl-dev python3-dev
- pip3 install pipenv
- pipenv install --dev
- pipenv run flake8
- pipenv run reuse lint
- SQLALCHEMY_DATABASE_URI=sqlite:// pipenv run python -m unittest discover ki
- name: docker-publish
image: plugins/docker:20.18.6@sha256:59c993e3c4e6c097a0e2d274419aac0d7d8e929773f0ba1af44078e54389834f
settings:
registry: git.wtf-eg.de
repo: git.wtf-eg.de/kompetenzinventar/backend
target: ki-backend
auto_tag: true
username:
from_secret: "docker_username"
password:
from_secret: "docker_password"
image_pull_secrets:
- dockerconfig

View File

@ -1 +0,0 @@
3.9.21

.reuse/dep5 Normal file

@ -0,0 +1,12 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: Kompetenzinventar
Upstream-Contact: Michael Weimann <mail@michael-weimann.eu>
Source: https://git.wtf-eg.de/kompetenzinventar/ki-backend
Files: data/imgs/flags/*
Copyright: 2013 Panayiotis Lipiridis <https://flagicons.lipis.dev/>
License: MIT
Files: Pipfile.lock migrations/*
Copyright: WTF Kooperative eG <https://wtf-eg.de/>
License: AGPL-3.0-or-later

View File

@ -1,5 +0,0 @@
# SPDX-FileCopyrightText: WTF Kooperative eG <https://wtf-eg.de/>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
migrations/*.py

View File

@ -2,17 +2,7 @@
#
# SPDX-License-Identifier: AGPL-3.0-or-later
FROM python:3.9.21-alpine@sha256:f2f6a5627a879693b8c23e04df0b1a6aae3e09c165fa2a08f5c64b2b54c58d3c AS builder
ENV PYROOT=/pyroot
ENV PYTHONUSERBASE=$PYROOT
RUN apk add --no-cache \
gcc \
g++ \
musl-dev \
python3-dev && \
pip3 install pipenv
FROM registry.wtf-eg.net/ki-backend-builder:1.0.0 as builder
COPY Pipfile* ./
@ -20,10 +10,7 @@ RUN PIP_USER=1 PIP_IGNORE_INSTALLED=1 pipenv install --system --deploy --ignore-
RUN pip3 uninstall --yes pipenv
FROM python:3.9.21-alpine@sha256:f2f6a5627a879693b8c23e04df0b1a6aae3e09c165fa2a08f5c64b2b54c58d3c AS ki-backend
ENV PYROOT=/pyroot
ENV PYTHONUSERBASE=$PYROOT
FROM registry.wtf-eg.net/ki-backend-base:1.0.0 as ki-backend
# Install six explicitly. Otherwise Python complains about it missing.
RUN pip3 install six
@ -35,9 +22,4 @@ WORKDIR /app
COPY . .
LABEL org.opencontainers.image.source=https://git.wtf-eg.de/kompetenzinventar/ki-backend.git
LABEL org.opencontainers.image.url=https://git.wtf-eg.de/kompetenzinventar/ki-backend
LABEL org.opencontainers.image.documentation=https://git.wtf-eg.de/kompetenzinventar/ki-backend#docker
LABEL org.opencontainers.image.vendor="WTF Kooperative eG"
CMD ["python3", "run_prod.py"]

Pipfile

@ -8,26 +8,25 @@ verify_ssl = true
name = "pypi"
[packages]
flask = "==2.3.3"
python-dotenv = "==1.0.1"
flask-migrate = "==4.0.7"
flask-sqlalchemy = "==3.1.1"
sqlalchemy = "==2.0.36"
waitress = "==2.1.2"
pyyaml = "==6.0.2"
flask-cors = "==5.0.0"
ldap3 = "==2.9.1"
pymysql = "==1.1.1"
werkzeug = "==2.3.8"
flask = "~=2.0.1"
python-dotenv = "~=0.17.1"
flask-migrate = "~=3.0.1"
flask-sqlalchemy = "~=2.5.1"
sqlalchemy = "~=1.4.18"
waitress = "~=2.0.0"
pyyaml = "~=5.4.1"
flask-cors = "~=3.0.10"
ldap3 = "~=2.9"
pymysql = "~=1.0.2"
[dev-packages]
flake8 = "==7.1.1"
yapf = "==0.40.2"
pre-commit = "==2.21.0"
reuse = "==4.0.3"
flake8 = "~=3.9.2"
yapf = "~=0.31.0"
pre-commit = "~=2.13.0"
reuse = "~=0.13.0"
[requires]
python_version = "3.9"
python_version = "3.8"
[scripts]
clean = "rm data/ki.sqlite"

Pipfile.lock generated

File diff suppressed because it is too large.

View File

@ -9,37 +9,11 @@ SPDX-License-Identifier: AGPL-3.0-or-later
[![Build Status](https://drone.wtf-eg.de/api/badges/kompetenzinventar/ki-backend/status.svg?ref=refs/heads/main)](https://drone.wtf-eg.de/kompetenzinventar/ki-backend)
[![REUSE status](https://api.reuse.software/badge/git.wtf-eg.de/kompetenzinventar/ki-backend)](https://api.reuse.software/info/git.wtf-eg.de/kompetenzinventar/ki-backend)
## About
This repo contains the backend of the Kompetenzinventar project, a web application for collecting user profiles for the WTF eG.
The backend is implemented with Flask.
### Contributing
You are welcome to take part in the development of the Kompetenzinventar.
- Report bugs or missing features. Please do so directly via the [Issues](https://git.wtf-eg.de/kompetenzinventar/ki-backend/issues) in Gitea.
- Improve the documentation or the implementation. Please fork the project, branch off `main`, and then open a [Pull Request](https://git.wtf-eg.de/kompetenzinventar/ki-backend/pulls).
### Communication
The following channels are available for communication about the Kompetenzinventar:
- The [Issues](https://git.wtf-eg.de/kompetenzinventar/ki-backend/issues) in the WTF Gitea.
- The [AG Entwicklung](https://forum.wtf-eg.de/c/interna/ag-entwicklung/21) area in the WTF forum.
- A room on Matrix. Access is by invitation; frlan sends the invites, a simple PM in the forum is enough.
### Repos
* **[ki-backend](https://git.wtf-eg.de/kompetenzinventar/ki-backend)** (this repo) contains the backend
* [ki-frontend](https://git.wtf-eg.de/kompetenzinventar/ki-frontend) contains the frontend
* Further repositories are located in the [Kompetenzinventar](https://git.wtf-eg.de/kompetenzinventar) Gitea organisation.
## Development
### Dependencies
- Python 3.9
- Python 3.8
- [Pipenv](https://github.com/pypa/pipenv)
@ -222,6 +196,25 @@ docker-compose up
Then open http://localhost:13337.
### Workaround if access to registry.wtf-eg.net is not possible
Prerequisite:
[ki-backend-docker](https://git.wtf-eg.de/kompetenzinventar/ki-backend-docker) must be checked out next to `ki-backend`.
```
cd ki-backend-docker
docker build . --target base -t ki-backend-base
docker build . --target builder -t ki-backend-builder
```
Change the two entries in the `Dockerfile` of `ki-backend` (for example with the snippet sketched below):
- registry.wtf-eg.net/ki-backend-builder:1.0.0 -> ki-backend-builder
- registry.wtf-eg.net/ki-backend-base:1.0.0 -> ki-backend-base
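A minimal sketch of that substitution, assuming GNU sed and that it is run inside the `ki-backend` checkout:
```
# Swap the two registry references for the locally built images (GNU sed assumed).
sed -i \
  -e 's#registry.wtf-eg.net/ki-backend-builder:1.0.0#ki-backend-builder#' \
  -e 's#registry.wtf-eg.net/ki-backend-base:1.0.0#ki-backend-base#' \
  Dockerfile
```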
After that, `docker-compose up` should work.
## Licenses
This project conforms to the [REUSE](https://reuse.software/) specification.

View File

@ -1,24 +0,0 @@
# SPDX-FileCopyrightText: NONE
# SPDX-License-Identifier: CC0-1.0
version = 1
SPDX-PackageName = "Kompetenzinventar Backend"
SPDX-PackageDownloadLocation = "https://git.wtf-eg.de/kompetenzinventar/ki-backend"
[[annotations]]
path = "data/imgs/flags/**"
precedence = "aggregate"
SPDX-FileCopyrightText = "2013 Panayiotis Lipiridis <https://flagicons.lipis.dev/>"
SPDX-License-Identifier = "MIT"
[[annotations]]
path = ["Pipfile.lock", "migrations/**"]
precedence = "aggregate"
SPDX-FileCopyrightText = "WTF Kooperative eG <https://wtf-eg.de/>"
SPDX-License-Identifier = "AGPL-3.0-or-later"
[[annotations]]
path = ["renovate.json", ".python-version"]
precedence = "aggregate"
SPDX-FileCopyrightText = "WTF Kooperative eG <https://wtf-eg.de/>"
SPDX-License-Identifier = "AGPL-3.0-or-later"

app.py

@ -8,23 +8,22 @@ import os
from dotenv import load_dotenv, find_dotenv
from flask import Flask
from flask_cors import CORS
from flask.logging import default_handler
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from ldap3.utils.log import logger as ldap3_logger
from ldap3.utils.log import set_library_log_detail_level, BASIC
load_dotenv(find_dotenv())
app = Flask(__name__)
# Configure logging
loglevel = os.getenv("KI_LOGLEVEL", logging.WARNING)
loglevel = int(loglevel)
app.logger.setLevel(loglevel)
app.logger.propagate = False # do not forward messages to the root logger
logging.basicConfig(level=loglevel,
format='[%(asctime)s] %(levelname)s [%(name)s] %(message)s') # configure root logger as fallback
logging.getLogger('werkzeug').propagate = False # werkzeug has its own ColorStreamHandler
set_library_log_detail_level(BASIC) # ldap3 has different verbosity levels internally
logging.basicConfig(level=loglevel)
set_library_log_detail_level(BASIC)
ldap3_logger.addHandler(default_handler)
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("SQLALCHEMY_DATABASE_URI")
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
@ -38,9 +37,9 @@ app.config["KI_LDAP_AUTH_PASSWORD"] = os.getenv("KI_LDAP_AUTH_PASSWORD")
app.config["KI_LDAP_BASE_DN"] = os.getenv("KI_LDAP_BASE_DN")
CORS(app)
db = SQLAlchemy(app, session_options={"future": True})
db = SQLAlchemy(app)
migrate = Migrate(app, db, compare_type=True)
app.logger.info("Hello from KI")
logging.debug("Hello from KI")
from ki import module # noqa

View File

@ -1,5 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg" width="810" height="540">
<rect width="810" height="540" fill="#FCDD09"/>
<path stroke="#DA121A" stroke-width="60" d="M0,90H810m0,120H0m0,120H810m0,120H0"/>
</svg>


View File

@ -3,6 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
import csv
import logging
from app import app, db
from ki.models import Address, Contact, ContactType, Language, Skill, Profile, ProfileLanguage, ProfileSearchtopic, \
@ -12,43 +13,35 @@ from ki.models import Address, Contact, ContactType, Language, Skill, Profile, P
def seed_contacttypes():
contacttypes_seed_file_path = app.config["KI_DATA_DIR"] + "/seed_data/contacttypes.csv"
app.logger.info("importing contacttypes")
logging.info("importing contacttypes")
with open(contacttypes_seed_file_path) as file:
csv_reader = csv.DictReader(file)
for contacttype in csv_reader:
id = int(contacttype["id"])
db_contacttype = db.session.get(ContactType, id)
db_contacttype = ContactType.query.get(id)
if db_contacttype is None:
db.session.add(ContactType(id=int(contacttype["id"]), name=contacttype["name"]))
def seed_user(auth_id,
nickname=None,
pronouns="",
visible=True,
def seed_user(nickname,
visible=False,
skills=[],
languages=[],
volunteerwork="",
availability_status=False,
freetext="",
availability_text="",
availability_hours_per_week=42,
skills=[],
searchtopics=[],
languages=[],
address=None,
contacts=[]):
if not nickname:
nickname = auth_id
availability_hours_per_week=42):
app.logger.info(f"seeding {nickname} \\o/")
user = User(auth_id=auth_id)
user = User(auth_id=nickname)
db.session.add(user)
profile = Profile(nickname=nickname,
pronouns=pronouns,
pronouns="",
volunteerwork=volunteerwork,
availability_status=availability_status,
availability_text=availability_text,
@ -61,100 +54,114 @@ def seed_user(auth_id,
skill = ProfileSkill(profile=profile, skill_id=skill_data[0], level=skill_data[1])
db.session.add(skill)
for skill_id in searchtopics:
searchtopic = ProfileSearchtopic(profile=profile, skill_id=skill_id)
db.session.add(searchtopic)
for language_data in languages:
language = ProfileLanguage(profile=profile, language_id=language_data[0], level=language_data[1])
db.session.add(language)
if address:
_address = Address(name=address[0],
street=address[1],
house_number=address[2],
additional=address[3],
postcode=address[4],
city=address[5],
country=address[6],
profile=profile)
db.session.add(_address)
for contact_data in contacts:
contact = Contact(profile=profile, contacttype_id=contact_data[0], content=contact_data[1])
db.session.add(contact)
db.session.add(profile)
def seed(dev: bool):
with app.app_context():
seed_contacttypes()
seed_contacttypes()
skill_seed_file_path = app.config["KI_DATA_DIR"] + "/seed_data/skills.csv"
skill_seed_file_path = app.config["KI_DATA_DIR"] + "/seed_data/skills.csv"
app.logger.info("importing skills")
app.logger.info("importing skills")
with open(skill_seed_file_path) as skills_file:
skills_csv_reader = csv.DictReader(skills_file)
with open(skill_seed_file_path) as skills_file:
skills_csv_reader = csv.DictReader(skills_file)
for skill in skills_csv_reader:
id = int(skill["id"])
db_skill = db.session.get(Skill, id)
for skill in skills_csv_reader:
id = int(skill["id"])
db_skill = Skill.query.get(id)
if db_skill is None:
db.session.add(Skill(id=int(skill["id"]), name=skill["name"]))
if db_skill is None:
db.session.add(Skill(id=int(skill["id"]), name=skill["name"]))
app.logger.info("importing languages")
app.logger.info("importing languages")
iso_seed_file_path = app.config["KI_DATA_DIR"] + "/seed_data/iso_639_1.csv"
iso_seed_file_path = app.config["KI_DATA_DIR"] + "/seed_data/iso_639_1.csv"
with open(iso_seed_file_path) as iso_file:
iso_csv_reader = csv.DictReader(iso_file)
with open(iso_seed_file_path) as iso_file:
iso_csv_reader = csv.DictReader(iso_file)
for iso in iso_csv_reader:
id = iso["639-1"]
db_language = db.session.get(Language, id)
for iso in iso_csv_reader:
id = iso["639-1"]
db_language = Language.query.get(id)
if db_language is None:
db.session.add(Language(id=iso["639-1"], name=iso["Sprache"]))
if db_language is None:
db.session.add(Language(id=iso["639-1"], name=iso["Sprache"]))
if dev:
seed_user("klaus", visible=False)
if dev:
app.logger.info("seeding peter :)")
for i in range(1, 20):
seed_user(f"babsi{i}")
peter = User(auth_id="peter")
db.session.add(peter)
seed_user("peter",
nickname="peternichtlustig",
visible=False,
pronouns="Herr Dr. Dr.",
volunteerwork="Gartenverein",
availability_status=True,
availability_hours_per_week=42,
availability_text="Immer",
freetext="Ich mag Kaffee",
skills=[(3, 3), (1, 5)],
searchtopics=[3, 1],
languages=[("de", 5), ("fr", 3)],
address=("Peter Nichtlustig", "Waldweg", "23i", "Hinterhaus", "13337", "Bielefeld",
"Deutschland"),
contacts=[(4, "@peter:wtf-eg.de"), (1, "peter@wtf-eg.de")])
peters_profile = Profile(nickname="peternichtlustig",
pronouns="Herr Dr. Dr.",
volunteerwork="Gartenverein",
availability_status=True,
availability_hours_per_week=42,
availability_text="Immer",
freetext="Ich mag Kaffee",
user=peter)
db.session.add(peters_profile)
seed_user("dirtydieter",
volunteerwork="Müll sammeln",
availability_status=True,
availability_hours_per_week=24,
availability_text="Nur Nachts!",
freetext="1001010010111!!!",
skills=[(1, 5)],
address=("Friedrich Witzig", "", "", "", "", "", ""))
matrix_contact = Contact(profile=peters_profile, contacttype_id=4, content="@peter:wtf-eg.de")
db.session.add(matrix_contact)
all_skills = [(skill.id, 3) for skill in Skill.query.all()]
seed_user("jutta", languages=[("fr", 5)], skills=all_skills)
email_contact = Contact(profile=peters_profile, contacttype_id=1, content="peter@wtf-eg.de")
db.session.add(email_contact)
seed_user("giesela", skills=[(9, 3), (10, 5)])
seed_user("bertha", visible=False, skills=[(11, 3), (10, 5)])
seed_user("monique", languages=[("fr", 4)])
peters_address = Address(name="Peter Nichtlustig",
street="Waldweg",
house_number="23i",
additional="Hinterhaus",
postcode="13337",
city="Bielefeld",
country="Deutschland",
profile=peters_profile)
db.session.add(peters_address)
db.session.commit()
peters_python_skill = ProfileSkill(profile=peters_profile, skill_id=3, level=3)
db.session.add(peters_python_skill)
peters_php_skill = ProfileSkill(profile=peters_profile, skill_id=1, level=5)
db.session.add(peters_php_skill)
peters_python_searchtopic = ProfileSearchtopic(profile=peters_profile, skill_id=3)
db.session.add(peters_python_searchtopic)
peters_php_searchtopic = ProfileSearchtopic(profile=peters_profile, skill_id=1)
db.session.add(peters_php_searchtopic)
peter_de = ProfileLanguage(profile=peters_profile, language_id="de", level=5)
db.session.add(peter_de)
peter_fr = ProfileLanguage(profile=peters_profile, language_id="fr", level=3)
db.session.add(peter_fr)
seed_user("klaus")
seed_user("dirtydieter",
visible=True,
volunteerwork="Müll sammeln",
availability_status=True,
availability_hours_per_week=24,
availability_text="Nur Nachts!",
freetext="1001010010111!!!",
skills=[(Skill.skill_id_php, 5)])
all_skills = Skill.query.all()
all_profile_skills = []
for skill in all_skills:
all_profile_skills.append((skill.id, 3))
seed_user("jutta", visible=True, languages=[("fr", 5)], skills=all_profile_skills)
seed_user("giesela", visible=True, skills=[(Skill.skill_id_mysql, 3), (Skill.skill_id_postgresql, 5)])
seed_user("bertha", visible=False, skills=[(Skill.skill_id_sqlite, 3), (Skill.skill_id_postgresql, 5)])
seed_user("monique", visible=True, languages=[("fr", 4)])
db.session.commit()

View File

@ -4,14 +4,11 @@
from flask import make_response, request
from ki.models import Profile, ProfileSkill, Skill, ProfileLanguage, Language, Address
from ki.models import Profile, ProfileSkill, Skill, ProfileLanguage, Language
def find_profiles():
try:
page = int(request.args.get("page", 1))
except ValueError:
page = 1
page = int(request.args.get("page", 1))
if page < 1:
return make_response({"messages": {"page": "Die angefragte Seite muss mindestens 1 sein"}}, 400)
@ -22,35 +19,27 @@ def find_profiles():
return make_response({"messages": {"page_size": "Die maximale Anzahl Einträge pro Seite beträgt 100"}}, 400)
query = Profile.query.distinct(Profile.id) \
.order_by(Profile.nickname) \
.filter(Profile.visible.is_(True)) \
.join(Profile.skills, isouter=True).join(ProfileSkill.skill, isouter=True) \
.join(Profile.languages, isouter=True).join(ProfileLanguage.language, isouter=True) \
.join(Address, isouter=True)
.join(Profile.languages, isouter=True).join(ProfileLanguage.language, isouter=True)
if "search" in request.args:
terms = request.args["search"].split(" ")
for term in terms:
query = query.filter(
Profile.nickname.like(f"%{term}%") | # noqa: W504
Skill.name.like(f"%{term}%") | # noqa: W504
Language.name.like(f"%{term}%") | # noqa: W504
Address.name.like(f"%{term}%"))
Profile.nickname.like(f"%{term}%") | Skill.name.like(f"%{term}%") | Language.name.like(f"%{term}%"))
if "nickname" in request.args:
nickname = request.args.get("nickname")
query = query.filter(Profile.nickname.like(f"%{nickname}%"))
paginated_result = query.paginate(page=page, per_page=page_size)
count = query.count()
offset = (page - 1) * page_size
db_profiles = query.limit(page_size).offset(offset).all()
api_profiles = []
for db_profile in paginated_result.items:
for db_profile in db_profiles:
api_profiles.append(db_profile.to_dict())
return make_response({
"total": paginated_result.total,
"pages": paginated_result.pages,
"page": paginated_result.page,
"profiles": api_profiles
})
return make_response({"total": count, "profiles": api_profiles})

View File

@ -33,7 +33,7 @@ def update_languages(profile, languages_data):
if "id" not in language_data["language"]:
continue
language = db.session.get(Language, language_data["language"]["id"])
language = Language.query.get(language_data["language"]["id"])
profile_language = ProfileLanguage.query.filter(ProfileLanguage.profile == profile,
ProfileLanguage.language == language).first()
@ -110,7 +110,7 @@ def update_contacts(profile, contacts_data):
if "id" in contact_data:
contact_id = int(contact_data["id"])
contact_ids_to_be_deleted.remove(contact_id)
contact = db.session.get(Contact, contact_id)
contact = Contact.query.get(contact_id)
else:
contact = Contact(profile=profile, contacttype=contacttype)
db.session.add(contact)
@ -122,7 +122,7 @@ def update_contacts(profile, contacts_data):
def update_profile(user_id: int):
user = db.session.get(User, user_id)
user = User.query.get(user_id)
if user is None:
return make_response({}, 404)
@ -151,11 +151,11 @@ def update_profile(user_id: int):
profile.freetext = request.json.get("freetext", "")
profile.visible = request.json.get("visible", False)
update_address(profile, request.json.get("address"))
update_contacts(profile, request.json.get("contacts", []))
update_skills(profile, request.json.get("skills", []))
update_searchtopics(profile, request.json.get("searchtopics", []))
update_languages(profile, request.json.get("languages", []))
update_address(profile, request.json.get("address", {}))
update_contacts(profile, request.json.get("contacts", {}))
update_skills(profile, request.json.get("skills", {}))
update_searchtopics(profile, request.json.get("searchtopics"))
update_languages(profile, request.json.get("languages", {}))
db.session.commit()

View File

@ -139,6 +139,13 @@ class Address(db.Model):
class Skill(db.Model):
skill_id_php = 1
skill_id_python = 3
skill_id_sqlalchemy = 7
skill_id_mysql = 9
skill_id_postgresql = 10
skill_id_sqlite = 11
__tablename__ = "skill"
id = Column(Integer, primary_key=True)

View File

@ -10,14 +10,13 @@ from ki.auth import auth
from ki.handlers import find_profiles as find_profiles_handler
from ki.handlers import update_profile as update_profile_handler
from ki.models import ContactType, Language, Skill, Token, User
from app import app, db
from app import app
content_type_svg = "image/svg+xml"
content_type_png = "image/png"
def token_auth(func):
@wraps(func)
def _token_auth(*args, **kwargs):
auth_header = request.headers.get("Authorization")
@ -66,7 +65,7 @@ def handle_completion_request(model, key):
def handle_icon_request(model, id, path):
object = db.session.get(model, id)
object = model.query.get(id)
if object is None:
return make_response({}, 404)

View File

@ -26,14 +26,13 @@ class ApiTest(unittest.TestCase):
config = migrate.get_config()
command.upgrade(config, "head")
seed(True)
max_skill = Skill.query.order_by(Skill.id.desc()).first()
self.max_skill_id = max_skill.id
seed(True)
max_skill = Skill.query.order_by(Skill.id.desc()).first()
self.max_skill_id = max_skill.id
def tearDown(self):
with app.app_context():
db.drop_all()
db.engine.dispose()
db.drop_all()
db.engine.dispose()
def login(self, username, password):
login_data = {"username": username, "password": password}

View File

@ -8,7 +8,6 @@ from ki.test.ApiTest import ApiTest
class TestContactTypesEndpoint(ApiTest):
def test_skills_options(self):
response = self.client.options("/contacttypes")
self.assertEqual(response.status_code, 200)

View File

@ -8,7 +8,6 @@ from ki.test.ApiTest import ApiTest
class TestFindProfilesEndpoint(ApiTest):
def test_find_profiles_options(self):
response = self.client.options("/users/profiles")
self.assertEqual(response.status_code, 200)
@ -21,33 +20,25 @@ class TestFindProfilesEndpoint(ApiTest):
response = self.client.get("/users/profiles?nickname=horsthorsthorst",
headers={"Authorization": "Bearer " + token})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json, {"total": 0, "page": 1, "pages": 0, "profiles": []})
self.assertEqual(response.json, {"total": 0, "profiles": []})
def test_find_sql_specialchars(self):
token = self.login("peter", "geheim")["token"]
response = self.client.get("/users/profiles?nickname=%22%27%25", headers={"Authorization": "Bearer " + token})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json, {"total": 0, "page": 1, "pages": 0, "profiles": []})
self.assertEqual(response.json, {"total": 0, "profiles": []})
def test_find_all_page1(self):
def test_find_all(self):
token = self.login("peter", "geheim")["token"]
response = self.client.get("/users/profiles", headers={"Authorization": "Bearer " + token})
self.assertEqual(response.status_code, 200)
self.assertDictContainsSubset({"total": 23, "page": 1, "pages": 2}, response.json)
self.assertDictContainsSubset({"nickname": "babsi1"}, response.json["profiles"][0])
self.assertDictContainsSubset({"nickname": "dirtydieter"}, response.json["profiles"][19])
def test_find_all_page2(self):
token = self.login("peter", "geheim")["token"]
response = self.client.get("/users/profiles?page=2", headers={"Authorization": "Bearer " + token})
self.assertEqual(response.status_code, 200)
self.assertDictContainsSubset({"total": 23, "page": 2, "pages": 2}, response.json)
self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][0])
self.assertDictContainsSubset({"total": 4}, response.json)
self.assertDictContainsSubset({"nickname": "dirtydieter"}, response.json["profiles"][0])
self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][1])
self.assertDictContainsSubset({"nickname": "monique"}, response.json["profiles"][2])
self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][2])
self.assertDictContainsSubset({"nickname": "monique"}, response.json["profiles"][3])
def test_find_dieter(self):
token = self.login("peter", "geheim")["token"]
@ -71,8 +62,8 @@ class TestFindProfilesEndpoint(ApiTest):
response = self.client.get("/users/profiles?search=sql", headers={"Authorization": "Bearer " + token})
self.assertEqual(response.status_code, 200)
self.assertDictContainsSubset({"total": 2}, response.json)
self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][0])
self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][1])
self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][0])
self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][1])
def test_find_postgres(self):
token = self.login("peter", "geheim")["token"]
@ -80,8 +71,8 @@ class TestFindProfilesEndpoint(ApiTest):
response = self.client.get("/users/profiles?search=post", headers={"Authorization": "Bearer " + token})
self.assertEqual(response.status_code, 200)
self.assertDictContainsSubset({"total": 2}, response.json)
self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][0])
self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][1])
self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][0])
self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][1])
def test_find_php_franzosen(self):
token = self.login("peter", "geheim")["token"]
@ -100,14 +91,6 @@ class TestFindProfilesEndpoint(ApiTest):
self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][0])
self.assertDictContainsSubset({"nickname": "monique"}, response.json["profiles"][1])
def test_find_dieter_by_name(self):
token = self.login("peter", "geheim")["token"]
response = self.client.get("/users/profiles?search=friedrich", headers={"Authorization": "Bearer " + token})
self.assertEqual(response.status_code, 200)
self.assertDictContainsSubset({"total": 1}, response.json)
self.assertDictContainsSubset({"nickname": "dirtydieter"}, response.json["profiles"][0])
if __name__ == "main":
unittest.main()

View File

@ -8,7 +8,6 @@ from ki.test.ApiTest import ApiTest
class TestLanguagesEndpoint(ApiTest):
def test_skills_options(self):
response = self.client.options("/languages")
self.assertEqual(response.status_code, 200)
@ -33,7 +32,6 @@ class TestLanguagesEndpoint(ApiTest):
self.assertEqual(response.status_code, 200)
self.assertIn("Content-Type", response.headers)
self.assertEqual(response.headers["Content-Type"], "image/svg+xml; charset=utf-8")
response.close()
if __name__ == "main":

View File

@ -10,7 +10,6 @@ from ki.test.ApiTest import ApiTest
class TestLoginEndpoint(ApiTest):
def test_login(self):
response1_data = self.login("peter", "geheim")
response2_data = self.login("peter", "geheim")

View File

@ -20,12 +20,10 @@ class TestProfileEndpoint(ApiTest):
self.assertEqual(login_response.status_code, 200)
self.assertIn("token", login_response.json)
with app.app_context():
babsi = User.query.filter(User.auth_id == "babsi1").first()
response = self.client.post(f"/users/{babsi.id}/profile",
data=json.dumps({}),
content_type="application/json",
headers={"Authorization": "Bearer " + login_response.json["token"]})
response = self.client.post("/users/1/profile",
data=json.dumps({}),
content_type="application/json",
headers={"Authorization": "Bearer " + login_response.json["token"]})
self.assertEqual(response.status_code, 403)
@ -104,16 +102,14 @@ class TestProfileEndpoint(ApiTest):
"level": 2
}]
}
with app.app_context():
peter = User.query.filter(User.auth_id == "peter").first()
response = self.client.post(f"/users/{peter.id}/profile",
data=json.dumps(data),
content_type="application/json",
headers={"Authorization": "Bearer " + token})
response = self.client.post("/users/1/profile",
data=json.dumps(data),
content_type="application/json",
headers={"Authorization": "Bearer " + token})
self.assertEqual(response.status_code, 200)
with app.app_context():
user = User.query.filter(User.id == peter.id).first()
user = User.query.filter(User.id == 1).first()
profile = user.profile
self.assertEqual("Hebbert", profile.nickname)
self.assertEqual("Monsieur", profile.pronouns)
@ -187,9 +183,7 @@ class TestProfileEndpoint(ApiTest):
def test_get_visible_proifle(self):
token = self.login("peter", "geheim")["token"]
with app.app_context():
babsi = User.query.filter(User.auth_id == "babsi1").first()
response = self.client.get(f"/users/{babsi.id}/profile", headers={"Authorization": f"Bearer {token}"})
response = self.client.get("/users/3/profile", headers={"Authorization": f"Bearer {token}"})
self.assertEqual(response.status_code, 200)
@ -200,16 +194,14 @@ class TestProfileEndpoint(ApiTest):
self.assertEqual(login_response.status_code, 200)
self.assertIn("token", login_response.json)
with app.app_context():
peter = User.query.filter(User.auth_id == "peter").first()
response = self.client.get(f"/users/{peter.id}/profile",
headers={"Authorization": "Bearer " + login_response.json["token"]})
profile_id = peter.profile.id
response = self.client.get("/users/1/profile",
headers={"Authorization": "Bearer " + login_response.json["token"]})
self.assertEqual(response.status_code, 200)
self.assertDictEqual(
response.json, {
"profile": {
"user_id": peter.id,
"user_id": 1,
"nickname": "peternichtlustig",
"pronouns": "Herr Dr. Dr.",
"availability_status": True,
@ -226,12 +218,12 @@ class TestProfileEndpoint(ApiTest):
"id": 1,
"name": "Peter Nichtlustig",
"postcode": "13337",
"profile_id": profile_id,
"profile_id": 1,
"street": "Waldweg"
},
"contacts": [{
"id": 1,
"profile_id": profile_id,
"profile_id": 1,
"contacttype": {
"id": 4,
"name": "Matrix"
@ -239,7 +231,7 @@ class TestProfileEndpoint(ApiTest):
"content": "@peter:wtf-eg.de"
}, {
"id": 2,
"profile_id": profile_id,
"profile_id": 1,
"contacttype": {
"id": 1,
"name": "E-Mail"
@ -247,7 +239,7 @@ class TestProfileEndpoint(ApiTest):
"content": "peter@wtf-eg.de"
}],
"skills": [{
"profile_id": profile_id,
"profile_id": 1,
"skill": {
"id": 1,
"name": "PHP",
@ -255,7 +247,7 @@ class TestProfileEndpoint(ApiTest):
},
"level": 5
}, {
"profile_id": profile_id,
"profile_id": 1,
"skill": {
"id": 3,
"name": "Python",
@ -264,14 +256,14 @@ class TestProfileEndpoint(ApiTest):
"level": 3
}],
"searchtopics": [{
"profile_id": profile_id,
"profile_id": 1,
"skill": {
"id": 1,
"name": "PHP",
"icon_url": "/skills/1/icon"
}
}, {
"profile_id": profile_id,
"profile_id": 1,
"skill": {
"id": 3,
"name": "Python",
@ -279,7 +271,7 @@ class TestProfileEndpoint(ApiTest):
}
}],
"languages": [{
"profile_id": profile_id,
"profile_id": 1,
"language": {
"id": "de",
"name": "Deutsch",
@ -287,7 +279,7 @@ class TestProfileEndpoint(ApiTest):
},
"level": 5
}, {
"profile_id": profile_id,
"profile_id": 1,
"language": {
"id": "fr",
"name": "Französisch",

View File

@ -8,7 +8,6 @@ from ki.test.ApiTest import ApiTest
class TestSkillsEndpoint(ApiTest):
def test_skills_options(self):
response = self.client.options("/skills")
self.assertEqual(response.status_code, 200)
@ -41,14 +40,12 @@ class TestSkillsEndpoint(ApiTest):
self.assertEqual(response.status_code, 200)
self.assertIn("Content-Type", response.headers)
self.assertEqual(response.headers["Content-Type"], "image/svg+xml; charset=utf-8")
response.close()
def test_get_fallback_skill_icon(self):
response = self.client.get("/skills/2/icon")
self.assertEqual(response.status_code, 200)
self.assertIn("Content-Type", response.headers)
self.assertEqual(response.headers["Content-Type"], "image/svg+xml; charset=utf-8")
response.close()
if __name__ == "main":

migrations/alembic.ini Normal file

@ -0,0 +1,50 @@
# A generic, single database configuration.
[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@ -11,6 +11,9 @@ from alembic import context
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
# add your model's MetaData object here
@ -19,7 +22,7 @@ logger = logging.getLogger('alembic.env')
# target_metadata = mymodel.Base.metadata
config.set_main_option(
'sqlalchemy.url',
str(current_app.extensions['migrate'].db.engine.url).replace(
str(current_app.extensions['migrate'].db.get_engine().url).replace(
'%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata
@ -68,7 +71,7 @@ def run_migrations_online():
directives[:] = []
logger.info('No changes in schema detected.')
connectable = current_app.extensions['migrate'].db.engine
connectable = current_app.extensions['migrate'].db.get_engine()
with connectable.connect() as connection:
context.configure(

View File

@ -1,18 +0,0 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:best-practices",
":disableDependencyDashboard",
":maintainLockFilesMonthly",
":pinVersions",
":separateMultipleMajorReleases"
],
"packageRules": [
{
"matchDepNames": ["python"],
"groupName": "Python",
"separateMinorPatch": true,
"separateMultipleMinor": true
}
]
}

View File

@ -11,4 +11,4 @@ with app.app_context():
config = migrate.get_config()
command.upgrade(config, "head")
serve(app, host="0.0.0.0", port=5000, threads=20)
serve(app, host="0.0.0.0", port=5000)