Compare commits
13 Commits
fix/flags
...
fix/code-s
Author | SHA1 | Date | |
---|---|---|---|
d60acd169b
|
|||
f1ecbadf05 | |||
67cb8c9152 | |||
f7e058d387
|
|||
695c88e159
|
|||
1360b4c738
|
|||
689a5ba33e
|
|||
19aebcc327
|
|||
0fcd407006 | |||
dea781cc29 | |||
be9bc8b5cc
|
|||
4fab7d7cda | |||
f131ee335c |
26
README.md
26
README.md
@ -9,6 +9,32 @@ SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
[](https://drone.wtf-eg.de/kompetenzinventar/ki-backend)
|
||||
[](https://api.reuse.software/info/git.wtf-eg.de/kompetenzinventar/ki-backend)
|
||||
|
||||
## Über
|
||||
|
||||
Dieses Repo enthält das Backend des Projekts Kompetenzinventar - einer Webapplikation zur Erfassung von Userprofilen für die WTF eG.
|
||||
|
||||
Implementiert ist das Backend mit Flask.
|
||||
|
||||
### Mitmachen
|
||||
|
||||
Du kannst gerne bei der Entwicklung des Kompetenzinventars mitmachen.
|
||||
|
||||
- Fehler oder fehlende Funktionen erfassen. Bitte direkt über die [Issues](https://git.wtf-eg.de/kompetenzinventar/ki-backend/issues) in Gitea.
|
||||
- Dokumentation oder Implementierung verbessern. Bitte forke hierzu das Projekt, branche von `main` ab und erstelle dann einen [Pull Request](https://git.wtf-eg.de/kompetenzinventar/ki-backend/pulls).
|
||||
|
||||
### Kommunikation
|
||||
|
||||
Folgende Kanäle gibt es für die Kommunikation über das Kompetenzinventar:
|
||||
|
||||
- Die [Issues](https://git.wtf-eg.de/kompetenzinventar/ki-backend/issues) im WTF Gitea.
|
||||
- Den Bereich [AG Entwicklung](https://forum.wtf-eg.de/c/interna/ag-entwicklung/21) im WTF Forum.
|
||||
- Einen Raum in Matrix. Zutritt per Einladung, frlan lädt ein, eine einfache PN im Forum reicht.
|
||||
|
||||
### Repos
|
||||
|
||||
* **[ki-backend](https://git.wtf-eg.de/kompetenzinventar/ki-backend)** (dieses Repo) enthält das Backend
|
||||
* [ki-frontend](https://git.wtf-eg.de/kompetenzinventar/ki-frontend) enthält das Frontend
|
||||
* Weitere Repositories befinden sich in der Gitea Organisation [Kompetenzinventar](https://git.wtf-eg.de/kompetenzinventar).
|
||||
## Entwicklung
|
||||
|
||||
### Abhängigkeiten
|
||||
|
15
app.py
15
app.py
@ -8,22 +8,23 @@ import os
|
||||
from dotenv import load_dotenv, find_dotenv
|
||||
from flask import Flask
|
||||
from flask_cors import CORS
|
||||
from flask.logging import default_handler
|
||||
from flask_migrate import Migrate
|
||||
from flask_sqlalchemy import SQLAlchemy
|
||||
from ldap3.utils.log import logger as ldap3_logger
|
||||
from ldap3.utils.log import set_library_log_detail_level, BASIC
|
||||
|
||||
load_dotenv(find_dotenv())
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
# Configure logging
|
||||
loglevel = os.getenv("KI_LOGLEVEL", logging.WARNING)
|
||||
loglevel = int(loglevel)
|
||||
app.logger.setLevel(loglevel)
|
||||
logging.basicConfig(level=loglevel)
|
||||
|
||||
set_library_log_detail_level(BASIC)
|
||||
ldap3_logger.addHandler(default_handler)
|
||||
app.logger.propagate = False # do not forward messages to the root logger
|
||||
logging.basicConfig(level=loglevel,
|
||||
format='[%(asctime)s] %(levelname)s [%(name)s] %(message)s') # configure root logger as fallback
|
||||
logging.getLogger('werkzeug').propagate = False # werkzeug has its own ColorStreamHandler
|
||||
set_library_log_detail_level(BASIC) # ldap3 has different verbosity levels internally
|
||||
|
||||
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("SQLALCHEMY_DATABASE_URI")
|
||||
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
|
||||
@ -40,6 +41,6 @@ CORS(app)
|
||||
db = SQLAlchemy(app)
|
||||
migrate = Migrate(app, db, compare_type=True)
|
||||
|
||||
logging.debug("Hello from KI")
|
||||
app.logger.info("Hello from KI")
|
||||
|
||||
from ki import module # noqa
|
||||
|
@ -3,7 +3,6 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
import csv
|
||||
import logging
|
||||
|
||||
from app import app, db
|
||||
from ki.models import Address, Contact, ContactType, Language, Skill, Profile, ProfileLanguage, ProfileSearchtopic, \
|
||||
@ -13,7 +12,7 @@ from ki.models import Address, Contact, ContactType, Language, Skill, Profile, P
|
||||
def seed_contacttypes():
|
||||
contacttypes_seed_file_path = app.config["KI_DATA_DIR"] + "/seed_data/contacttypes.csv"
|
||||
|
||||
logging.info("importing contacttypes")
|
||||
app.logger.info("importing contacttypes")
|
||||
|
||||
with open(contacttypes_seed_file_path) as file:
|
||||
csv_reader = csv.DictReader(file)
|
||||
@ -143,6 +142,8 @@ def seed(dev: bool):
|
||||
db.session.add(peter_fr)
|
||||
|
||||
seed_user("klaus")
|
||||
for i in range(1, 20):
|
||||
seed_user(f"babsi{i}", visible=True)
|
||||
|
||||
seed_user("dirtydieter",
|
||||
visible=True,
|
||||
|
@ -8,7 +8,10 @@ from ki.models import Profile, ProfileSkill, Skill, ProfileLanguage, Language
|
||||
|
||||
|
||||
def find_profiles():
|
||||
page = int(request.args.get("page", 1))
|
||||
try:
|
||||
page = int(request.args.get("page", 1))
|
||||
except ValueError:
|
||||
page = 1
|
||||
|
||||
if page < 1:
|
||||
return make_response({"messages": {"page": "Die angefragte Seite muss mindestens 1 sein"}}, 400)
|
||||
@ -19,6 +22,7 @@ def find_profiles():
|
||||
return make_response({"messages": {"page_size": "Die maximale Anzahl Einträge pro Seite beträgt 100"}}, 400)
|
||||
|
||||
query = Profile.query.distinct(Profile.id) \
|
||||
.order_by(Profile.nickname) \
|
||||
.filter(Profile.visible.is_(True)) \
|
||||
.join(Profile.skills, isouter=True).join(ProfileSkill.skill, isouter=True) \
|
||||
.join(Profile.languages, isouter=True).join(ProfileLanguage.language, isouter=True)
|
||||
@ -33,13 +37,15 @@ def find_profiles():
|
||||
nickname = request.args.get("nickname")
|
||||
query = query.filter(Profile.nickname.like(f"%{nickname}%"))
|
||||
|
||||
count = query.count()
|
||||
|
||||
offset = (page - 1) * page_size
|
||||
db_profiles = query.limit(page_size).offset(offset).all()
|
||||
paginated_result = query.paginate(page=page, per_page=page_size)
|
||||
api_profiles = []
|
||||
|
||||
for db_profile in db_profiles:
|
||||
for db_profile in paginated_result.items:
|
||||
api_profiles.append(db_profile.to_dict())
|
||||
|
||||
return make_response({"total": count, "profiles": api_profiles})
|
||||
return make_response({
|
||||
"total": paginated_result.total,
|
||||
"pages": paginated_result.pages,
|
||||
"page": paginated_result.page,
|
||||
"profiles": api_profiles
|
||||
})
|
||||
|
@ -20,25 +20,33 @@ class TestFindProfilesEndpoint(ApiTest):
|
||||
response = self.client.get("/users/profiles?nickname=horsthorsthorst",
|
||||
headers={"Authorization": "Bearer " + token})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(response.json, {"total": 0, "profiles": []})
|
||||
self.assertEqual(response.json, {"total": 0, "page": 1, "pages": 0, "profiles": []})
|
||||
|
||||
def test_find_sql_specialchars(self):
|
||||
token = self.login("peter", "geheim")["token"]
|
||||
|
||||
response = self.client.get("/users/profiles?nickname=%22%27%25", headers={"Authorization": "Bearer " + token})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(response.json, {"total": 0, "profiles": []})
|
||||
self.assertEqual(response.json, {"total": 0, "page": 1, "pages": 0, "profiles": []})
|
||||
|
||||
def test_find_all(self):
|
||||
def test_find_all_page1(self):
|
||||
token = self.login("peter", "geheim")["token"]
|
||||
|
||||
response = self.client.get("/users/profiles", headers={"Authorization": "Bearer " + token})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertDictContainsSubset({"total": 4}, response.json)
|
||||
self.assertDictContainsSubset({"nickname": "dirtydieter"}, response.json["profiles"][0])
|
||||
self.assertDictContainsSubset({"total": 23, "page": 1, "pages": 2}, response.json)
|
||||
self.assertDictContainsSubset({"nickname": "babsi1"}, response.json["profiles"][0])
|
||||
self.assertDictContainsSubset({"nickname": "dirtydieter"}, response.json["profiles"][19])
|
||||
|
||||
def test_find_all_page2(self):
|
||||
token = self.login("peter", "geheim")["token"]
|
||||
|
||||
response = self.client.get("/users/profiles?page=2", headers={"Authorization": "Bearer " + token})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertDictContainsSubset({"total": 23, "page": 2, "pages": 2}, response.json)
|
||||
self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][0])
|
||||
self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][1])
|
||||
self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][2])
|
||||
self.assertDictContainsSubset({"nickname": "monique"}, response.json["profiles"][3])
|
||||
self.assertDictContainsSubset({"nickname": "monique"}, response.json["profiles"][2])
|
||||
|
||||
def test_find_dieter(self):
|
||||
token = self.login("peter", "geheim")["token"]
|
||||
@ -62,8 +70,8 @@ class TestFindProfilesEndpoint(ApiTest):
|
||||
response = self.client.get("/users/profiles?search=sql", headers={"Authorization": "Bearer " + token})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertDictContainsSubset({"total": 2}, response.json)
|
||||
self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][0])
|
||||
self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][1])
|
||||
self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][0])
|
||||
self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][1])
|
||||
|
||||
def test_find_postgres(self):
|
||||
token = self.login("peter", "geheim")["token"]
|
||||
@ -71,8 +79,8 @@ class TestFindProfilesEndpoint(ApiTest):
|
||||
response = self.client.get("/users/profiles?search=post", headers={"Authorization": "Bearer " + token})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertDictContainsSubset({"total": 2}, response.json)
|
||||
self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][0])
|
||||
self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][1])
|
||||
self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][0])
|
||||
self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][1])
|
||||
|
||||
def test_find_php_franzosen(self):
|
||||
token = self.login("peter", "geheim")["token"]
|
||||
|
@ -1,50 +0,0 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# template used to generate migration files
|
||||
# file_template = %%(rev)s_%%(slug)s
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic,flask_migrate
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[logger_flask_migrate]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = flask_migrate
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
@ -11,9 +11,6 @@ from alembic import context
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
fileConfig(config.config_file_name)
|
||||
logger = logging.getLogger('alembic.env')
|
||||
|
||||
# add your model's MetaData object here
|
||||
|
Reference in New Issue
Block a user