Compare commits


49 Commits

Author SHA1 Message Date
03f85ff998 Update dependency pre-commit to v4
All checks were successful
continuous-integration/drone/pr Build is passing
2025-03-18 21:37:25 +00:00
3d271cdf7b Merge pull request 'Update python Docker tag to v3.9.21' (!106) from renovate/minor-3.9-python into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #106
2025-01-08 15:05:46 +01:00
f4c54526d1 Update to Python 3.9
All checks were successful
continuous-integration/drone/pr Build is passing
2025-01-08 14:46:46 +01:00
e9236f7e19 Update python Docker tag to v3.9.21 2025-01-08 14:43:49 +01:00
f04120fdc0 Merge pull request 'Update plugins/docker Docker tag to v20.18.6' (!132) from renovate/plugins-docker-20.x into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #132
2025-01-08 14:25:15 +01:00
1f87e2d96e Update plugins/docker Docker tag to v20.18.6
All checks were successful
continuous-integration/drone/pr Build is passing
2025-01-07 21:36:26 +00:00
c5a38dca81 Merge pull request 'Update dependency sqlalchemy to v2.0.36' (!128) from renovate/sqlalchemy-2.x into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #128
2025-01-07 21:43:34 +01:00
6424af8b3f Update dependency sqlalchemy to v2.0.36
All checks were successful
continuous-integration/drone/pr Build is passing
2025-01-07 20:37:20 +00:00
184b1c33eb Merge pull request 'Update dependency flask-sqlalchemy to v3' (!112) from renovate/flask-sqlalchemy-3.x into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #112
2025-01-07 21:25:08 +01:00
5f27eaa9a1 Set the SQLALCHEMY_DATABASE_URI env differently
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-13 15:49:24 +02:00
862bd9f7a3 Explicitly set SQLALCHEMY_DATABASE_URI for tests
Some checks failed
continuous-integration/drone/pr Build is failing
2024-09-13 15:43:09 +02:00
8478e8d76e Fix test errors
Some checks failed
continuous-integration/drone/pr Build is failing
2024-09-13 15:38:56 +02:00
b0652add43 Replaced deprecated method
see https://flask-sqlalchemy.palletsprojects.com/en/3.1.x/api/#flask_sqlalchemy.SQLAlchemy.get_engine
2024-09-13 15:25:04 +02:00
60c906cb5c Fix 'unclosed file' warnings in tests
see https://github.com/pallets/flask/issues/2468#issuecomment-517797518
2024-09-13 14:11:19 +02:00
2ff958c55f Rewrite sqlalchemy code for 1.4 to 2.x migration 2024-09-13 13:43:00 +02:00
146eb995a8 Update SQLAlchemy to v2 2024-09-13 13:42:12 +02:00
33187428cf Update dependency flask-sqlalchemy to v3
Some checks failed
renovate/artifacts Artifact file update failure
continuous-integration/drone/pr Build is failing
2024-09-13 11:36:00 +00:00
06caf796cd Merge pull request 'Update dependency flask-migrate to v4' (!111) from renovate/flask-migrate-4.x into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #111
2024-09-13 13:13:42 +02:00
54326129e4 Merge pull request 'Update python:3.8.20-alpine Docker digest to 3d93b1f' (!127) from renovate/python into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #127
2024-09-13 12:48:09 +02:00
32bb81b69b Update dependency flask-migrate to v4
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-13 10:36:05 +00:00
fc8a06e562 Update python:3.8.20-alpine Docker digest to 3d93b1f
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-13 10:35:25 +00:00
b8b713c620 Merge pull request 'Update dependency reuse to v4' (!126) from renovate/reuse-4.x into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #126
2024-09-13 12:33:44 +02:00
186ab9af43 Migrate REUSE config
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-13 12:30:48 +02:00
0d77c8d637 Update dependency reuse to v4
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-11 11:37:14 +00:00
72fda5df51 Merge pull request 'Update dependency reuse to v2' (!124) from renovate/reuse-2.x into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #124
2024-09-11 13:30:28 +02:00
739ce0382b Update dependency reuse to v2
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-11 10:37:32 +00:00
4c345ecc5f Merge pull request 'Update dependency sqlalchemy to v1.4.54' (!119) from renovate/sqlalchemy-1.x into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #119
2024-09-11 12:00:35 +02:00
5882eeaf2e Update dependency sqlalchemy to v1.4.54
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-10 18:35:55 +00:00
0512bc0dc3 Merge pull request 'Update dependency flask-migrate to v3.1.0' (!88) from renovate/flask-migrate-3.x into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #88
2024-09-10 19:42:03 +02:00
319bf9b2f5 Update dependency flask-migrate to v3.1.0
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-10 17:36:22 +00:00
717454e205 Merge pull request 'Update python Docker tag to v3.8.20' (!121) from renovate/patch-python into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #121
2024-09-10 15:49:46 +02:00
496550a959 Update python Docker tag to v3.8.20
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-10 13:35:28 +00:00
7ebf13edf3 Merge pull request 'Update python:3.8.19-alpine Docker digest to 8b53bd2' (!118) from renovate/python into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #118
2024-09-10 15:03:37 +02:00
5409bdeaee Merge pull request 'Verify lockfile before installation' (!120) from verify-lockfile into main
Some checks failed
continuous-integration/drone/push Build is failing
Reviewed-on: #120
2024-09-10 15:03:14 +02:00
4064a65e63 Verify lockfile before installation
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-10 14:52:44 +02:00
afe22e13c1 Merge pull request 'Update dependency python-dotenv to v1' (!116) from renovate/python-dotenv-1.x into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #116
2024-09-10 14:30:41 +02:00
e4d0a89053 Update lock file
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-10 14:27:54 +02:00
78bdd796e0 Update dependency python-dotenv to v1
Some checks failed
renovate/artifacts Artifact file update failure
continuous-integration/drone/pr Build is passing
2024-09-10 11:36:39 +00:00
c9f83e80b3 Update python:3.8.19-alpine Docker digest to 8b53bd2
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-10 11:35:27 +00:00
5369de67d8 Merge pull request 'Update dependency pre-commit to v2.21.0' (!89) from renovate/pre-commit-2.x into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #89
2024-09-10 13:07:34 +02:00
54c5246f0a Merge pull request 'Add dry-run Docker build to Pull Request CI run' (!117) from docker-dry-run into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #117
2024-09-10 13:02:19 +02:00
9a95baf106 Add dry-run Docker build to Pull Request CI run
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-10 12:56:04 +02:00
338380ed93 Update dependency pre-commit to v2.21.0
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-05 09:36:44 +00:00
3d128d771c Merge pull request 'Update dependency flask-cors to v5' (!114) from renovate/flask-cors-5.x into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #114
2024-09-05 11:26:34 +02:00
6b7517fc7a Update dependency flask-cors to v5
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-04 21:36:04 +00:00
5ceb361da3 Merge pull request 'Update python:3.8.19-alpine Docker digest to c2f31d1' (!113) from renovate/python into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #113
2024-09-04 23:03:32 +02:00
4379a06f26 Update python:3.8.19-alpine Docker digest to c2f31d1
All checks were successful
continuous-integration/drone/pr Build is passing
2024-09-04 19:36:16 +00:00
c43f375e4b Merge pull request 'Update dependency flake8 to v7' (!108) from renovate/flake8-7.x into main
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: #108
2024-09-04 20:44:01 +02:00
621a90b8ab Update dependency flake8 to v7
All checks were successful
continuous-integration/drone/pr Build is passing
2024-08-28 14:36:12 +00:00
23 changed files with 676 additions and 738 deletions

View File

@@ -15,17 +15,28 @@ trigger:
steps:
- name: install-lint-test
image: python:3.8.19-alpine@sha256:3bd7ea88cb637e09d6c7de24c5394657163a85c2be82bfebe0305cf07f8de1ea
image: python:3.9.21-alpine@sha256:f2f6a5627a879693b8c23e04df0b1a6aae3e09c165fa2a08f5c64b2b54c58d3c
env:
PYROOT: '/pyroot'
PYTHONUSERBASE: '/pyroot'
commands:
- apk add --no-cache gcc g++ musl-dev python3-dev
- pip3 install pipenv
- pipenv verify
- pipenv install --dev
- pipenv run flake8
- pipenv run reuse lint
- pipenv run python -m unittest discover ki
- SQLALCHEMY_DATABASE_URI=sqlite:// pipenv run python -m unittest discover ki
- name: docker-dry-run
image: plugins/docker:20.18.6@sha256:59c993e3c4e6c097a0e2d274419aac0d7d8e929773f0ba1af44078e54389834f
settings:
registry: git.wtf-eg.de
repo: git.wtf-eg.de/kompetenzinventar/backend
target: ki-backend
dry_run: true
when:
event:
- pull_request
---
kind: pipeline
@@ -43,7 +54,7 @@ depends_on:
steps:
- name: docker-publish
image: plugins/docker:20.18.4@sha256:a8d3d86853c721492213264815f1d00d3ed13f42f5c1855a02f47fa4d5f1e042
image: plugins/docker:20.18.6@sha256:59c993e3c4e6c097a0e2d274419aac0d7d8e929773f0ba1af44078e54389834f
settings:
registry: git.wtf-eg.de
repo: git.wtf-eg.de/kompetenzinventar/backend
@@ -93,7 +104,7 @@ trigger:
steps:
- name: install-lint-test
image: python:3.8.19-alpine@sha256:3bd7ea88cb637e09d6c7de24c5394657163a85c2be82bfebe0305cf07f8de1ea
image: python:3.9.21-alpine@sha256:f2f6a5627a879693b8c23e04df0b1a6aae3e09c165fa2a08f5c64b2b54c58d3c
env:
PYROOT: '/pyroot'
PYTHONUSERBASE: '/pyroot'
@@ -103,9 +114,9 @@ steps:
- pipenv install --dev
- pipenv run flake8
- pipenv run reuse lint
- pipenv run python -m unittest discover ki
- SQLALCHEMY_DATABASE_URI=sqlite:// pipenv run python -m unittest discover ki
- name: docker-publish
image: plugins/docker:20.18.4@sha256:a8d3d86853c721492213264815f1d00d3ed13f42f5c1855a02f47fa4d5f1e042
image: plugins/docker:20.18.6@sha256:59c993e3c4e6c097a0e2d274419aac0d7d8e929773f0ba1af44078e54389834f
settings:
registry: git.wtf-eg.de
repo: git.wtf-eg.de/kompetenzinventar/backend
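
For context on the SQLALCHEMY_DATABASE_URI=sqlite:// prefix added to the unittest command: sqlite:// with no path is SQLAlchemy's in-memory SQLite URL, so the CI test run gets a throwaway database via the environment. A minimal sketch of how a Flask app can pick that variable up (the fallback path is illustrative, not taken from this repository):

import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
# Read the database URI from the environment; fall back to a local file for
# development. "sqlite://" (no path) selects an in-memory SQLite database.
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv(
    "SQLALCHEMY_DATABASE_URI", "sqlite:///data/ki.sqlite")
db = SQLAlchemy(app)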

View File

@@ -27,5 +27,5 @@ repos:
name: reuse
entry: reuse lint
language: system
exclude: ^(venv).*$
exclude: .*
always_run: true

View File

@@ -1 +1 @@
3.8.19
3.9.21

View File

@@ -1,16 +0,0 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: Kompetenzinventar
Upstream-Contact: Michael Weimann <mail@michael-weimann.eu>
Source: https://git.wtf-eg.de/kompetenzinventar/ki-backend
Files: data/imgs/flags/*
Copyright: 2013 Panayiotis Lipiridis <https://flagicons.lipis.dev/>
License: MIT
Files: Pipfile.lock migrations/*
Copyright: WTF Kooperative eG <https://wtf-eg.de/>
License: AGPL-3.0-or-later
Files: renovate.json .python-version
Copyright: WTF Kooperative eG <https://wtf-eg.de/>
License: AGPL-3.0-or-later

View File

@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: AGPL-3.0-or-later
FROM python:3.8.19-alpine@sha256:3bd7ea88cb637e09d6c7de24c5394657163a85c2be82bfebe0305cf07f8de1ea AS builder
FROM python:3.9.21-alpine@sha256:f2f6a5627a879693b8c23e04df0b1a6aae3e09c165fa2a08f5c64b2b54c58d3c AS builder
ENV PYROOT=/pyroot
ENV PYTHONUSERBASE=$PYROOT
@@ -20,7 +20,7 @@ RUN PIP_USER=1 PIP_IGNORE_INSTALLED=1 pipenv install --system --deploy --ignore-
RUN pip3 uninstall --yes pipenv
FROM python:3.8.19-alpine@sha256:3bd7ea88cb637e09d6c7de24c5394657163a85c2be82bfebe0305cf07f8de1ea AS ki-backend
FROM python:3.9.21-alpine@sha256:f2f6a5627a879693b8c23e04df0b1a6aae3e09c165fa2a08f5c64b2b54c58d3c AS ki-backend
ENV PYROOT=/pyroot
ENV PYTHONUSERBASE=$PYROOT

Pipfile (18 changed lines)
View File

@@ -9,25 +9,25 @@ name = "pypi"
[packages]
flask = "==2.3.3"
python-dotenv = "==0.21.1"
flask-migrate = "==3.0.1"
flask-sqlalchemy = "==2.5.1"
sqlalchemy = "==1.4.53"
python-dotenv = "==1.0.1"
flask-migrate = "==4.0.7"
flask-sqlalchemy = "==3.1.1"
sqlalchemy = "==2.0.36"
waitress = "==2.1.2"
pyyaml = "==6.0.2"
flask-cors = "==3.0.10"
flask-cors = "==5.0.0"
ldap3 = "==2.9.1"
pymysql = "==1.1.1"
werkzeug = "==2.3.8"
[dev-packages]
flake8 = "==6.1.0"
flake8 = "==7.1.1"
yapf = "==0.40.2"
pre-commit = "==2.13.0"
reuse = "==0.14.0"
pre-commit = "==4.2.0"
reuse = "==4.0.3"
[requires]
python_version = "3.8"
python_version = "3.9"
[scripts]
clean = "rm data/ki.sqlite"

Pipfile.lock (generated file, 990 changed lines)

File diff suppressed because it is too large.

View File

@@ -39,7 +39,7 @@ Folgende Kanäle gibt es für die Kommunikation über das Kompetenzinventar:
### Abhängigkeiten
- Python 3.8
- Python 3.9
- [Pipenv](https://github.com/pypa/pipenv)

REUSE.toml (new file, 24 added lines)
View File

@@ -0,0 +1,24 @@
# SPDX-FileCopyrightText: NONE
# SPDX-License-Identifier: CC0-1.0
version = 1
SPDX-PackageName = "Kompetenzinventar Backend"
SPDX-PackageDownloadLocation = "https://git.wtf-eg.de/kompetenzinventar/ki-backend"
[[annotations]]
path = "data/imgs/flags/**"
precedence = "aggregate"
SPDX-FileCopyrightText = "2013 Panayiotis Lipiridis <https://flagicons.lipis.dev/>"
SPDX-License-Identifier = "MIT"
[[annotations]]
path = ["Pipfile.lock", "migrations/**"]
precedence = "aggregate"
SPDX-FileCopyrightText = "WTF Kooperative eG <https://wtf-eg.de/>"
SPDX-License-Identifier = "AGPL-3.0-or-later"
[[annotations]]
path = ["renovate.json", ".python-version"]
precedence = "aggregate"
SPDX-FileCopyrightText = "WTF Kooperative eG <https://wtf-eg.de/>"
SPDX-License-Identifier = "AGPL-3.0-or-later"

app.py (7 changed lines)
View File

@@ -38,12 +38,9 @@ app.config["KI_LDAP_AUTH_PASSWORD"] = os.getenv("KI_LDAP_AUTH_PASSWORD")
app.config["KI_LDAP_BASE_DN"] = os.getenv("KI_LDAP_BASE_DN")
CORS(app)
db = SQLAlchemy(app)
db = SQLAlchemy(app, session_options={"future": True})
migrate = Migrate(app, db, compare_type=True)
app.logger.info("Hello from KI")
from ki import module # noqa
from ki import resume
app.register_blueprint(resume.bp_resume, url_prefix='/resume')
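
The new session_options={"future": True} argument is forwarded to the underlying sessionmaker; on SQLAlchemy 1.4 it switches the session to 2.0-style behaviour, and on 2.0 it is effectively the default. In practice this means queries can be written with select() and session.execute(). A rough sketch of that pattern, reusing names from this codebase (User, auth_id):

from sqlalchemy import select
from app import app, db
from ki.models import User

with app.app_context():
    # 2.0-style querying: build a select() statement and run it on the session.
    stmt = select(User).where(User.auth_id == "peter")
    user = db.session.execute(stmt).scalar_one_or_none()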

View File

@@ -19,7 +19,7 @@ def seed_contacttypes():
for contacttype in csv_reader:
id = int(contacttype["id"])
db_contacttype = ContactType.query.get(id)
db_contacttype = db.session.get(ContactType, id)
if db_contacttype is None:
db.session.add(ContactType(id=int(contacttype["id"]), name=contacttype["name"]))
@@ -88,71 +88,73 @@ def seed_user(auth_id,
def seed(dev: bool):
seed_contacttypes()
with app.app_context():
seed_contacttypes()
skill_seed_file_path = app.config["KI_DATA_DIR"] + "/seed_data/skills.csv"
skill_seed_file_path = app.config["KI_DATA_DIR"] + "/seed_data/skills.csv"
app.logger.info("importing skills")
app.logger.info("importing skills")
with open(skill_seed_file_path) as skills_file:
skills_csv_reader = csv.DictReader(skills_file)
with open(skill_seed_file_path) as skills_file:
skills_csv_reader = csv.DictReader(skills_file)
for skill in skills_csv_reader:
id = int(skill["id"])
db_skill = Skill.query.get(id)
for skill in skills_csv_reader:
id = int(skill["id"])
db_skill = db.session.get(Skill, id)
if db_skill is None:
db.session.add(Skill(id=int(skill["id"]), name=skill["name"]))
if db_skill is None:
db.session.add(Skill(id=int(skill["id"]), name=skill["name"]))
app.logger.info("importing languages")
app.logger.info("importing languages")
iso_seed_file_path = app.config["KI_DATA_DIR"] + "/seed_data/iso_639_1.csv"
iso_seed_file_path = app.config["KI_DATA_DIR"] + "/seed_data/iso_639_1.csv"
with open(iso_seed_file_path) as iso_file:
iso_csv_reader = csv.DictReader(iso_file)
with open(iso_seed_file_path) as iso_file:
iso_csv_reader = csv.DictReader(iso_file)
for iso in iso_csv_reader:
id = iso["639-1"]
db_language = Language.query.get(id)
for iso in iso_csv_reader:
id = iso["639-1"]
db_language = db.session.get(Language, id)
if db_language is None:
db.session.add(Language(id=iso["639-1"], name=iso["Sprache"]))
if db_language is None:
db.session.add(Language(id=iso["639-1"], name=iso["Sprache"]))
if dev:
seed_user("klaus", visible=False)
if dev:
seed_user("klaus", visible=False)
for i in range(1, 20):
seed_user(f"babsi{i}")
for i in range(1, 20):
seed_user(f"babsi{i}")
seed_user("peter",
nickname="peternichtlustig",
visible=False,
pronouns="Herr Dr. Dr.",
volunteerwork="Gartenverein",
availability_status=True,
availability_hours_per_week=42,
availability_text="Immer",
freetext="Ich mag Kaffee",
skills=[(3, 3), (1, 5)],
searchtopics=[3, 1],
languages=[("de", 5), ("fr", 3)],
address=("Peter Nichtlustig", "Waldweg", "23i", "Hinterhaus", "13337", "Bielefeld", "Deutschland"),
contacts=[(4, "@peter:wtf-eg.de"), (1, "peter@wtf-eg.de")])
seed_user("peter",
nickname="peternichtlustig",
visible=False,
pronouns="Herr Dr. Dr.",
volunteerwork="Gartenverein",
availability_status=True,
availability_hours_per_week=42,
availability_text="Immer",
freetext="Ich mag Kaffee",
skills=[(3, 3), (1, 5)],
searchtopics=[3, 1],
languages=[("de", 5), ("fr", 3)],
address=("Peter Nichtlustig", "Waldweg", "23i", "Hinterhaus", "13337", "Bielefeld",
"Deutschland"),
contacts=[(4, "@peter:wtf-eg.de"), (1, "peter@wtf-eg.de")])
seed_user("dirtydieter",
volunteerwork="Müll sammeln",
availability_status=True,
availability_hours_per_week=24,
availability_text="Nur Nachts!",
freetext="1001010010111!!!",
skills=[(1, 5)],
address=("Friedrich Witzig", "", "", "", "", "", ""))
seed_user("dirtydieter",
volunteerwork="Müll sammeln",
availability_status=True,
availability_hours_per_week=24,
availability_text="Nur Nachts!",
freetext="1001010010111!!!",
skills=[(1, 5)],
address=("Friedrich Witzig", "", "", "", "", "", ""))
all_skills = [(skill.id, 3) for skill in Skill.query.all()]
seed_user("jutta", languages=[("fr", 5)], skills=all_skills)
all_skills = [(skill.id, 3) for skill in Skill.query.all()]
seed_user("jutta", languages=[("fr", 5)], skills=all_skills)
seed_user("giesela", skills=[(9, 3), (10, 5)])
seed_user("bertha", visible=False, skills=[(11, 3), (10, 5)])
seed_user("monique", languages=[("fr", 4)])
seed_user("giesela", skills=[(9, 3), (10, 5)])
seed_user("bertha", visible=False, skills=[(11, 3), (10, 5)])
seed_user("monique", languages=[("fr", 4)])
db.session.commit()
db.session.commit()
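
Two patterns recur in this hunk: the seeding now runs inside app.app_context(), because Flask-SQLAlchemy 3 only allows database access while an application context is active, and the Model.query.get(id) calls (legacy API in SQLAlchemy 2.0) become db.session.get(Model, id). A condensed sketch of the combined pattern (the skill name is illustrative):

from app import app, db
from ki.models import Skill

with app.app_context():
    # Session.get() is the SQLAlchemy 2.0 replacement for Query.get().
    skill = db.session.get(Skill, 1)
    if skill is None:
        db.session.add(Skill(id=1, name="Python"))
    db.session.commit()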

View File

@@ -33,7 +33,7 @@ def update_languages(profile, languages_data):
if "id" not in language_data["language"]:
continue
language = Language.query.get(language_data["language"]["id"])
language = db.session.get(Language, language_data["language"]["id"])
profile_language = ProfileLanguage.query.filter(ProfileLanguage.profile == profile,
ProfileLanguage.language == language).first()
@@ -110,7 +110,7 @@ def update_contacts(profile, contacts_data):
if "id" in contact_data:
contact_id = int(contact_data["id"])
contact_ids_to_be_deleted.remove(contact_id)
contact = Contact.query.get(contact_id)
contact = db.session.get(Contact, contact_id)
else:
contact = Contact(profile=profile, contacttype=contacttype)
db.session.add(contact)
@@ -122,7 +122,7 @@ def update_contacts(profile, contacts_data):
def update_profile(user_id: int):
user = User.query.get(user_id)
user = db.session.get(User, user_id)
if user is None:
return make_response({}, 404)
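
Like the legacy Query.get(), db.session.get() returns None when no row has the given primary key, so the existing not-found handling carries over unchanged. A trimmed sketch of that shape (the route registration is illustrative; in the repository the handler is wired up elsewhere):

from flask import make_response
from app import app, db
from ki.models import User

@app.route("/users/<int:user_id>/profile", methods=["POST"])
def update_profile_sketch(user_id: int):
    user = db.session.get(User, user_id)  # None when the id does not exist
    if user is None:
        return make_response({}, 404)
    return make_response({}, 200)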

View File

@@ -33,7 +33,7 @@ class Profile(db.Model):
volunteerwork = Column(String(4000), default="")
freetext = Column(String(4000), default="")
availability_status = Column(Boolean, default=False, nullable=False)
availability_status = Column(Boolean, default=False)
availability_text = Column(String(4000), default="")
availability_hours_per_week = Column(Integer, default=0)
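
On the availability_status change: default=False only supplies a value when a row is inserted through the ORM without one, while nullable controls whether the generated column carries a NOT NULL constraint; dropping nullable=False therefore relaxes the schema, not the Python-side default. A stand-alone sketch of the distinction (the model is illustrative, not the repository's Profile):

from sqlalchemy import Boolean, Column, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Example(Base):
    __tablename__ = "example"
    id = Column(Integer, primary_key=True)
    # default=False fills in the value on ORM inserts;
    # without nullable=False the column still allows explicit NULLs.
    flag = Column(Boolean, default=False)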

View File

@@ -1,32 +0,0 @@
# SPDX-FileCopyrightText: WTF Kooperative eG <https://wtf-eg.de/>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
from flask import Blueprint
from ki.token_auth import token_auth
from ki.resume_models import Resume
bp_resume = Blueprint('resume', __name__,
template_folder='templates')
@bp_resume.route('/')
@token_auth
def show(page):
"""
return the list of resumes as object with data array inside
"""
pass
@bp_resume.route("/<resume_id>")
@token_auth
def get_resume(resume_id):
"""
lookup for resume with resume_id, check if its from this user
and provide its contents in the appropriate format
shall support 'format' parameter with values of 'html', 'pdf'
if no parameter is given, json is returned
"""
r = Resume()
return r.to_dict()

View File

@@ -1,28 +0,0 @@
# SPDX-FileCopyrightText: WTF Kooperative eG <https://wtf-eg.de/>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
from sqlalchemy import Column, Integer, String, ForeignKey, JSON
from sqlalchemy.orm import relationship
from app import db
class Resume(db.Model):
__tablename__ = 'resume'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey("user.id", ondelete='CASCADE'))
label = Column("label", String(50), nullable=True)
data = Column('data', JSON)
user = relationship("User", backref='user', passive_deletes=True)
def to_dict(self):
return {
"id": self.id,
'user_id': self.user_id,
"label": self.label,
"data": self.data
}

View File

@@ -4,18 +4,42 @@
import os
from flask import g, make_response, request, send_file
from functools import wraps
from ki.auth import auth
from ki.handlers import find_profiles as find_profiles_handler
from ki.handlers import update_profile as update_profile_handler
from ki.models import ContactType, Language, Skill, User
from app import app
from ki.token_auth import token_auth
from ki.models import ContactType, Language, Skill, Token, User
from app import app, db
content_type_svg = "image/svg+xml"
content_type_png = "image/png"
def token_auth(func):
@wraps(func)
def _token_auth(*args, **kwargs):
auth_header = request.headers.get("Authorization")
if (auth_header is None):
return make_response({}, 401)
if not auth_header.startswith("Bearer"):
return make_response({}, 401)
token = Token.query.filter(Token.token == auth_header[7:]).first()
if token is None:
return make_response({}, 403)
g.user = token.user
return func(*args, **kwargs)
return _token_auth
def models_to_list(models):
models_list = []
@@ -42,7 +66,7 @@ def handle_completion_request(model, key):
def handle_icon_request(model, id, path):
object = model.query.get(id)
object = db.session.get(model, id)
if object is None:
return make_response({}, 404)
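
With token_auth moved back into this module, protected routes keep their shape: the decorator returns 401 when the Authorization header is missing or not a Bearer token, 403 when the token is unknown, and otherwise stores the resolved user on flask.g before calling the view. A hedged usage sketch (the endpoint is illustrative, not one of the repository's routes, and assumes it sits next to the token_auth definition above):

from flask import g, make_response
from app import app

@app.route("/whoami")
@token_auth  # 401 without a Bearer header, 403 for an unknown token
def whoami():
    return make_response({"user_id": g.user.id}, 200)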

View File

@@ -26,13 +26,14 @@ class ApiTest(unittest.TestCase):
config = migrate.get_config()
command.upgrade(config, "head")
seed(True)
max_skill = Skill.query.order_by(Skill.id.desc()).first()
self.max_skill_id = max_skill.id
seed(True)
max_skill = Skill.query.order_by(Skill.id.desc()).first()
self.max_skill_id = max_skill.id
def tearDown(self):
db.drop_all()
db.engine.dispose()
with app.app_context():
db.drop_all()
db.engine.dispose()
def login(self, username, password):
login_data = {"username": username, "password": password}
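
The tearDown change follows the same application-context rule as the seeding code: drop_all() and engine access need an active context under Flask-SQLAlchemy 3, and dispose() closes the pooled connection so the next test starts from a fresh one. A minimal sketch of the fixture shape, assuming an in-memory SQLite URI (the real suite applies the Alembic migrations and seed() in setUp):

import unittest
from app import app, db

class ApiTestSketch(unittest.TestCase):
    def setUp(self):
        app.config["TESTING"] = True
        self.client = app.test_client()
        with app.app_context():
            db.create_all()  # the repository's suite runs migrations instead

    def tearDown(self):
        with app.app_context():
            db.drop_all()
            db.engine.dispose()  # release the pooled SQLite connection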

View File

@@ -33,6 +33,7 @@ class TestLanguagesEndpoint(ApiTest):
self.assertEqual(response.status_code, 200)
self.assertIn("Content-Type", response.headers)
self.assertEqual(response.headers["Content-Type"], "image/svg+xml; charset=utf-8")
response.close()
if __name__ == "main":
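
The response.close() calls address the "unclosed file" ResourceWarnings from the linked Flask issue: a response built by send_file() keeps the underlying file open, and the test client does not close it automatically. A small sketch of the pattern (the URL is illustrative; any endpoint backed by send_file() applies):

import unittest
from app import app

class IconResponseSketch(unittest.TestCase):
    def test_icon_response_is_closed(self):
        client = app.test_client()
        response = client.get("/languages/de/icon")
        try:
            self.assertEqual(response.status_code, 200)
        finally:
            response.close()  # releases the file handle opened by send_file()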

View File

@@ -20,11 +20,12 @@ class TestProfileEndpoint(ApiTest):
self.assertEqual(login_response.status_code, 200)
self.assertIn("token", login_response.json)
babsi = User.query.filter(User.auth_id == "babsi1").first()
response = self.client.post(f"/users/{babsi.id}/profile",
data=json.dumps({}),
content_type="application/json",
headers={"Authorization": "Bearer " + login_response.json["token"]})
with app.app_context():
babsi = User.query.filter(User.auth_id == "babsi1").first()
response = self.client.post(f"/users/{babsi.id}/profile",
data=json.dumps({}),
content_type="application/json",
headers={"Authorization": "Bearer " + login_response.json["token"]})
self.assertEqual(response.status_code, 403)
@@ -103,11 +104,12 @@
"level": 2
}]
}
peter = User.query.filter(User.auth_id == "peter").first()
response = self.client.post(f"/users/{peter.id}/profile",
data=json.dumps(data),
content_type="application/json",
headers={"Authorization": "Bearer " + token})
with app.app_context():
peter = User.query.filter(User.auth_id == "peter").first()
response = self.client.post(f"/users/{peter.id}/profile",
data=json.dumps(data),
content_type="application/json",
headers={"Authorization": "Bearer " + token})
self.assertEqual(response.status_code, 200)
with app.app_context():
@@ -185,8 +187,9 @@ class TestProfileEndpoint(ApiTest):
def test_get_visible_proifle(self):
token = self.login("peter", "geheim")["token"]
babsi = User.query.filter(User.auth_id == "babsi1").first()
response = self.client.get(f"/users/{babsi.id}/profile", headers={"Authorization": f"Bearer {token}"})
with app.app_context():
babsi = User.query.filter(User.auth_id == "babsi1").first()
response = self.client.get(f"/users/{babsi.id}/profile", headers={"Authorization": f"Bearer {token}"})
self.assertEqual(response.status_code, 200)
@@ -197,10 +200,11 @@ class TestProfileEndpoint(ApiTest):
self.assertEqual(login_response.status_code, 200)
self.assertIn("token", login_response.json)
peter = User.query.filter(User.auth_id == "peter").first()
response = self.client.get(f"/users/{peter.id}/profile",
headers={"Authorization": "Bearer " + login_response.json["token"]})
profile_id = peter.profile.id
with app.app_context():
peter = User.query.filter(User.auth_id == "peter").first()
response = self.client.get(f"/users/{peter.id}/profile",
headers={"Authorization": "Bearer " + login_response.json["token"]})
profile_id = peter.profile.id
self.assertEqual(response.status_code, 200)
self.assertDictEqual(
response.json, {
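
Note that profile_id = peter.profile.id also moved inside the with app.app_context() block: once the context and its session are gone, reading a lazy-loaded attribute on the detached instance can raise DetachedInstanceError. A short sketch of the safe pattern, capturing plain values while the session is live:

from app import app
from ki.models import User

with app.app_context():
    peter = User.query.filter(User.auth_id == "peter").first()
    # Trigger any lazy loads while the session is still attached.
    profile_id = peter.profile.id

# Outside the context, use only the captured values, not the ORM object.
print(profile_id)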

View File

@@ -41,12 +41,14 @@ class TestSkillsEndpoint(ApiTest):
self.assertEqual(response.status_code, 200)
self.assertIn("Content-Type", response.headers)
self.assertEqual(response.headers["Content-Type"], "image/svg+xml; charset=utf-8")
response.close()
def test_get_fallback_skill_icon(self):
response = self.client.get("/skills/2/icon")
self.assertEqual(response.status_code, 200)
self.assertIn("Content-Type", response.headers)
self.assertEqual(response.headers["Content-Type"], "image/svg+xml; charset=utf-8")
response.close()
if __name__ == "main":

View File

@@ -1,31 +0,0 @@
# SPDX-FileCopyrightText: WTF Kooperative eG <https://wtf-eg.de/>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
from flask import g, make_response, request
from functools import wraps
from ki.models import Token
def token_auth(func):
@wraps(func)
def _token_auth(*args, **kwargs):
auth_header = request.headers.get("Authorization")
if (auth_header is None):
return make_response({}, 401)
if not auth_header.startswith("Bearer"):
return make_response({}, 401)
token = Token.query.filter(Token.token == auth_header[7:]).first()
if token is None:
return make_response({}, 403)
g.user = token.user
return func(*args, **kwargs)
return _token_auth

View File

@@ -19,7 +19,7 @@ logger = logging.getLogger('alembic.env')
# target_metadata = mymodel.Base.metadata
config.set_main_option(
'sqlalchemy.url',
str(current_app.extensions['migrate'].db.get_engine().url).replace(
str(current_app.extensions['migrate'].db.engine.url).replace(
'%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata
@@ -68,7 +68,7 @@ def run_migrations_online():
directives[:] = []
logger.info('No changes in schema detected.')
connectable = current_app.extensions['migrate'].db.get_engine()
connectable = current_app.extensions['migrate'].db.engine
with connectable.connect() as connection:
context.configure(
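
This is the change behind the "Replaced deprecated method" commit: Flask-SQLAlchemy 3 deprecates SQLAlchemy.get_engine() in favour of the engine and engines properties (see the linked API documentation). A minimal sketch of the replacement outside the Alembic setup:

from app import app, db

with app.app_context():
    # Previously: db.get_engine()  (deprecated in Flask-SQLAlchemy 3)
    engine = db.engine  # default bind
    print(engine.url)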

View File

@@ -1,35 +0,0 @@
"""add resume
Revision ID: 6be5073423b4
Revises: b5023977cbda
Create Date: 2024-08-30 18:18:14.555874
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '6be5073423b4'
down_revision = 'b5023977cbda'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('resume',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('label', sa.String(length=50), nullable=True),
sa.Column('data', sa.JSON(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('resume')
# ### end Alembic commands ###