Compare commits
27 Commits
fix/flags ... d7f4acf251

SHA1
d7f4acf251
c65ef4a95c
f3840f18b7
da46d01765
0f9f807256
66294cd52f
d4a5c8f5eb
5e4d6d464d
6e77647eb9
f7278bf7ea
cf1a5a532c
0fd04d4797
a5bd954bb5
881c3d3038
d60acd169b
f1ecbadf05
67cb8c9152
f7e058d387
695c88e159
1360b4c738
689a5ba33e
19aebcc327
0fcd407006
dea781cc29
be9bc8b5cc
4fab7d7cda
f131ee335c
17  .drone.yml

@@ -27,8 +27,25 @@ steps:
       password:
         from_secret: "docker_password"
     when:
+      event:
+        - push
       branch:
         - main
 
+  - name: docker-publish-tag
+    image: plugins/docker
+    settings:
+      registry: registry.wtf-eg.net
+      repo: registry.wtf-eg.net/ki-backend
+      target: ki-backend
+      auto_tag: true
+      username:
+        from_secret: "docker_username"
+      password:
+        from_secret: "docker_password"
+    when:
+      event:
+        - tag
+
 image_pull_secrets:
   - dockerconfig
Dockerfile

@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: AGPL-3.0-or-later
 
-FROM registry.wtf-eg.net/ki-backend-builder:1.0.0 as builder
+FROM registry.wtf-eg.net/ki-backend-builder:1.0.1 as builder
 
 COPY Pipfile* ./
 

@@ -10,7 +10,7 @@ RUN PIP_USER=1 PIP_IGNORE_INSTALLED=1 pipenv install --system --deploy --ignore-
 RUN pip3 uninstall --yes pipenv
 
 
-FROM registry.wtf-eg.net/ki-backend-base:1.0.0 as ki-backend
+FROM registry.wtf-eg.net/ki-backend-base:1.0.1 as ki-backend
 
 # Install six explicitly. Otherwise Python complains about it missing.
 RUN pip3 install six
2  Pipfile

@@ -14,7 +14,7 @@ flask-migrate = "~=3.0.1"
 flask-sqlalchemy = "~=2.5.1"
 sqlalchemy = "~=1.4.18"
 waitress = "~=2.0.0"
-pyyaml = "~=5.4.1"
+pyyaml = "~=6.0.1"
 flask-cors = "~=3.0.10"
 ldap3 = "~=2.9"
 pymysql = "~=1.0.2"
860  Pipfile.lock (generated)

File diff suppressed because it is too large.
26  README.md

@@ -9,6 +9,32 @@ SPDX-License-Identifier: AGPL-3.0-or-later
 [](https://drone.wtf-eg.de/kompetenzinventar/ki-backend)
 [](https://api.reuse.software/info/git.wtf-eg.de/kompetenzinventar/ki-backend)
 
+## Über
+
+Dieses Repo enthält das Backend des Projekts Kompentenzinventar - einer Webapplikation zur Erfassung von Userprofilen für die WTF eG.
+
+Implementiert ist das Backend mit Flask.
+
+### Mitmachen
+
+Du kannst gerne bei der Entwicklung des Kompetenzinventars mitmachen.
+
+- Fehler oder fehlende Funktionen erfassen. Bitte direkt über die [Issues](https://git.wtf-eg.de/kompetenzinventar/ki-backend/issues) in Gitea.
+- Dokumentation oder Implementierung verbessern. Bitte forke hierzu das Projekt, branche von `main` ab und erstelle dann einen [Pull Request](https://git.wtf-eg.de/kompetenzinventar/ki-backend/pulls).
+
+### Kommunikation
+
+Folgende Kanäle gibt es für die Kommunikation über das Kompetenzinventar:
+
+- Die [Issues](https://git.wtf-eg.de/kompetenzinventar/ki-backend/issues) im WTF Gitea.
+- Den Bereich [AG Entwicklung](https://forum.wtf-eg.de/c/interna/ag-entwicklung/21) im WTF Forum.
+- Einen Raum in Matrix. Zutritt per Einladung, frlan lädt ein, eine einfache PN im Forum reicht.
+
+### Repos
+
+* **[ki-backend](https://git.wtf-eg.de/kompetenzinventar/ki-backend)** (dieses Repo) enthält das Backend
+* [ki-frontend](https://git.wtf-eg.de/kompetenzinventar/ki-frontend) enthält das Frontend
+* Weitere Repositories befinden sich in der Gitea Organisation [Kompetenzinventar](https://git.wtf-eg.de/kompetenzinventar).
 ## Entwicklung
 
 ### Abhängigkeiten
15  app.py

@@ -8,22 +8,23 @@ import os
 from dotenv import load_dotenv, find_dotenv
 from flask import Flask
 from flask_cors import CORS
-from flask.logging import default_handler
 from flask_migrate import Migrate
 from flask_sqlalchemy import SQLAlchemy
-from ldap3.utils.log import logger as ldap3_logger
 from ldap3.utils.log import set_library_log_detail_level, BASIC
 
 load_dotenv(find_dotenv())
 
 app = Flask(__name__)
 
+# Configure logging
 loglevel = os.getenv("KI_LOGLEVEL", logging.WARNING)
 loglevel = int(loglevel)
 app.logger.setLevel(loglevel)
-logging.basicConfig(level=loglevel)
-set_library_log_detail_level(BASIC)
-ldap3_logger.addHandler(default_handler)
+app.logger.propagate = False  # do not forward messages to the root logger
+logging.basicConfig(level=loglevel,
+                    format='[%(asctime)s] %(levelname)s [%(name)s] %(message)s')  # configure root logger as fallback
+logging.getLogger('werkzeug').propagate = False  # werkzeug has its own ColorStreamHandler
+set_library_log_detail_level(BASIC)  # ldap3 has different verbosity levels internally
 
 app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("SQLALCHEMY_DATABASE_URI")
 app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False

@@ -40,6 +41,6 @@ CORS(app)
 db = SQLAlchemy(app)
 migrate = Migrate(app, db, compare_type=True)
 
-logging.debug("Hello from KI")
+app.logger.info("Hello from KI")
 
 from ki import module  # noqa
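A note on the reworked logging setup above: app.logger no longer propagates to the root logger, basicConfig only configures the root logger as a fallback (with a timestamped format), and werkzeug keeps its own handler. KI_LOGLEVEL is still read as a numeric logging level; a minimal, illustrative sketch of how such a value maps onto the standard levels (mirroring the getenv/int pattern above, not part of the change itself):

    import logging
    import os

    # Numeric levels as used by KI_LOGLEVEL: 10 = DEBUG, 20 = INFO, 30 = WARNING (the default above)
    loglevel = int(os.getenv("KI_LOGLEVEL", logging.WARNING))
    print(logging.getLevelName(loglevel))  # e.g. "WARNING" when the variable is unset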
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 
 import csv
-import logging
 
 from app import app, db
 from ki.models import Address, Contact, ContactType, Language, Skill, Profile, ProfileLanguage, ProfileSearchtopic, \

@@ -13,7 +12,7 @@ from ki.models import Address, Contact, ContactType, Language, Skill, Profile, P
 def seed_contacttypes():
     contacttypes_seed_file_path = app.config["KI_DATA_DIR"] + "/seed_data/contacttypes.csv"
 
-    logging.info("importing contacttypes")
+    app.logger.info("importing contacttypes")
 
     with open(contacttypes_seed_file_path) as file:
         csv_reader = csv.DictReader(file)

@@ -60,6 +59,8 @@ def seed_user(nickname,
 
     db.session.add(profile)
 
+    return profile
+
 
 def seed(dev: bool):
     seed_contacttypes()

@@ -143,15 +144,20 @@ def seed(dev: bool):
     db.session.add(peter_fr)
 
     seed_user("klaus")
+    for i in range(1, 20):
+        seed_user(f"babsi{i}", visible=True)
 
-    seed_user("dirtydieter",
+    dieter = seed_user("dirtydieter",
               visible=True,
               volunteerwork="Müll sammeln",
              availability_status=True,
              availability_hours_per_week=24,
              availability_text="Nur Nachts!",
              freetext="1001010010111!!!",
              skills=[(Skill.skill_id_php, 5)])
 
+    dieters_address = Address(name="Friedrich Witzig", profile=dieter)
+    db.session.add(dieters_address)
+
     all_skills = Skill.query.all()
     all_profile_skills = []
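The seeding change above adds 19 more visible profiles (babsi1 through babsi19) on top of the existing fixtures; that is what lifts the expected profile total in the tests further down from 4 to 23 and spreads the results over two pages, assuming the default page size of 20 that the updated tests imply. A quick sketch of that arithmetic:

    # Sketch: range(1, 20) yields 19 values, so 19 extra "babsi" profiles get seeded.
    babsis = [f"babsi{i}" for i in range(1, 20)]
    assert len(babsis) == 19

    previously_visible = 4            # the old test_find_all expected {"total": 4}
    total = previously_visible + len(babsis)
    assert total == 23                # matches the new test expectations
    assert -(-total // 20) == 2       # ceiling division: 23 profiles fill 2 pages of 20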
@@ -4,11 +4,14 @@
 
 from flask import make_response, request
 
-from ki.models import Profile, ProfileSkill, Skill, ProfileLanguage, Language
+from ki.models import Profile, ProfileSkill, Skill, ProfileLanguage, Language, Address
 
 
 def find_profiles():
-    page = int(request.args.get("page", 1))
+    try:
+        page = int(request.args.get("page", 1))
+    except ValueError:
+        page = 1
 
     if page < 1:
         return make_response({"messages": {"page": "Die angefragte Seite muss mindestens 1 sein"}}, 400)

@@ -19,27 +22,35 @@ def find_profiles():
         return make_response({"messages": {"page_size": "Die maximale Anzahl Einträge pro Seite beträgt 100"}}, 400)
 
     query = Profile.query.distinct(Profile.id) \
+        .order_by(Profile.nickname) \
         .filter(Profile.visible.is_(True)) \
         .join(Profile.skills, isouter=True).join(ProfileSkill.skill, isouter=True) \
-        .join(Profile.languages, isouter=True).join(ProfileLanguage.language, isouter=True)
+        .join(Profile.languages, isouter=True).join(ProfileLanguage.language, isouter=True) \
+        .join(Address, isouter=True)
 
     if "search" in request.args:
         terms = request.args["search"].split(" ")
         for term in terms:
             query = query.filter(
-                Profile.nickname.like(f"%{term}%") | Skill.name.like(f"%{term}%") | Language.name.like(f"%{term}%"))
+                Profile.nickname.like(f"%{term}%") |  # noqa: W504
+                Skill.name.like(f"%{term}%") |  # noqa: W504
+                Language.name.like(f"%{term}%") |  # noqa: W504
+                Address.name.like(f"%{term}%"))
 
     if "nickname" in request.args:
         nickname = request.args.get("nickname")
         query = query.filter(Profile.nickname.like(f"%{nickname}%"))
 
-    count = query.count()
+    paginated_result = query.paginate(page=page, per_page=page_size)
 
-    offset = (page - 1) * page_size
-    db_profiles = query.limit(page_size).offset(offset).all()
     api_profiles = []
 
-    for db_profile in db_profiles:
+    for db_profile in paginated_result.items:
         api_profiles.append(db_profile.to_dict())
 
-    return make_response({"total": count, "profiles": api_profiles})
+    return make_response({
+        "total": paginated_result.total,
+        "pages": paginated_result.pages,
+        "page": paginated_result.page,
+        "profiles": api_profiles
+    })
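With paginate() doing the slicing, the handler above no longer computes limit/offset by hand, and the response now carries the pagination metadata alongside the profiles. A minimal sketch of what a client sees, assuming a test client and token prepared the same way as in the tests below (names are illustrative):

    # Sketch only: exercising the paginated endpoint.
    response = client.get("/users/profiles?page=2",
                          headers={"Authorization": "Bearer " + token})

    body = response.json
    # paginate() supplies total/pages/page; "profiles" holds only the requested page
    print(body["total"], body["pages"], body["page"], len(body["profiles"]))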
@@ -20,25 +20,33 @@ class TestFindProfilesEndpoint(ApiTest):
         response = self.client.get("/users/profiles?nickname=horsthorsthorst",
                                    headers={"Authorization": "Bearer " + token})
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.json, {"total": 0, "profiles": []})
+        self.assertEqual(response.json, {"total": 0, "page": 1, "pages": 0, "profiles": []})
 
     def test_find_sql_specialchars(self):
         token = self.login("peter", "geheim")["token"]
 
         response = self.client.get("/users/profiles?nickname=%22%27%25", headers={"Authorization": "Bearer " + token})
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.json, {"total": 0, "profiles": []})
+        self.assertEqual(response.json, {"total": 0, "page": 1, "pages": 0, "profiles": []})
 
-    def test_find_all(self):
+    def test_find_all_page1(self):
         token = self.login("peter", "geheim")["token"]
 
         response = self.client.get("/users/profiles", headers={"Authorization": "Bearer " + token})
         self.assertEqual(response.status_code, 200)
-        self.assertDictContainsSubset({"total": 4}, response.json)
-        self.assertDictContainsSubset({"nickname": "dirtydieter"}, response.json["profiles"][0])
+        self.assertDictContainsSubset({"total": 23, "page": 1, "pages": 2}, response.json)
+        self.assertDictContainsSubset({"nickname": "babsi1"}, response.json["profiles"][0])
+        self.assertDictContainsSubset({"nickname": "dirtydieter"}, response.json["profiles"][19])
+
+    def test_find_all_page2(self):
+        token = self.login("peter", "geheim")["token"]
+
+        response = self.client.get("/users/profiles?page=2", headers={"Authorization": "Bearer " + token})
+        self.assertEqual(response.status_code, 200)
+        self.assertDictContainsSubset({"total": 23, "page": 2, "pages": 2}, response.json)
+        self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][0])
         self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][1])
-        self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][2])
-        self.assertDictContainsSubset({"nickname": "monique"}, response.json["profiles"][3])
+        self.assertDictContainsSubset({"nickname": "monique"}, response.json["profiles"][2])
 
     def test_find_dieter(self):
         token = self.login("peter", "geheim")["token"]

@@ -62,8 +70,8 @@ class TestFindProfilesEndpoint(ApiTest):
         response = self.client.get("/users/profiles?search=sql", headers={"Authorization": "Bearer " + token})
         self.assertEqual(response.status_code, 200)
         self.assertDictContainsSubset({"total": 2}, response.json)
-        self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][0])
-        self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][1])
+        self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][0])
+        self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][1])
 
     def test_find_postgres(self):
         token = self.login("peter", "geheim")["token"]

@@ -71,8 +79,8 @@ class TestFindProfilesEndpoint(ApiTest):
         response = self.client.get("/users/profiles?search=post", headers={"Authorization": "Bearer " + token})
         self.assertEqual(response.status_code, 200)
         self.assertDictContainsSubset({"total": 2}, response.json)
-        self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][0])
-        self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][1])
+        self.assertDictContainsSubset({"nickname": "giesela"}, response.json["profiles"][0])
+        self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][1])
 
     def test_find_php_franzosen(self):
         token = self.login("peter", "geheim")["token"]

@@ -91,6 +99,14 @@ class TestFindProfilesEndpoint(ApiTest):
         self.assertDictContainsSubset({"nickname": "jutta"}, response.json["profiles"][0])
         self.assertDictContainsSubset({"nickname": "monique"}, response.json["profiles"][1])
 
+    def test_find_dieter_by_name(self):
+        token = self.login("peter", "geheim")["token"]
+
+        response = self.client.get("/users/profiles?search=friedrich", headers={"Authorization": "Bearer " + token})
+        self.assertEqual(response.status_code, 200)
+        self.assertDictContainsSubset({"total": 1}, response.json)
+        self.assertDictContainsSubset({"nickname": "dirtydieter"}, response.json["profiles"][0])
+
 
 if __name__ == "main":
     unittest.main()
@@ -1,50 +0,0 @@
-# A generic, single database configuration.
-
-[alembic]
-# template used to generate migration files
-# file_template = %%(rev)s_%%(slug)s
-
-# set to 'true' to run the environment during
-# the 'revision' command, regardless of autogenerate
-# revision_environment = false
-
-
-# Logging configuration
-[loggers]
-keys = root,sqlalchemy,alembic,flask_migrate
-
-[handlers]
-keys = console
-
-[formatters]
-keys = generic
-
-[logger_root]
-level = WARN
-handlers = console
-qualname =
-
-[logger_sqlalchemy]
-level = WARN
-handlers =
-qualname = sqlalchemy.engine
-
-[logger_alembic]
-level = INFO
-handlers =
-qualname = alembic
-
-[logger_flask_migrate]
-level = INFO
-handlers =
-qualname = flask_migrate
-
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-level = NOTSET
-formatter = generic
-
-[formatter_generic]
-format = %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %H:%M:%S
@@ -11,9 +11,6 @@ from alembic import context
 # access to the values within the .ini file in use.
 config = context.config
 
-# Interpret the config file for Python logging.
-# This line sets up loggers basically.
-fileConfig(config.config_file_name)
 logger = logging.getLogger('alembic.env')
 
 # add your model's MetaData object here
@@ -11,4 +11,4 @@ with app.app_context():
     config = migrate.get_config()
     command.upgrade(config, "head")
 
-serve(app, host="0.0.0.0", port=5000)
+serve(app, host="0.0.0.0", port=5000, threads=20)