feat: Alembic migrations, initial schema

- Initialize Alembic with MySQL/MariaDB-targeted configuration
- Configure env.py to read DB URL from app.config.get_settings()
- Create initial migration (062ccae5457b) for all 11 tables:
  users, refresh_tokens, invitation_links, allowed_domains,
  cases, case_icd_codes, weekly_reports, yearly_summary,
  import_log, audit_log, notifications
- Include all indexes, foreign keys, check constraints, and
  MySQL text prefix index (icd(20))
- Add seed script (scripts/init_db.py) for dak.de domain whitelist
- DB apply deferred: MariaDB on Hetzner 1 not reachable from dev

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
CCS Admin 2026-02-24 07:33:27 +00:00
parent e7befe78b6
commit 4649f7a082
7 changed files with 706 additions and 0 deletions

119
backend/alembic.ini Normal file
View file

@ -0,0 +1,119 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# sqlalchemy.url is set dynamically in alembic/env.py from app.config
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

1
backend/alembic/README Normal file
View file

@ -0,0 +1 @@
Generic single-database configuration.

86
backend/alembic/env.py Normal file
View file

@ -0,0 +1,86 @@
"""Alembic environment configuration for DAK Zweitmeinungs-Portal."""
import sys
import os
from logging.config import fileConfig
from sqlalchemy import engine_from_config, pool
from alembic import context
# ---------------------------------------------------------------------------
# Ensure the backend package is importable.
# alembic is invoked from backend/, so '.' is already on sys.path via
# alembic.ini's `prepend_sys_path = .`, but we add it explicitly to be safe.
# ---------------------------------------------------------------------------
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from app.database import Base # noqa: E402
from app.models import * # noqa: E402, F401, F403 — register all models with metadata
from app.config import get_settings # noqa: E402
# ---------------------------------------------------------------------------
# Alembic Config object — provides access to the .ini file values.
# ---------------------------------------------------------------------------
config = context.config
# Interpret the config file for Python logging.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# The MetaData object for 'autogenerate' support.
target_metadata = Base.metadata
def get_url() -> str:
    """Look up the SQLAlchemy database URL from the application settings."""
    settings = get_settings()
    return settings.database_url
def run_migrations_offline() -> None:
    """Run migrations in 'offline' (SQL-script) mode.

    Only a database URL is handed to the Alembic context — no Engine and
    therefore no DBAPI is required. Each context.execute() call is emitted
    as literal SQL to the script output instead of being executed.
    """
    context.configure(
        url=get_url(),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode against a live database.

    Builds an Engine from the [alembic] ini section (with the URL injected
    from application settings) and binds a real connection to the context.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    ini_section["sqlalchemy.url"] = get_url()

    engine = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
        )
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic selects offline mode when invoked with --sql,
# otherwise migrations run online against the configured database.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View file

@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View file

@ -0,0 +1,447 @@
"""initial schema
Revision ID: 062ccae5457b
Revises:
Create Date: 2026-02-24 07:31:10.140166
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision: str = "062ccae5457b"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the full initial schema: all 11 tables of the portal.

    Tables are created parents-first so that every FOREIGN KEY target
    (primarily ``users`` and ``cases``) exists before it is referenced.
    ``downgrade`` drops them in the exact reverse order.
    """
    # ---- 1. users (no FK dependencies) ----
    # Referenced by almost every other table, so it must come first.
    op.create_table(
        "users",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("username", sa.String(100), nullable=False),
        sa.Column("email", sa.String(255), nullable=False),
        sa.Column("password_hash", sa.String(255), nullable=False),
        sa.Column("role", sa.String(20), nullable=False, server_default="dak_mitarbeiter"),
        sa.Column("mfa_secret", sa.String(255), nullable=True),
        sa.Column("mfa_enabled", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default="1"),
        sa.Column("must_change_password", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("last_login", sa.DateTime(), nullable=True),
        sa.Column("failed_login_attempts", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("locked_until", sa.DateTime(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("username", name="uk_username"),
        sa.UniqueConstraint("email", name="uk_email"),
        sa.CheckConstraint("role IN ('admin', 'dak_mitarbeiter')", name="chk_role"),
    )

    # ---- 2. refresh_tokens (FK -> users) ----
    # ondelete=CASCADE: deleting a user removes their refresh tokens.
    op.create_table(
        "refresh_tokens",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("token_hash", sa.String(255), nullable=False),
        sa.Column("expires_at", sa.DateTime(), nullable=False),
        sa.Column("revoked", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
        sa.Index("idx_user", "user_id"),
        sa.Index("idx_token", "token_hash"),
    )

    # ---- 3. invitation_links (FK -> users) ----
    op.create_table(
        "invitation_links",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("token", sa.String(255), nullable=False),
        sa.Column("email", sa.String(255), nullable=True),
        sa.Column("role", sa.String(20), nullable=False, server_default="dak_mitarbeiter"),
        sa.Column("created_by", sa.Integer(), nullable=True),
        sa.Column("expires_at", sa.DateTime(), nullable=False),
        sa.Column("used_at", sa.DateTime(), nullable=True),
        sa.Column("used_by", sa.Integer(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default="1"),
        sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["created_by"], ["users.id"]),
        sa.ForeignKeyConstraint(["used_by"], ["users.id"]),
        sa.UniqueConstraint("token", name="uk_token"),
    )

    # ---- 4. allowed_domains (no FK) ----
    op.create_table(
        "allowed_domains",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("domain", sa.String(255), nullable=False),
        sa.Column("role", sa.String(20), nullable=False, server_default="dak_mitarbeiter"),
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default="1"),
        sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("domain", name="uk_domain"),
    )

    # ---- 5. cases (FK -> users for updated_by, icd_entered_by, coding_completed_by) ----
    op.create_table(
        "cases",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("fall_id", sa.String(100), nullable=True),
        sa.Column("crm_ticket_id", sa.String(20), nullable=True),
        sa.Column("jahr", sa.SmallInteger(), nullable=False),
        sa.Column("kw", sa.SmallInteger(), nullable=False),
        sa.Column("datum", sa.Date(), nullable=False),
        sa.Column("anrede", sa.String(20), nullable=True),
        sa.Column("vorname", sa.String(100), nullable=True),
        sa.Column("nachname", sa.String(100), nullable=False),
        sa.Column("geburtsdatum", sa.Date(), nullable=True),
        sa.Column("kvnr", sa.String(20), nullable=True),
        sa.Column("versicherung", sa.String(50), nullable=False, server_default="DAK"),
        # Free-text ICD field from the import; normalized codes live in case_icd_codes.
        sa.Column("icd", sa.Text(), nullable=True),
        sa.Column("fallgruppe", sa.String(20), nullable=False),
        sa.Column("strasse", sa.String(255), nullable=True),
        sa.Column("plz", sa.String(10), nullable=True),
        sa.Column("ort", sa.String(100), nullable=True),
        sa.Column("email", sa.String(255), nullable=True),
        sa.Column("ansprechpartner", sa.String(200), nullable=True),
        sa.Column("telefonnummer", sa.String(50), nullable=True),
        sa.Column("mobiltelefon", sa.String(50), nullable=True),
        sa.Column("email2", sa.String(255), nullable=True),
        sa.Column("telefon2", sa.String(50), nullable=True),
        sa.Column("unterlagen", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("unterlagen_verschickt", sa.Date(), nullable=True),
        # Tri-state on purpose: NULL = unknown, in contrast to the other flags.
        sa.Column("erhalten", sa.Boolean(), nullable=True),
        sa.Column("unterlagen_erhalten", sa.Date(), nullable=True),
        sa.Column("unterlagen_an_gutachter", sa.Date(), nullable=True),
        sa.Column("gutachten", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("gutachter", sa.String(100), nullable=True),
        sa.Column("gutachten_erstellt", sa.Date(), nullable=True),
        sa.Column("gutachten_versendet", sa.Date(), nullable=True),
        sa.Column("schweigepflicht", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("ablehnung", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("abbruch", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("abbruch_datum", sa.Date(), nullable=True),
        sa.Column("gutachten_typ", sa.String(20), nullable=True),
        sa.Column("therapieaenderung", sa.String(5), nullable=True),
        sa.Column("ta_diagnosekorrektur", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("ta_unterversorgung", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("ta_uebertherapie", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("kurzbeschreibung", sa.Text(), nullable=True),
        sa.Column("fragestellung", sa.Text(), nullable=True),
        sa.Column("kommentar", sa.Text(), nullable=True),
        sa.Column("sonstiges", sa.Text(), nullable=True),
        sa.Column("abgerechnet", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("abrechnung_datum", sa.Date(), nullable=True),
        sa.Column("import_source", sa.String(255), nullable=True),
        sa.Column("imported_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.Column("updated_by", sa.Integer(), nullable=True),
        sa.Column("icd_entered_by", sa.Integer(), nullable=True),
        sa.Column("icd_entered_at", sa.DateTime(), nullable=True),
        sa.Column("coding_completed_by", sa.Integer(), nullable=True),
        sa.Column("coding_completed_at", sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["updated_by"], ["users.id"]),
        sa.ForeignKeyConstraint(["icd_entered_by"], ["users.id"]),
        sa.ForeignKeyConstraint(["coding_completed_by"], ["users.id"]),
        sa.UniqueConstraint("fall_id", name="uk_fall_id"),
        sa.Index("idx_jahr_kw", "jahr", "kw"),
        sa.Index("idx_kvnr", "kvnr"),
        sa.Index("idx_fallgruppe", "fallgruppe"),
        sa.Index("idx_datum", "datum"),
        sa.Index("idx_nachname_vorname", "nachname", "vorname"),
        sa.Index("idx_pending_coding", "gutachten", "gutachten_typ"),
        sa.CheckConstraint(
            "fallgruppe IN ('onko','kardio','intensiv','galle','sd')",
            name="chk_fallgruppe",
        ),
        sa.CheckConstraint(
            "gutachten_typ IS NULL OR gutachten_typ IN "
            "('Bestätigung','Alternative')",
            name="chk_gutachten_typ",
        ),
        sa.CheckConstraint(
            "therapieaenderung IS NULL OR therapieaenderung IN ('Ja','Nein')",
            name="chk_ta",
        ),
    )
    # The idx_pending_icd index needs a MySQL prefix length on the `icd`
    # TEXT column (TEXT columns cannot be indexed without one). Created
    # via raw SQL so the `icd(20)` prefix is explicit.
    # NOTE(review): sa.Index(..., mysql_length={"icd": 20}) may also work
    # inside create_table on current SQLAlchemy — raw SQL kept as-is here.
    op.execute(
        "CREATE INDEX idx_pending_icd ON cases (jahr, kw, fallgruppe, icd(20))"
    )

    # ---- 6. case_icd_codes (FK -> cases) ----
    # One row per normalized ICD code extracted from cases.icd.
    op.create_table(
        "case_icd_codes",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("case_id", sa.Integer(), nullable=False),
        sa.Column("icd_code", sa.String(20), nullable=False),
        sa.Column("icd_hauptgruppe", sa.String(10), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["case_id"], ["cases.id"], ondelete="CASCADE"),
        sa.Index("idx_case", "case_id"),
        sa.Index("idx_code", "icd_code"),
        sa.Index("idx_haupt", "icd_hauptgruppe"),
    )

    # ---- 7. weekly_reports (FK -> users) ----
    op.create_table(
        "weekly_reports",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("jahr", sa.SmallInteger(), nullable=False),
        sa.Column("kw", sa.SmallInteger(), nullable=False),
        sa.Column("report_date", sa.Date(), nullable=False),
        sa.Column("report_file_path", sa.String(500), nullable=True),
        sa.Column("report_data", mysql.JSON(), nullable=True),
        sa.Column("generated_by", sa.Integer(), nullable=True),
        sa.Column("generated_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["generated_by"], ["users.id"]),
        # One report per calendar week.
        sa.UniqueConstraint("jahr", "kw", name="uk_jahr_kw"),
    )

    # ---- 8. yearly_summary (no FK) ----
    # Denormalized per-week aggregate counters; all counts default to 0.
    op.create_table(
        "yearly_summary",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("jahr", sa.SmallInteger(), nullable=False),
        sa.Column("kw", sa.SmallInteger(), nullable=False),
        # Overall counts
        sa.Column("erstberatungen", sa.Integer(), server_default="0"),
        sa.Column("ablehnungen", sa.Integer(), server_default="0"),
        sa.Column("unterlagen", sa.Integer(), server_default="0"),
        sa.Column("keine_rueckmeldung", sa.Integer(), server_default="0"),
        sa.Column("gutachten_gesamt", sa.Integer(), server_default="0"),
        sa.Column("gutachten_alternative", sa.Integer(), server_default="0"),
        sa.Column("gutachten_bestaetigung", sa.Integer(), server_default="0"),
        # Per-Fallgruppe: onko
        sa.Column("onko_anzahl", sa.Integer(), server_default="0"),
        sa.Column("onko_gutachten", sa.Integer(), server_default="0"),
        sa.Column("onko_keine_rm", sa.Integer(), server_default="0"),
        # Per-Fallgruppe: kardio
        sa.Column("kardio_anzahl", sa.Integer(), server_default="0"),
        sa.Column("kardio_gutachten", sa.Integer(), server_default="0"),
        sa.Column("kardio_keine_rm", sa.Integer(), server_default="0"),
        # Per-Fallgruppe: intensiv
        sa.Column("intensiv_anzahl", sa.Integer(), server_default="0"),
        sa.Column("intensiv_gutachten", sa.Integer(), server_default="0"),
        sa.Column("intensiv_keine_rm", sa.Integer(), server_default="0"),
        # Per-Fallgruppe: galle
        sa.Column("galle_anzahl", sa.Integer(), server_default="0"),
        sa.Column("galle_gutachten", sa.Integer(), server_default="0"),
        sa.Column("galle_keine_rm", sa.Integer(), server_default="0"),
        # Per-Fallgruppe: sd
        sa.Column("sd_anzahl", sa.Integer(), server_default="0"),
        sa.Column("sd_gutachten", sa.Integer(), server_default="0"),
        sa.Column("sd_keine_rm", sa.Integer(), server_default="0"),
        # Gutachten-Typ per Fallgruppe
        sa.Column("onko_alternative", sa.Integer(), server_default="0"),
        sa.Column("onko_bestaetigung", sa.Integer(), server_default="0"),
        sa.Column("kardio_alternative", sa.Integer(), server_default="0"),
        sa.Column("kardio_bestaetigung", sa.Integer(), server_default="0"),
        sa.Column("intensiv_alternative", sa.Integer(), server_default="0"),
        sa.Column("intensiv_bestaetigung", sa.Integer(), server_default="0"),
        sa.Column("galle_alternative", sa.Integer(), server_default="0"),
        sa.Column("galle_bestaetigung", sa.Integer(), server_default="0"),
        sa.Column("sd_alternative", sa.Integer(), server_default="0"),
        sa.Column("sd_bestaetigung", sa.Integer(), server_default="0"),
        # Therapieaenderung counts
        sa.Column("ta_ja", sa.Integer(), server_default="0"),
        sa.Column("ta_nein", sa.Integer(), server_default="0"),
        sa.Column("ta_diagnosekorrektur", sa.Integer(), server_default="0"),
        sa.Column("ta_unterversorgung", sa.Integer(), server_default="0"),
        sa.Column("ta_uebertherapie", sa.Integer(), server_default="0"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("jahr", "kw", name="uk_jahr_kw"),
    )

    # ---- 9. import_log (FK -> users) ----
    op.create_table(
        "import_log",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("filename", sa.String(255), nullable=False),
        sa.Column("import_type", sa.String(50), nullable=False),
        sa.Column("cases_imported", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("cases_skipped", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("cases_updated", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("errors", sa.Text(), nullable=True),
        sa.Column("details", mysql.JSON(), nullable=True),
        sa.Column("imported_by", sa.Integer(), nullable=True),
        sa.Column("imported_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["imported_by"], ["users.id"]),
        sa.CheckConstraint(
            "import_type IN "
            "('csv_crm','icd_xlsx','historical_excel','excel_sync')",
            name="chk_imp_type",
        ),
    )

    # ---- 10. audit_log (FK -> users) ----
    op.create_table(
        "audit_log",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.Column("action", sa.String(100), nullable=False),
        sa.Column("entity_type", sa.String(50), nullable=True),
        sa.Column("entity_id", sa.Integer(), nullable=True),
        sa.Column("old_values", mysql.JSON(), nullable=True),
        sa.Column("new_values", mysql.JSON(), nullable=True),
        # String(45) accommodates full-length IPv6 addresses.
        sa.Column("ip_address", sa.String(45), nullable=True),
        sa.Column("user_agent", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
        sa.Index("idx_user", "user_id"),
        sa.Index("idx_entity", "entity_type", "entity_id"),
        sa.Index("idx_created", "created_at"),
    )

    # ---- 11. notifications (FK -> users) ----
    op.create_table(
        "notifications",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("recipient_id", sa.Integer(), nullable=False),
        sa.Column("notification_type", sa.String(50), nullable=False),
        sa.Column("title", sa.String(255), nullable=False),
        sa.Column("message", sa.Text(), nullable=True),
        sa.Column("related_entity_type", sa.String(50), nullable=True),
        sa.Column("related_entity_id", sa.Integer(), nullable=True),
        sa.Column("is_read", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("email_sent", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("email_sent_at", sa.DateTime(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["recipient_id"], ["users.id"]),
        sa.Index("idx_recipient", "recipient_id", "is_read"),
        sa.CheckConstraint(
            "notification_type IN ("
            "'new_cases_uploaded','icd_entered','icd_uploaded',"
            "'report_ready','coding_completed')",
            name="chk_notif",
        ),
    )
def downgrade() -> None:
    """Drop all tables in reverse creation order (respecting FK dependencies)."""
    # Children of `cases` and `users` go first.
    for table in (
        "notifications",
        "audit_log",
        "import_log",
        "yearly_summary",
        "weekly_reports",
        "case_icd_codes",
    ):
        op.drop_table(table)

    # The raw-SQL prefix index is dropped explicitly before its table.
    op.drop_index("idx_pending_icd", table_name="cases")

    for table in (
        "cases",
        "allowed_domains",
        "invitation_links",
        "refresh_tokens",
        "users",
    ):
        op.drop_table(table)

View file

@ -0,0 +1,27 @@
"""Seed initial data into the database."""
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from app.database import SessionLocal
from app.models.user import AllowedDomain
def seed() -> None:
    """Insert the dak.de entry into the domain whitelist, idempotently."""
    session = SessionLocal()
    try:
        already_present = (
            session.query(AllowedDomain).filter_by(domain="dak.de").first()
        )
        if already_present:
            print("Already exists: dak.de domain whitelist")
        else:
            session.add(AllowedDomain(domain="dak.de", role="dak_mitarbeiter"))
            session.commit()
            print("Seeded: dak.de domain whitelist")
    finally:
        # Always release the session, even if the query/commit raises.
        session.close()


if __name__ == "__main__":
    seed()