Compare commits
19 Commits
062a384444
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dc78c0550e | ||
|
|
0371a36209 | ||
|
|
6cd6ce01eb | ||
|
|
ac719b219f | ||
|
|
283f75a5e2 | ||
|
|
6a513a97ce | ||
|
|
6211290923 | ||
|
|
6b40c04b0d | ||
|
|
355b5156a5 | ||
|
|
51b8e15726 | ||
|
|
35ea49d6d2 | ||
|
|
d3af80145b | ||
|
|
1c2bdbf310 | ||
|
|
f8cdbf8d74 | ||
|
|
f195a44707 | ||
|
|
cb59bea178 | ||
|
|
7680b0eb91 | ||
|
|
08e0ae7e83 | ||
|
|
3265228ce6 |
71
Dockerfile
Normal file
71
Dockerfile
Normal file
@@ -0,0 +1,71 @@
|
||||
# syntax=docker/dockerfile:1
# ── Build stage: Python deps ────────────────────────────────────────────
FROM python:3.12-slim AS builder

WORKDIR /build

RUN apt-get update && apt-get install -y --no-install-recommends \
      curl \
      gcc \
      libpq-dev \
    && rm -rf /var/lib/apt/lists/*

# Copy only the dependency manifests first so these layers stay cached
# until a pyproject.toml actually changes.
COPY apps/api/pyproject.toml ./api/pyproject.toml
COPY apps/scanner/pyproject.toml ./scanner/pyproject.toml

RUN pip install --no-cache-dir --prefix=/install api/.
RUN pip install --no-cache-dir --prefix=/install scanner/. \
    && PYTHONPATH=/install/lib/python3.12/site-packages \
       /install/bin/playwright install chromium --with-deps

# ── Build stage: banner bundle ─────────────────────────────────────────
FROM node:20-slim AS banner-builder
WORKDIR /build/banner
COPY apps/banner/package.json apps/banner/package-lock.json ./
RUN npm ci
COPY apps/banner/ .
RUN npm run build

# ── Build stage: admin UI ──────────────────────────────────────────────
FROM node:20-slim AS admin-builder
WORKDIR /build/admin
COPY apps/admin-ui/package.json apps/admin-ui/package-lock.json ./
RUN npm ci
COPY apps/admin-ui/ .
# Banner bundle is served from the admin UI's web root (the loader derives
# the bundle URL from its own origin).
COPY --from=banner-builder /build/banner/dist/ ./public/
RUN npx vite build

# ── Runtime stage ──────────────────────────────────────────────────────
FROM python:3.12-slim AS runtime

WORKDIR /app

RUN apt-get update && apt-get install -y --no-install-recommends \
      curl \
      libpq5 \
      nginx \
      postgresql-client \
      supervisor \
      tini \
    && rm -rf /var/lib/apt/lists/*

# Copy Python deps (and console scripts) from the builder stage.
COPY --from=builder /install /usr/local

# Playwright downloads its browsers into the build stage's home directory;
# without this copy the scanner process cannot launch Chromium at runtime.
# NOTE(review): `--with-deps` installed Chromium's *system* libraries in the
# builder stage only — if the scanner is ever enabled, the runtime stage
# also needs `playwright install-deps chromium` (or the equivalent apt
# packages). Confirm before flipping the scanner on.
COPY --from=builder /root/.cache/ms-playwright /root/.cache/ms-playwright

# Copy application code.
COPY apps/api/src ./src
COPY apps/api/alembic ./alembic
COPY apps/api/alembic.ini ./alembic.ini
COPY apps/scanner/src ./src_scanner

# Merge the scanner package into the api source tree so both run from /app/src.
RUN if [ -d src_scanner ]; then \
      cp -r src_scanner/* src/ 2>/dev/null || true; \
    fi

# Copy built Admin UI static files (served by nginx).
COPY --from=admin-builder /build/admin/dist /var/www/html

# Copy configs.
COPY apps/admin-ui/nginx.conf /etc/nginx/conf.d/default.conf
COPY supervisord.conf /etc/supervisord.conf
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

# nginx fronts the SPA on 80 and proxies API traffic to uvicorn on 8000.
EXPOSE 80

# /health is answered directly by nginx, so this stays cheap.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
  CMD curl -f http://localhost/health || exit 1

# entrypoint.sh waits for postgres and runs migrations, then exec's the CMD:
# tini as PID 1 → supervisord managing nginx / uvicorn / celery.
ENTRYPOINT ["/entrypoint.sh"]
CMD ["/usr/bin/tini", "--", "supervisord", "-c", "/etc/supervisord.conf"]
|
||||
@@ -1,52 +0,0 @@
|
||||
# ── Build stage ──────────────────────────────────────────────────────
|
||||
FROM python:3.12-slim AS builder
|
||||
|
||||
WORKDIR /build
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
gcc libpq-dev curl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy pyproject.toml for both api and scanner
|
||||
COPY apps/api/pyproject.toml ./api/pyproject.toml
|
||||
COPY apps/scanner/pyproject.toml ./scanner/pyproject.toml
|
||||
|
||||
# Install API dependencies
|
||||
RUN pip install --no-cache-dir --prefix=/install api/.
|
||||
|
||||
# Install Scanner dependencies (Playwright + Chromium)
|
||||
RUN pip install --no-cache-dir --prefix=/install scanner/. \
|
||||
&& playwright install chromium --with-deps
|
||||
|
||||
# ── Runtime stage ────────────────────────────────────────────────────
|
||||
FROM python:3.12-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
libpq5 curl tini \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& apt-get clean
|
||||
|
||||
# Copy installed dependencies from builder
|
||||
COPY --from=builder /install /usr/local
|
||||
|
||||
# Copy application code
|
||||
COPY apps/api/src ./src
|
||||
COPY apps/scanner/src ./src_scanner
|
||||
COPY supervisord.conf /etc/supervisord.conf
|
||||
|
||||
# Move scanner source into api structure
|
||||
RUN if [ -d src_scanner ]; then \
|
||||
cp -r src_scanner/* src/ 2>/dev/null || true; \
|
||||
fi
|
||||
|
||||
# Healthcheck for API
|
||||
HEALTHCHECK --interval=30s --timeout=5s --start-period=30s --retries=3 \
|
||||
CMD curl -f http://localhost:8000/health || exit 1
|
||||
|
||||
# Use tini as init system for proper signal handling
|
||||
ENTRYPOINT ["/usr/bin/tini", "--"]
|
||||
|
||||
# supervisord manages multiple processes
|
||||
CMD ["supervisord", "-c", "/etc/supervisord.conf"]
|
||||
@@ -1,14 +1,29 @@
|
||||
server {
|
||||
worker_processes auto;
|
||||
pid /run/nginx.pid;
|
||||
error_log /var/log/nginx/error.log warn;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
access_log /var/log/nginx/access.log;
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
root /usr/share/nginx/html;
|
||||
root /var/www/html;
|
||||
index index.html;
|
||||
|
||||
# Banner entry points — cross-origin script loads from customer
|
||||
# sites, so they need permissive CORS. Served from the web root
|
||||
# because the loader derives the bundle URL from its own origin
|
||||
# (see apps/banner/src/loader.ts). Declared before the SPA
|
||||
# fallback so nginx doesn't rewrite them to index.html when the
|
||||
# files aren't yet built in dev.
|
||||
# Health check endpoint
|
||||
location = /health {
|
||||
access_log off;
|
||||
return 200 "nginx ok\n";
|
||||
add_header Content-Type text/plain;
|
||||
}
|
||||
|
||||
# Banner entry points
|
||||
location = /consent-loader.js {
|
||||
add_header Access-Control-Allow-Origin "*" always;
|
||||
add_header Access-Control-Allow-Methods "GET, OPTIONS" always;
|
||||
@@ -23,27 +38,33 @@ server {
|
||||
try_files $uri =404;
|
||||
}
|
||||
|
||||
# SPA fallback — serve index.html for all other routes
|
||||
location / {
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
|
||||
# Proxy API requests to the backend
|
||||
# Uses Docker's embedded DNS with a variable so nginx resolves at request
|
||||
# time rather than at startup — prevents crash if api is temporarily down.
|
||||
# Proxy API requests to FastAPI backend — strip /api prefix
|
||||
location /api/ {
|
||||
resolver 127.0.0.11 valid=10s;
|
||||
set $upstream http://api:8000;
|
||||
proxy_pass $upstream;
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
# Cache static assets
|
||||
location /docs {
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_set_header Host $host;
|
||||
}
|
||||
|
||||
location /openapi.json {
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_set_header Host $host;
|
||||
}
|
||||
|
||||
# SPA fallback
|
||||
location / {
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
|
||||
location /assets/ {
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,22 +1,28 @@
|
||||
import os
|
||||
from logging.config import fileConfig
|
||||
from urllib.parse import urlparse, parse_qs, urlencode, unquote
|
||||
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
from sqlalchemy import create_engine, pool
|
||||
|
||||
from alembic import context
|
||||
from src.models import Base
|
||||
|
||||
# Alembic Config object
|
||||
config = context.config
|
||||
|
||||
# Override sqlalchemy.url from environment if set
|
||||
database_url = os.environ.get("DATABASE_URL")
|
||||
if database_url:
|
||||
# Alembic needs the synchronous driver
|
||||
database_url = database_url.replace("postgresql+asyncpg://", "postgresql://")
|
||||
config.set_main_option("sqlalchemy.url", database_url)
|
||||
raw_url = os.environ.get("DATABASE_URL", "")
|
||||
if raw_url:
|
||||
# Convert async driver to sync driver
|
||||
url = raw_url.replace("postgresql+asyncpg://", "postgresql://")
|
||||
url = unquote(url)
|
||||
# Strip sslmode (not supported by psycopg2)
|
||||
parsed = urlparse(url)
|
||||
if parsed.query:
|
||||
params = parse_qs(parsed.query)
|
||||
params.pop("sslmode", None)
|
||||
new_query = urlencode(params, doseq=True)
|
||||
url = parsed._replace(query=new_query).geturl()
|
||||
config.set_main_option("sqlalchemy.url", url)
|
||||
|
||||
# Set up Python logging from the config file
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
@@ -24,7 +30,6 @@ target_metadata = Base.metadata
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in 'offline' mode."""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url,
|
||||
@@ -32,25 +37,26 @@ def run_migrations_offline() -> None:
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
connectable = engine_from_config(
|
||||
config.get_section(config.config_ini_section, {}),
|
||||
prefix="sqlalchemy.",
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
# Use DATABASE_URL env directly, properly converted for psycopg2
|
||||
raw_url = os.environ.get("DATABASE_URL", "")
|
||||
url = raw_url.replace("postgresql+asyncpg://", "postgresql://")
|
||||
url = unquote(url)
|
||||
# Strip sslmode
|
||||
parsed = urlparse(url)
|
||||
if parsed.query:
|
||||
params = parse_qs(parsed.query)
|
||||
params.pop("sslmode", None)
|
||||
new_query = urlencode(params, doseq=True)
|
||||
url = parsed._replace(query=new_query).geturl()
|
||||
|
||||
connectable = create_engine(url, poolclass=pool.NullPool)
|
||||
with connectable.connect() as connection:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
target_metadata=target_metadata,
|
||||
)
|
||||
|
||||
context.configure(connection=connection, target_metadata=target_metadata)
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
@@ -117,6 +117,40 @@ class Settings(BaseSettings):
|
||||
rate_limit_enabled: bool = True
|
||||
rate_limit_per_minute: int = 120
|
||||
|
||||
@model_validator(mode="after")
def _normalize_database_url(self) -> "Settings":
    """Auto-fix common database URL schemes for asyncpg compatibility.

    Platforms like Easypanel emit ``DATABASE_URL`` as ``postgres://...``
    (a shortcut / legacy scheme), while SQLAlchemy's async engine needs
    the ``postgresql`` dialect name plus the ``+asyncpg`` driver suffix.
    Normalise both spellings here so the rest of the codebase can always
    assume ``postgresql+asyncpg://``.

    Also drops any ``sslmode`` query parameter — that is a psycopg2-only
    option which asyncpg rejects with a TypeError.
    """
    url = self.database_url

    # Rewrite the scheme exactly once, at the front of the URL.
    for legacy_prefix in ("postgres://", "postgresql://"):
        if url.startswith(legacy_prefix):
            url = "postgresql+asyncpg://" + url[len(legacy_prefix):]
            break

    # Remove the psycopg2-only sslmode parameter from the query string.
    if "?sslmode=" in url or "&sslmode=" in url:
        from urllib.parse import parse_qs, urlencode, urlparse

        pieces = urlparse(url)
        query = parse_qs(pieces.query, keep_blank_values=True)
        query.pop("sslmode", None)
        url = pieces._replace(query=urlencode(query, doseq=True)).geturl()

    self.database_url = url
    return self
|
||||
|
||||
@model_validator(mode="after")
|
||||
def _check_production_safety(self) -> "Settings":
|
||||
"""Refuse to start with unsafe defaults in non-dev environments."""
|
||||
|
||||
39
entrypoint.sh
Normal file
39
entrypoint.sh
Normal file
@@ -0,0 +1,39 @@
|
||||
#!/bin/bash
# Container entrypoint: block until postgres is reachable, apply alembic
# migrations, then exec the real command (tini → supervisord).
#
# bash (not sh) on purpose: the raw-TCP fallback below uses bash's
# /dev/tcp pseudo-device, which dash (Debian's /bin/sh) does not support —
# under sh the fallback silently always failed.
set -e

# Extract host and port from DATABASE_URL
# (e.g. postgresql+asyncpg://user:pass@host:5432/db).
DB_HOST=$(echo "$DATABASE_URL" | sed -E 's|.*@([^/:]+).*|\1|')
DB_PORT=$(echo "$DATABASE_URL" | sed -E 's|.*@[^/:]+:([0-9]+)/.*|\1|')

# If the URL has no explicit port the second sed leaves its input
# unchanged, so DB_PORT equals DB_HOST — fall back to the postgres default.
if [ -z "$DB_PORT" ] || [ "$DB_PORT" = "$DB_HOST" ]; then
  DB_PORT="5432"
fi

echo "Waiting for postgres at $DB_HOST:$DB_PORT ..."

max_retries=30
counter=0
# Prefer pg_isready (from postgresql-client); fall back to a raw TCP probe.
until pg_isready -h "$DB_HOST" -p "$DB_PORT" -q 2>/dev/null \
   || (echo > "/dev/tcp/$DB_HOST/$DB_PORT") 2>/dev/null; do
  counter=$((counter + 1))
  if [ "$counter" -ge "$max_retries" ]; then
    # Each retry sleeps 2s, so the true wait is 2 * max_retries seconds.
    echo "ERROR: postgres at $DB_HOST:$DB_PORT not ready after $((max_retries * 2))s"
    exit 1
  fi
  echo " postgres not ready, retrying in 2s ... ($counter/$max_retries)"
  sleep 2
done

echo "postgres is ready!"

# Run alembic migrations (skipped when the migrations dir isn't shipped).
if [ -f /app/alembic/env.py ]; then
  echo "Running database migrations ..."
  python -m alembic upgrade head
  echo "Migrations complete!"
fi

# Replace this shell so the real command runs directly under tini (PID 1
# signal handling and zombie reaping stay intact).
exec "$@"
|
||||
@@ -4,8 +4,20 @@ logfile=/var/log/supervisor/supervisord.log
|
||||
pidfile=/var/run/supervisord.pid
|
||||
user=root
|
||||
|
||||
[program:nginx]
|
||||
command=nginx -g "daemon off;" -c /etc/nginx/conf.d/default.conf
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stderr
|
||||
stderr_logfile_maxbytes=0
|
||||
stopwaitsecs=5
|
||||
killasgroup=true
|
||||
priority=100
|
||||
|
||||
[program:api]
|
||||
command=sh -c "uvicorn src.main:app --host 0.0.0.0 --port 8000 --workers ${API_WORKERS:-4} --access-log --proxy-headers --forwarded-allow-ips '*'"
|
||||
command=sh -c "uvicorn src.main:app --host 127.0.0.1 --port 8000 --workers ${API_WORKERS:-4} --access-log --proxy-headers --forwarded-allow-ips '*'"
|
||||
directory=/app
|
||||
autostart=true
|
||||
autorestart=true
|
||||
@@ -15,7 +27,7 @@ stderr_logfile=/dev/stderr
|
||||
stderr_logfile_maxbytes=0
|
||||
stopwaitsecs=10
|
||||
killasgroup=true
|
||||
priority=100
|
||||
priority=200
|
||||
|
||||
[program:worker]
|
||||
command=celery -A src.celery_app worker --loglevel=info --concurrency=2
|
||||
@@ -28,7 +40,7 @@ stderr_logfile=/dev/stderr
|
||||
stderr_logfile_maxbytes=0
|
||||
stopwaitsecs=30
|
||||
killasgroup=true
|
||||
priority=200
|
||||
priority=300
|
||||
|
||||
[program:beat]
|
||||
command=celery -A src.celery_app beat --loglevel=info
|
||||
@@ -41,18 +53,18 @@ stderr_logfile=/dev/stderr
|
||||
stderr_logfile_maxbytes=0
|
||||
stopwaitsecs=10
|
||||
killasgroup=true
|
||||
priority=300
|
||||
priority=400
|
||||
|
||||
[program:scanner]
|
||||
command=python -m src.worker
|
||||
directory=/app
|
||||
autostart=${ENABLE_SCANNER:-false}
|
||||
autorestart=true
|
||||
autostart=false
|
||||
autorestart=false
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stderr
|
||||
stderr_logfile_maxbytes=0
|
||||
stopwaitsecs=10
|
||||
killasgroup=true
|
||||
priority=400
|
||||
priority=500
|
||||
environment=PYTHONUNBUFFERED="1"
|
||||
|
||||
Reference in New Issue
Block a user