# Changelog (from commit message):
# - Replace Astro DB with better-sqlite3 (bypasses remote SQLite limitation)
# - Add consent API with auto-create table pattern
# - Update admin dashboard to fetch from API instead of Astro DB
# - Add IP hashing and rate limiting for GDPR compliance
# - Add seo-utils-mcp-guide skill
# - Update legal templates with business placeholders
#!/usr/bin/env python3
|
||
"""
|
||
Website Creator - Generate PDPA-compliant Astro websites
|
||
|
||
Creates complete Astro projects with:
|
||
- Bilingual support (Thai/English)
|
||
- Umami Analytics integration (auto-create)
|
||
- GA4 Analytics support (existing or new)
|
||
- Google Search Console setup
|
||
- Cookie consent management
|
||
- Consent logging database (Astro DB)
|
||
- PDPA-compliant legal pages
|
||
- Easypanel deployment (manual sync after local preview)
|
||
|
||
Usage:
|
||
python3 create_astro_website.py \\
|
||
--name "Deal Plus Tech" \\
|
||
--type "corporate" \\
|
||
--languages "th,en" \\
|
||
--output "./dealplustech-website"
|
||
|
||
# Then preview locally, and when ready:
|
||
# Script will ask: "Sync to Gitea and deploy?"
|
||
"""
|
||
|
||
import os
|
||
import sys
|
||
import argparse
|
||
import shutil
|
||
import subprocess
|
||
import json
|
||
import time
|
||
from pathlib import Path
|
||
from datetime import datetime
|
||
from urllib.parse import urlparse
|
||
|
||
|
||
# ============================================================================
|
||
# INTERACTIVE SETUP FUNCTIONS
|
||
# ============================================================================
|
||
|
||
|
||
def ask_analytics_setup() -> dict:
    """
    Interactive analytics setup workflow.

    Walks the user through two numbered steps on stdin:
      1. Google Search Console: optionally record a service-account
         credentials file path, or mark it for later manual setup.
      2. Analytics platform: exactly ONE of Umami (auto-create, driven by
         UMAMI_* variables from the unified .env) or Google Analytics 4
         (new property => manual instructions; existing property => IDs
         taken from .env or typed in).

    Returns:
        dict: Analytics configuration with keys:
            search_console (dict | None), analytics_type ('umami' | 'ga4' | None),
            umami_auto_create (bool), umami_website_id (always None here —
            filled in later by the Umami auto-setup step in main()),
            ga4_property_id (str | None), ga4_credentials_path (str | None),
            ga4_existing (bool).
    """
    print("\n" + "=" * 60)
    print("📊 ANALYTICS SETUP")
    print("=" * 60)

    # All keys pre-declared so downstream consumers can .get() safely.
    config = {
        "search_console": None,
        "analytics_type": None,  # 'umami' or 'ga4'
        "umami_auto_create": False,
        "umami_website_id": None,
        "ga4_property_id": None,
        "ga4_credentials_path": None,
        "ga4_existing": False,
    }

    # Step 1: Google Search Console (for all websites)
    print("\n1️⃣ Google Search Console Setup")
    print(" GSC is recommended for all websites for SEO monitoring.")

    gsc_choice = (
        input("\n Do you want to setup Google Search Console? (y/n): ")
        .strip()
        .lower()
    )

    if gsc_choice == "y":
        print("\n GSC Setup Options:")
        print(" 1. I'll add it manually later (skip for now)")
        print(" 2. I have service account credentials file")

        gsc_method = input("\n Choose option (1-2): ").strip()

        if gsc_method == "2":
            gsc_path = input(" Enter path to GSC credentials file: ").strip()
            # Validate the file exists now; otherwise degrade to "setup later"
            # instead of storing a dead path.
            if os.path.exists(gsc_path):
                config["search_console"] = {
                    "credentials_path": gsc_path,
                    "setup_later": False,
                }
                print(" ✓ GSC credentials loaded")
            else:
                print(" ⚠ File not found, will setup later")
                config["search_console"] = {"setup_later": True}
        else:
            config["search_console"] = {"setup_later": True}
            print(" ✓ Will setup later")
    else:
        # NOTE: config["search_console"] stays None when GSC is skipped —
        # consumers must tolerate None here.
        print(" ⏭️ Skipping GSC setup")

    # Step 2: Choose Analytics Type (Umami OR GA4)
    print("\n2️⃣ Analytics Platform")
    print(" Choose ONE analytics platform:")
    print(" 1. Umami Analytics (recommended for most users)")
    print(" - Privacy-focused, self-hosted")
    print(" - Simple setup, auto-created")
    print(" - Good for most websites")
    print("\n 2. Google Analytics 4 (for advanced users)")
    print(" - Full-featured analytics")
    print(" - Requires Google account")
    print(" - Good for existing GA4 users")

    analytics_choice = input("\n Choose analytics (1-2): ").strip()

    if analytics_choice == "1":
        # Umami setup
        config["analytics_type"] = "umami"
        print("\n 📈 Umami Analytics Setup")

        # Check if Umami credentials are configured in the shared .env
        # (import is local so the module loads even without the skills pkg).
        from skills._env_loader import load_unified_env

        load_unified_env()

        umami_url = os.getenv("UMAMI_URL", "")
        umami_username = os.getenv("UMAMI_USERNAME", "")
        umami_password = os.getenv("UMAMI_PASSWORD", "")

        if umami_url and umami_username and umami_password:
            print(" ✓ Umami credentials found in .env")
            print(" ✓ Will auto-create Umami website for this project")
            config["umami_auto_create"] = True
        else:
            # Non-fatal: the site still works; Umami can be wired up by hand.
            print(" ⚠ Umami credentials not configured in .env")
            print(" ⏭️ Skipping Umami setup (can add manually later)")

    elif analytics_choice == "2":
        # GA4 setup
        config["analytics_type"] = "ga4"
        print("\n 🔍 Google Analytics 4 Setup")
        print(" 1. Create new GA4 property (auto-setup)")
        print(" 2. Use existing GA4 property (manual setup)")

        ga4_choice = input("\n Choose option (1-2): ").strip()

        if ga4_choice == "1":
            # Auto-creation is not implemented; user gets manual instructions.
            print("\n ⚠ Auto-creating GA4 properties requires API setup.")
            print(" ⏭️ Will provide instructions for manual setup")
            config["ga4_existing"] = False
        else:
            print("\n Please provide your existing GA4 details:")

            # Check unified .env for GA4 credentials
            from skills._env_loader import load_unified_env

            load_unified_env()

            ga4_property_id = os.getenv("GA4_PROPERTY_ID", "")
            ga4_credentials_path = os.getenv("GA4_CREDENTIALS_PATH", "")

            if ga4_property_id:
                # Offer the globally-configured property before prompting.
                print(f" Found GA4 Property ID in .env: {ga4_property_id[:20]}...")
                use_global = (
                    input(" Use this for this project? (y/n): ").strip().lower()
                )

                if use_global == "y":
                    config["ga4_property_id"] = ga4_property_id
                    config["ga4_credentials_path"] = ga4_credentials_path
                    print(" ✓ Using global GA4 credentials")
                else:
                    config["ga4_property_id"] = input(
                        " Enter GA4 Property ID: "
                    ).strip()
                    config["ga4_credentials_path"] = input(
                        " Enter GA4 credentials file path: "
                    ).strip()
            else:
                config["ga4_property_id"] = input(
                    " Enter GA4 Property ID (G-XXXXXXXXXX): "
                ).strip()
                config["ga4_credentials_path"] = input(
                    " Enter GA4 credentials file path: "
                ).strip()

            config["ga4_existing"] = True
    else:
        print(" ⏭️ Skipping analytics setup")

    return config
|
||
|
||
|
||
# ============================================================================
|
||
# TEMPLATES (abbreviated for brevity)
|
||
# ============================================================================
|
||
|
||
ASTRO_CONFIG_TEMPLATE = """import {{ defineConfig }} from 'astro/config';
|
||
import tailwindcss from '@tailwindcss/vite';
|
||
import sitemap from '@astrojs/sitemap';
|
||
|
||
export default defineConfig({{
|
||
site: '{site_url}',
|
||
output: 'hybrid',
|
||
i18n: {{
|
||
locales: [{locales}],
|
||
defaultLocale: '{default_locale}',
|
||
routing: {{
|
||
prefixDefaultLocale: false,
|
||
fallbackType: 'rewrite',
|
||
}},
|
||
fallback: {{
|
||
th: 'en',
|
||
}},
|
||
}},
|
||
integrations: [
|
||
tailwindcss(),
|
||
sitemap({{
|
||
i18n: {{
|
||
defaultLocale: '{default_locale}',
|
||
}},
|
||
}}),
|
||
],
|
||
vite: {{
|
||
optimizeDeps: {{
|
||
exclude: ['better-sqlite3']
|
||
}},
|
||
build: {{
|
||
rollupOptions: {{
|
||
external: ['better-sqlite3']
|
||
}}
|
||
}}
|
||
}}
|
||
}});
|
||
"""
|
||
|
||
PACKAGE_JSON_TEMPLATE = """{{
|
||
"name": "{name}",
|
||
"type": "module",
|
||
"version": "1.0.0",
|
||
"scripts": {{
|
||
"dev": "astro dev",
|
||
"build": "astro build",
|
||
"preview": "astro preview",
|
||
"astro": "astro"
|
||
}},
|
||
"dependencies": {{
|
||
"astro": "^5.17.1",
|
||
"@astrojs/sitemap": "^3.2.0",
|
||
"@tailwindcss/vite": "^4.2.1",
|
||
"tailwindcss": "^4.2.1",
|
||
"better-sqlite3": "^11.0.0"
|
||
}},
|
||
"devDependencies": {{
|
||
"@types/better-sqlite3": "^7.6.8"
|
||
}}
|
||
}}
|
||
"""
|
||
|
||
# ==============================================================================
|
||
# CONSENT API TEMPLATES (using better-sqlite3 directly)
|
||
# ==============================================================================
|
||
|
||
CONSENT_DB_TEMPLATE = """import Database from 'better-sqlite3';
|
||
import {{ join }} from 'path';
|
||
import {{ mkdirSync, existsSync }} from 'fs';
|
||
|
||
const DATA_DIR = join(process.cwd(), 'data');
|
||
const DB_PATH = join(DATA_DIR, 'consent.db');
|
||
|
||
export function getDb() {{
|
||
// 1. Create directory if not exists
|
||
if (!existsSync(DATA_DIR)) {{
|
||
mkdirSync(DATA_DIR, {{ recursive: true }});
|
||
}}
|
||
|
||
// 2. Open database
|
||
const db = new Database(DB_PATH);
|
||
|
||
// 3. Auto-create table (works with remote SQLite!)
|
||
db.exec(`
|
||
CREATE TABLE IF NOT EXISTS ConsentLog (
|
||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||
sessionId TEXT UNIQUE NOT NULL,
|
||
timestamp TEXT NOT NULL,
|
||
essential INTEGER NOT NULL DEFAULT 0,
|
||
analytics INTEGER NOT NULL DEFAULT 0,
|
||
marketing INTEGER NOT NULL DEFAULT 0,
|
||
policyVersion TEXT NOT NULL,
|
||
ipHash TEXT,
|
||
userAgent TEXT
|
||
)
|
||
`);
|
||
|
||
return db;
|
||
}}
|
||
|
||
// Rate limiting map
|
||
const rateLimitMap = new Map<string, {{ count: number; resetTime: number }}>();
|
||
const RATE_LIMIT = 10;
|
||
const RATE_WINDOW = 60000; // 1 minute
|
||
|
||
export function checkRateLimit(ip: string): boolean {{
|
||
const now = Date.now();
|
||
const record = rateLimitMap.get(ip);
|
||
|
||
if (!record || now > record.resetTime) {{
|
||
rateLimitMap.set(ip, {{ count: 1, resetTime: now + RATE_WINDOW }});
|
||
return true;
|
||
}}
|
||
|
||
if (record.count >= RATE_LIMIT) {{
|
||
return false;
|
||
}}
|
||
|
||
record.count++;
|
||
return true;
|
||
}}
|
||
|
||
// IP Hashing for privacy (GDPR compliance)
|
||
export async function hashIP(ip: string): Promise<string> {{
|
||
try {{
|
||
if (crypto.subtle) {{
|
||
const hashBuffer = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(ip));
|
||
const hashArray = Array.from(new Uint8Array(hashBuffer));
|
||
const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
|
||
return hashHex.substring(0, 16);
|
||
}}
|
||
}} catch {{}}
|
||
return `fallback-${Date.now()}`;
|
||
}}
|
||
"""
|
||
|
||
# GET/POST /api/consent endpoint template (TypeScript, Astro APIRoute).
# Written to src/pages/api/consent/index.ts by create_project(). Doubled braces
# escape literal TS braces; there are no str.format() placeholders.
# FIX: the import of the db helper must climb THREE levels from
# src/pages/api/consent/ to reach src/lib/db.ts — the previous
# '../../../../lib/db' pointed at <project root>/lib/db, which does not exist.
# NOTE(review): sessionId is UNIQUE in the schema, so a repeat POST for the
# same session will throw and surface as a 500 — confirm intended re-consent
# behavior (INSERT OR REPLACE may be wanted).
CONSENT_API_INDEX_TEMPLATE = """import type {{ APIRoute }} from 'astro';
import {{ getDb, checkRateLimit, hashIP }} from '../../../lib/db';

export const GET: APIRoute = async ({{ clientAddress }}) => {{
  // Rate limit check
  const ip = clientAddress || 'unknown';
  if (!checkRateLimit(ip)) {{
    return new Response(
      JSON.stringify({{ error: 'Rate limit exceeded' }}),
      {{ status: 429, headers: {{ 'Content-Type': 'application/json' }} }}
    );
  }}

  try {{
    const db = getDb();
    const logs = db.prepare(`
      SELECT * FROM ConsentLog
      ORDER BY timestamp DESC
      LIMIT 100
    `).all();
    db.close();

    return new Response(
      JSON.stringify({{ logs }}),
      {{ status: 200, headers: {{ 'Content-Type': 'application/json' }} }}
    );
  }} catch (error) {{
    return new Response(
      JSON.stringify({{ error: 'Failed to fetch logs' }}),
      {{ status: 500 }}
    );
  }}
}};

export const POST: APIRoute = async ({{ request, clientAddress }}) => {{
  try {{
    const body = await request.json();
    const {{ sessionId, essential, analytics, marketing, policyVersion, userAgent }} = body;

    // Validate required fields
    if (!sessionId || essential === undefined || !policyVersion) {{
      return new Response(
        JSON.stringify({{ error: 'Missing required fields' }}),
        {{ status: 400, headers: {{ 'Content-Type': 'application/json' }} }}
      );
    }}

    const db = getDb();
    const ipHash = await hashIP(clientAddress || 'unknown');
    const timestamp = new Date().toISOString();

    // Insert with prepared statement (prevents SQL injection)
    const stmt = db.prepare(`
      INSERT INTO ConsentLog (sessionId, timestamp, essential, analytics, marketing, policyVersion, ipHash, userAgent)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?)
    `);
    stmt.run(
      sessionId,
      timestamp,
      essential ? 1 : 0,
      analytics ? 1 : 0,
      marketing ? 1 : 0,
      policyVersion,
      ipHash,
      userAgent || 'unknown'
    );
    db.close();

    return new Response(
      JSON.stringify({{ success: true, sessionId }}),
      {{ status: 201, headers: {{ 'Content-Type': 'application/json' }} }}
    );
  }} catch (error) {{
    return new Response(
      JSON.stringify({{ error: 'Failed to log consent' }}),
      {{ status: 500 }}
    );
  }}
}};
"""
|
||
|
||
# DELETE /api/consent/[sessionId] endpoint template (TypeScript, Astro
# APIRoute). Written to src/pages/api/consent/[sessionId].ts; create_project()
# unescapes the doubled braces before writing.
# FIX: the db helper import must climb THREE levels from
# src/pages/api/consent/ to reach src/lib/db.ts — the previous
# '../../../../lib/db' resolved to <project root>/lib/db, which does not exist.
CONSENT_API_DELETE_TEMPLATE = """import type {{ APIRoute }} from 'astro';
import {{ getDb }} from '../../../lib/db';

export const DELETE: APIRoute = async ({{ params }}) => {{
  const {{ sessionId }} = params;

  if (!sessionId) {{
    return new Response(
      JSON.stringify({{ error: 'Session ID required' }}),
      {{ status: 400, headers: {{ 'Content-Type': 'application/json' }} }}
    );
  }}

  try {{
    const db = getDb();
    const stmt = db.prepare('DELETE FROM ConsentLog WHERE sessionId = ?');
    const result = stmt.run(sessionId);
    db.close();

    if (result.changes === 0) {{
      return new Response(
        JSON.stringify({{ error: 'Record not found' }}),
        {{ status: 404 }}
      );
    }}

    return new Response(
      JSON.stringify({{ success: true }}),
      {{ status: 200, headers: {{ 'Content-Type': 'application/json' }} }}
    );
  }} catch (error) {{
    return new Response(
      JSON.stringify({{ error: 'Failed to delete' }}),
      {{ status: 500 }}
    );
  }}
}};
"""
|
||
|
||
# ... (rest of templates remain the same)
|
||
|
||
|
||
# ============================================================================
|
||
# MAIN FUNCTION
|
||
# ============================================================================
|
||
|
||
|
||
def main() -> None:
    """CLI entry point: parse options, scaffold the site, wire analytics,
    then optionally sync to Gitea and deploy to Easypanel.

    Flow: argparse -> load shared .env -> (interactive) analytics setup ->
    create_project() -> save_analytics_config() -> optional Umami auto-create
    -> print next steps -> optional Gitea sync + Easypanel deploy + monitor.
    """
    parser = argparse.ArgumentParser(description="Create PDPA-compliant Astro website")
    parser.add_argument("--name", required=True, help="Website name")
    parser.add_argument(
        "--type",
        default="corporate",
        choices=["corporate", "portfolio", "landing", "blog", "ecommerce"],
        help="Website type",
    )
    parser.add_argument(
        "--languages", default="th,en", help="Languages (comma-separated): th, en"
    )
    parser.add_argument(
        "--primary-color", default="#2563eb", help="Primary color (hex)"
    )
    parser.add_argument(
        "--secondary-color", default="#1e40af", help="Secondary color (hex)"
    )
    parser.add_argument(
        "--features",
        default="blog,contact",
        help="Features (comma-separated): blog, products, contact, portfolio",
    )
    parser.add_argument("--umami-id", default="", help="Umami Website ID")
    parser.add_argument(
        "--umami-domain", default="analytics.example.com", help="Umami domain"
    )
    parser.add_argument("--output", "-o", default=".", help="Output directory")
    parser.add_argument(
        "--no-interactive",
        action="store_true",
        help="Skip interactive setup (use defaults)",
    )

    args = parser.parse_args()

    # Auto-generate admin password from project folder name
    # NOTE(review): a folder-name-derived password is guessable — confirm this
    # is only a placeholder for local previews.
    args.admin_password = Path(args.output).name.replace(" ", "").lower()

    # Load unified credentials (shared .env three directories up, next to the
    # skills root; local import keeps python-dotenv an optional dependency).
    from dotenv import load_dotenv

    load_dotenv(os.path.join(os.path.dirname(__file__), "../../../.env"))

    # Get Umami credentials for auto-setup; auto-setup only when all three
    # values are present.
    args.umami_url = os.getenv("UMAMI_URL", "")
    args.umami_username = os.getenv("UMAMI_USERNAME", "")
    args.umami_password = os.getenv("UMAMI_PASSWORD", "")
    args.auto_setup_umami = bool(
        args.umami_url and args.umami_username and args.umami_password
    )

    languages = [lang.strip() for lang in args.languages.split(",")]
    # English wins as default locale when present; otherwise first listed.
    default_locale = "en" if "en" in languages else languages[0]

    features = [f.strip() for f in args.features.split(",")]

    print(f"Creating website: {args.name}")
    print(f"Type: {args.type}")
    print(f"Languages: {languages}")
    print(f"Features: {features}")
    print(f"Output: {args.output}")

    # Interactive analytics setup (if not in no-interactive mode)
    analytics_config = None
    if not args.no_interactive:
        analytics_config = ask_analytics_setup()

    # Create project structure
    create_project(args, languages, default_locale, features)

    # Save analytics configuration to project
    if analytics_config:
        save_analytics_config(args.output, analytics_config)

    # Auto-setup Umami if credentials provided and the user either skipped the
    # interactive step or explicitly chose Umami.
    umami_website_id = args.umami_id
    if args.auto_setup_umami and (
        not analytics_config or analytics_config.get("analytics_type") == "umami"
    ):
        print("\n📈 Setting up Umami Analytics...")
        try:
            from umami_integration import setup_umami_for_website

            website_domain = args.name.lower().replace(" ", "-") + ".moreminimore.com"
            success, result = setup_umami_for_website(
                args.umami_url,
                args.umami_username,
                args.umami_password,
                args.name,
                website_domain,
                args.output,
            )
            if success:
                umami_website_id = result["website_id"]
                print(f" ✓ Umami website created: {umami_website_id}")
            else:
                print(
                    f" ⚠ Umami setup skipped: {result.get('error', 'Unknown error')}"
                )
        except Exception as e:
            # Best-effort: Umami failures never abort site creation.
            print(f" ⚠ Umami setup failed: {e}")
            print(" Continuing without Umami...")

    print(f"\n✅ Website created successfully at: {args.output}")

    # Update .env with Umami ID if auto-setup succeeded (appends, so reruns
    # may accumulate duplicate entries).
    env_file = os.path.join(args.output, ".env")
    if os.path.exists(env_file) and umami_website_id:
        with open(env_file, "a", encoding="utf-8") as f:
            f.write(f"\n# Umami Analytics (auto-configured)\n")
            f.write(f"UMAMI_WEBSITE_ID={umami_website_id}\n")
        print(f" ✓ Umami ID added to .env")

    print("\nNext steps:")
    print(f" 1. cd {args.output}")
    print(" 2. npm install")
    print(" 3. Update .env with your credentials")
    print(" 4. npm run dev")

    # Always ask to sync (skip if no-interactive mode)
    print("")
    print("=" * 60)
    print("🏠 Website created locally!")
    print("=" * 60)
    print("")
    print("Preview at: http://localhost:4321")
    print("")

    # Ask if they want to sync to Gitea/Easypanel
    if args.no_interactive:
        print("✅ Done! Website is ready at:", args.output)
        print("To sync later, run the sync command manually.")
        return

    sync_choice = (
        input("Do you want to sync to Gitea and deploy to Easypanel? (y/n): ")
        .strip()
        .lower()
    )

    if sync_choice != "y":
        print("")
        print("✅ Done! Website is ready at:", args.output)
        print(
            "To sync later, run this script again or use gitea-sync/easypanel-deploy skills."
        )
        return

    print("")
    print("Proceeding with sync and deployment...")
    print("")

    # Step 1: Sync to Gitea
    # NOTE(review): args.name may contain spaces ("Deal Plus Tech") and is
    # passed verbatim as the repository name — confirm sync_repo slugifies it.
    print("📦 Step 1/3: Syncing to Gitea...")
    git_url = sync_to_gitea(args.output, args.name)

    # Step 2: Deploy to Easypanel
    print("")
    print("🚀 Step 2/3: Deploying to Easypanel...")
    deployment_url = deploy_to_easypanel(args.output, args.name, git_url)

    # Step 3: Verify and monitor
    print("")
    print("📊 Step 3/3: Monitoring deployment...")
    monitor_deployment(args.name)

    # Final output
    print("")
    print("=" * 60)
    print("✅ COMPLETE!")
    print("=" * 60)
    print("")
    print(f"📁 Website generated: {args.output}")
    print(f"🌐 Gitea Repository: {git_url.replace('.git', '')}")
    print(f"🚀 Easypanel Deployment: {deployment_url}")
    print("")
    print("📋 Next steps:")
    print(f" 1. Website is deploying to: {deployment_url}")
    print(f" 2. Check status at: https://panelwebsite.moreminimore.com")
    print(f" 3. Edit Umami config: cd {args.output} && nano .env")
    print("")
|
||
|
||
|
||
def save_analytics_config(output_path: str, config: dict) -> None:
    """Persist the analytics choices to ``context/data-services.json``.

    The generated project keeps a machine-readable record of which data
    services (GA4, GSC, Umami, DataForSEO) are enabled so later tooling can
    discover them.

    Args:
        output_path: Root directory of the generated website project.
        config: Dict produced by ``ask_analytics_setup()``.

    Fix: ``config["search_console"]`` is legitimately ``None`` when the user
    skips GSC setup; the previous ``config.get("search_console", {}).get(...)``
    raised AttributeError in that case because ``.get`` with a default does
    not replace an existing ``None`` value.
    """
    context_dir = os.path.join(output_path, "context")
    os.makedirs(context_dir, exist_ok=True)

    analytics_type = config.get("analytics_type")
    # Coerce None (GSC skipped) to {} so the lookups below are safe.
    search_console = config.get("search_console") or {}

    # Save data-services.json. ``or ""`` normalizes ids that are present but
    # None (e.g. umami_website_id before auto-creation) to the documented
    # empty-string default instead of JSON null.
    data_services = {
        "ga4": (
            {
                "enabled": True,
                "property_id": config.get("ga4_property_id") or "",
                "credentials_path": config.get("ga4_credentials_path") or "",
            }
            if analytics_type == "ga4"
            else {"enabled": False}
        ),
        "gsc": {
            "enabled": config.get("search_console") is not None,
            "site_url": "",
            "credentials_path": search_console.get("credentials_path", ""),
        },
        "umami": (
            {
                "enabled": True,
                "api_url": os.getenv("UMAMI_URL", ""),
                "website_id": config.get("umami_website_id") or "",
            }
            if analytics_type == "umami"
            else {"enabled": False}
        ),
        "dataforseo": {"enabled": False},
    }

    with open(
        os.path.join(context_dir, "data-services.json"), "w", encoding="utf-8"
    ) as f:
        json.dump(data_services, f, indent=2)

    print(" ✓ Analytics config saved to context/data-services.json")
|
||
|
||
|
||
# ============================================================================
|
||
# PROJECT CREATION FUNCTIONS
|
||
# ============================================================================
|
||
|
||
|
||
def create_project(args, languages, default_locale, features):
    """Create the Astro project structure with templates.

    Scaffolds the directory tree, copies bundled template files when present,
    renders the config templates, and writes the better-sqlite3 consent API,
    Dockerfile, and supporting config files.

    Args:
        args: Parsed CLI namespace (reads ``args.name`` and ``args.output``).
        languages: Locale codes, e.g. ``["th", "en"]``.
        default_locale: Locale served from the unprefixed routes.
        features: Selected feature list (currently informational only).

    Returns:
        Path: The created project root.

    Fix: the consent DB and index-API templates are authored with doubled
    braces (``{{``/``}}``) so they can sit next to ``str.format()`` templates;
    previously only the DELETE template was unescaped before writing, so the
    generated ``db.ts`` and ``index.ts`` contained literal ``{{``/``}}`` and
    were not valid TypeScript. All three are now unescaped.
    """

    def _unescape_braces(template: str) -> str:
        # Convert str.format() brace escapes back into literal TS braces.
        return template.replace("{{", "{").replace("}}", "}")

    output_path = Path(args.output)
    project_name = args.name.lower().replace(" ", "-")
    site_url = f"https://{project_name}.moreminimore.com"

    # Template directory ships alongside this script.
    script_dir = Path(__file__).parent
    template_dir = script_dir / "templates"

    print("\n📁 Creating project structure...")

    # Create directories
    dirs = [
        output_path / "public" / "images",
        output_path / "public" / "images" / "icons",
        output_path / "src" / "components" / "common",
        output_path / "src" / "components" / "consent",
        output_path / "src" / "components" / "ui",
        output_path / "src" / "layouts",
        output_path / "src" / "pages",
        output_path / "src" / "pages" / default_locale,
        output_path / "src" / "pages" / "admin",
        output_path / "src" / "pages" / "api" / "consent",
        output_path / "src" / "styles",
        output_path / "src" / "content" / "blog",
        output_path / "src" / "lib",
        output_path / "data",  # runtime home of consent.db
    ]

    for d in dirs:
        d.mkdir(parents=True, exist_ok=True)

    print(" ✓ Directory structure created")

    # Copy templates if they exist (missing template dir is tolerated so the
    # script still produces a minimal working project).
    if template_dir.exists():
        print(" 📦 Copying templates with IDs...")

        # Copy layouts, substituting the placeholder site name/URL in-place.
        layout_src = template_dir / "layouts" / "BaseLayout.astro"
        if layout_src.exists():
            content = layout_src.read_text(encoding="utf-8")
            content = content.replace(
                "const siteName = 'Website Name'", f"const siteName = '{args.name}'"
            )
            content = content.replace(
                "const siteUrl = 'https://example.com'", f"const siteUrl = '{site_url}'"
            )
            (output_path / "src" / "layouts" / "BaseLayout.astro").write_text(
                content, encoding="utf-8"
            )

        # Copy Header
        header_src = template_dir / "components" / "common" / "Header.astro"
        if header_src.exists():
            shutil.copy(
                header_src,
                output_path / "src" / "components" / "common" / "Header.astro",
            )

        # Copy Footer
        footer_src = template_dir / "components" / "common" / "Footer.astro"
        if footer_src.exists():
            shutil.copy(
                footer_src,
                output_path / "src" / "components" / "common" / "Footer.astro",
            )

        # Copy page templates
        page_src = template_dir / "pages" / "index.astro"
        if page_src.exists():
            shutil.copy(
                page_src, output_path / "src" / "pages" / default_locale / "index.astro"
            )

        # Copy styles
        style_src = template_dir / "styles" / "global.css"
        if style_src.exists():
            shutil.copy(style_src, output_path / "src" / "styles" / "global.css")

        # Copy LINE icon
        line_icon_src = template_dir / "icons" / "line.svg"
        if line_icon_src.exists():
            icons_dir = output_path / "public" / "images" / "icons"
            icons_dir.mkdir(parents=True, exist_ok=True)
            shutil.copy(line_icon_src, icons_dir / "line.svg")
            print(" ✓ LINE icon copied")

        print(" ✓ Templates copied")

    # Create astro.config.mjs
    locales_str = ", ".join([f"'{lang}'" for lang in languages])
    astro_config = ASTRO_CONFIG_TEMPLATE.format(
        site_url=site_url, locales=locales_str, default_locale=default_locale
    )
    (output_path / "astro.config.mjs").write_text(astro_config, encoding="utf-8")
    print(" ✓ astro.config.mjs created")

    # Create package.json
    package_json = PACKAGE_JSON_TEMPLATE.format(name=project_name)
    (output_path / "package.json").write_text(package_json, encoding="utf-8")
    print(" ✓ package.json created")

    # Create tsconfig.json
    tsconfig = """{
  "extends": "astro/tsconfigs/strict",
  "compilerOptions": {
    "baseUrl": ".",
    "paths": {
      "@/*": ["src/*"]
    }
  }
}
"""
    (output_path / "tsconfig.json").write_text(tsconfig, encoding="utf-8")

    # Create env file (kept out of git by the .gitignore below)
    env_content = f"""# Website Configuration
SITE_NAME={args.name}
SITE_URL={site_url}

# Umami Analytics (optional - get from Umami dashboard)
# UMAMI_WEBSITE_ID=
# UMAMI_URL=
"""
    (output_path / ".env").write_text(env_content, encoding="utf-8")
    print(" ✓ Configuration files created")

    # Create basic index page if no template provided one above
    if not (output_path / "src" / "pages" / default_locale / "index.astro").exists():
        index_content = f"""---
import BaseLayout from '../layouts/BaseLayout.astro';
import Header from '../components/common/Header.astro';
import Footer from '../components/common/Footer.astro';
---

<BaseLayout title="Home" description="Welcome to {args.name}">
  <Header />
  <main id="main-content">
    <section id="hero-section" class="hero">
      <h1 id="hero-title">Welcome to {args.name}</h1>
      <p id="hero-subtitle">Your trusted partner</p>
    </section>
  </main>
  <Footer />
</BaseLayout>
"""
        (output_path / "src" / "pages" / default_locale / "index.astro").write_text(
            index_content, encoding="utf-8"
        )

    print(" ✓ Basic pages created")

    # Create consent API with better-sqlite3 (bypasses Astro DB limitation).
    # All three templates carry doubled-brace escapes and must be unescaped
    # before being written as TypeScript.
    lib_db_content = _unescape_braces(CONSENT_DB_TEMPLATE)
    (output_path / "src" / "lib" / "db.ts").write_text(lib_db_content, encoding="utf-8")
    print(" ✓ Consent database library created (better-sqlite3)")

    # Create consent API endpoints
    consent_index = _unescape_braces(CONSENT_API_INDEX_TEMPLATE)
    (output_path / "src" / "pages" / "api" / "consent" / "index.ts").write_text(
        consent_index, encoding="utf-8"
    )

    consent_delete = _unescape_braces(CONSENT_API_DELETE_TEMPLATE)
    (output_path / "src" / "pages" / "api" / "consent" / "[sessionId].ts").write_text(
        consent_delete, encoding="utf-8"
    )
    print(" ✓ Consent API endpoints created")

    # Create admin consent logs page
    admin_consent_src = template_dir / "admin-consent-logs.astro"
    if admin_consent_src.exists():
        shutil.copy(
            admin_consent_src,
            output_path / "src" / "pages" / "admin" / "consent-logs.astro",
        )
        print(" ✓ Admin consent logs page copied")

    # Create Dockerfile (alpine + toolchain so better-sqlite3 can compile).
    # NOTE(review): EXPOSE 80 vs `astro preview` defaulting to port 4321 —
    # confirm the Easypanel service maps the right port.
    dockerfile = """FROM node:20-alpine

WORKDIR /app

# Install build dependencies for better-sqlite3
RUN apk add --no-cache python3 make g++

# Install dependencies
COPY package*.json ./
RUN npm install

# Copy source
COPY . .

# Build
RUN npm run build

# Serve
EXPOSE 80
CMD ["npm", "run", "preview"]
"""
    (output_path / "Dockerfile").write_text(dockerfile, encoding="utf-8")
    print(" ✓ Dockerfile created (alpine for better-sqlite3)")

    # Create .gitignore
    gitignore = """# Dependencies
node_modules/

# Build output
dist/

# Environment
.env
.env.*
!.env.example

# IDE
.idea/
.vscode/
*.swp
*.swo

# OS
.DS_Store
Thumbs.db
"""
    (output_path / ".gitignore").write_text(gitignore, encoding="utf-8")
    print(" ✓ .gitignore created")

    return output_path
|
||
|
||
|
||
def sync_to_gitea(output_path: str, repo_name: str) -> str:
    """Push the generated project to Gitea via the gitea-sync skill.

    Any failure (missing skill, sync error) is non-fatal: a conventional
    repository URL is returned so the deployment step can still proceed.
    """
    fallback_url = f"https://git.moreminimore.com/user/{repo_name}.git"
    try:
        # The gitea-sync skill lives in a sibling directory; make it importable.
        scripts_dir = Path(__file__).parent.parent / "gitea-sync" / "scripts"
        sys.path.insert(0, str(scripts_dir))
        from sync import sync_repo

        outcome = sync_repo(
            repo_name=repo_name,
            repo_path=output_path,
            description=f"Website: {repo_name}",
            auto_push=True,
        )
        if not outcome.get("success"):
            print(f" ⚠ Gitea sync failed: {outcome.get('error')}")
            return fallback_url
        return outcome.get("url", fallback_url)
    except Exception as e:
        print(f" ⚠ Gitea sync error: {e}")
        print(" Continuing without Gitea sync...")
        # Dummy URL keeps the downstream deployment flow alive.
        return fallback_url
|
||
|
||
|
||
def deploy_to_easypanel(output_path: str, project_name: str, git_url: str) -> str:
    """Deploy the generated site to Easypanel via the easypanel-deploy skill.

    ``output_path`` is accepted for interface parity but the deployment is
    driven entirely by the git repository at ``git_url``. Every failure path
    (missing skill, missing credentials, auth failure, API error) returns the
    conventional site URL so the caller's flow never breaks.
    """
    site_url = f"https://{project_name}.moreminimore.com"
    try:
        # The easypanel-deploy skill lives in a sibling directory.
        scripts_dir = Path(__file__).parent.parent / "easypanel-deploy" / "scripts"
        sys.path.insert(0, str(scripts_dir))
        from deploy import (
            get_session_token,
            create_service,
            update_git_source,
            update_build_type,
            deploy_service,
            load_env,
        )

        # Load credentials from the skill's environment.
        credentials = load_env()
        username = credentials.get("EASYPANEL_USERNAME", "")
        password = credentials.get("EASYPANEL_PASSWORD", "")

        if not username or not password:
            print(" ⚠ Easypanel credentials not found")
            print(" Skipping deployment - you can deploy manually later")
            return site_url

        # Authenticate before touching any service.
        token = get_session_token(username, password)
        if not token:
            print(" ⚠ Failed to get Easypanel session")
            return site_url

        # Service lifecycle: create, point at git, build from Dockerfile, go.
        create_service(project_name, "web", token)
        update_git_source(project_name, "web", git_url, "main", token)
        update_build_type(project_name, "web", token, "dockerfile")
        deploy_service(project_name, "web", token)

        return site_url
    except Exception as e:
        print(f" ⚠ Easypanel deployment error: {e}")
        print(" Continuing without deployment...")
        return site_url
|
||
|
||
|
||
def monitor_deployment(project_name: str):
    """Print pointers for following the deployment.

    The deployment itself runs asynchronously on Easypanel; this does not
    poll any API — it only tells the user where to look.
    """
    status_lines = (
        f" 📊 Monitoring deployment for {project_name}...",
        " (Deployment is running in background)",
        " Check status at: https://panelwebsite.moreminimore.com",
    )
    for line in status_lines:
        print(line)
|
||
|
||
|
||
if __name__ == "__main__":
|
||
main()
|