Base code
This commit is contained in:
26
backend/alwrity_utils/__init__.py
Normal file
@@ -0,0 +1,26 @@
"""
ALwrity Utilities Package
Modular utilities for ALwrity backend startup and configuration.
"""

from .dependency_manager import DependencyManager
from .environment_setup import EnvironmentSetup
from .database_setup import DatabaseSetup
from .production_optimizer import ProductionOptimizer
from .health_checker import HealthChecker
from .rate_limiter import RateLimiter
from .frontend_serving import FrontendServing
from .router_manager import RouterManager
from .onboarding_manager import OnboardingManager

__all__ = [
    'DependencyManager',
    'EnvironmentSetup',
    'DatabaseSetup',
    'ProductionOptimizer',
    'HealthChecker',
    'RateLimiter',
    'FrontendServing',
    'RouterManager',
    'OnboardingManager'
]
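For illustration, a minimal sketch of how these exported utilities might be wired together at startup. This is not part of the commit; the file name, the `bootstrap` function, and the exact call order are assumptions based on the class docstrings below.

# startup_sketch.py - illustrative only, assumes the modules introduced in this commit
from alwrity_utils import DependencyManager, EnvironmentSetup, DatabaseSetup, ProductionOptimizer

def bootstrap(production: bool = False) -> bool:
    """Hypothetical startup sequence showing one plausible call order."""
    if production:
        ProductionOptimizer().apply_production_optimizations()

    # Fail fast only on critical packages; optional ones just degrade features
    ok, missing = DependencyManager().check_critical_dependencies()
    if not ok:
        raise RuntimeError(f"Missing critical packages: {missing}")

    env = EnvironmentSetup(production_mode=production)
    env.setup_directories()
    env.setup_environment_variables()

    db = DatabaseSetup(production_mode=production)
    db.setup_essential_tables()
    db.verify_tables()
    return True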
220
backend/alwrity_utils/database_setup.py
Normal file
@@ -0,0 +1,220 @@
"""
Database Setup Module
Handles database initialization and table creation.
"""

from typing import List, Tuple
import sys
from pathlib import Path
from loguru import logger


class DatabaseSetup:
    """Manages database setup for ALwrity backend."""

    def __init__(self, production_mode: bool = False):
        self.production_mode = production_mode

    def setup_essential_tables(self) -> bool:
        """Set up essential database tables."""
        import os
        verbose = os.getenv("ALWRITY_VERBOSE", "false").lower() == "true"

        if verbose:
            print("📊 Setting up essential database tables...")

        try:
            from services.database import init_database, engine

            # Initialize database connection
            init_database()
            if verbose:
                print(" ✅ Database connection initialized")

            # Create essential tables
            self._create_monitoring_tables()
            self._create_subscription_tables()
            self._create_persona_tables()
            self._create_onboarding_tables()

            if verbose:
                print("✅ Essential database tables created")
            return True

        except Exception as e:
            if verbose:
                print(f"⚠️ Warning: Database setup failed: {e}")
                if self.production_mode:
                    print(" Continuing in production mode...")
                else:
                    print(" This may affect functionality")
            return True  # Don't fail startup for database issues

    def _create_monitoring_tables(self) -> bool:
        """Create API monitoring tables."""
        import os
        verbose = os.getenv("ALWRITY_VERBOSE", "false").lower() == "true"

        try:
            from services.database import engine  # engine is not in module scope, import it here
            from models.api_monitoring import Base as MonitoringBase
            MonitoringBase.metadata.create_all(bind=engine)
            if verbose:
                print(" ✅ Monitoring tables created")
            return True
        except Exception as e:
            if verbose:
                print(f" ⚠️ Monitoring tables failed: {e}")
            return True  # Non-critical

    def _create_subscription_tables(self) -> bool:
        """Create subscription and billing tables."""
        import os
        verbose = os.getenv("ALWRITY_VERBOSE", "false").lower() == "true"

        try:
            from services.database import engine  # engine is not in module scope, import it here
            from models.subscription_models import Base as SubscriptionBase
            SubscriptionBase.metadata.create_all(bind=engine)
            if verbose:
                print(" ✅ Subscription tables created")
            return True
        except Exception as e:
            if verbose:
                print(f" ⚠️ Subscription tables failed: {e}")
            return True  # Non-critical

    def _create_persona_tables(self) -> bool:
        """Create persona analysis tables."""
        import os
        verbose = os.getenv("ALWRITY_VERBOSE", "false").lower() == "true"

        try:
            from services.database import engine  # engine is not in module scope, import it here
            from models.persona_models import Base as PersonaBase
            PersonaBase.metadata.create_all(bind=engine)
            if verbose:
                print(" ✅ Persona tables created")
            return True
        except Exception as e:
            if verbose:
                print(f" ⚠️ Persona tables failed: {e}")
            return True  # Non-critical

    def _create_onboarding_tables(self) -> bool:
        """Create onboarding tables."""
        import os
        verbose = os.getenv("ALWRITY_VERBOSE", "false").lower() == "true"

        try:
            from services.database import engine  # engine is not in module scope, import it here
            from models.onboarding import Base as OnboardingBase
            OnboardingBase.metadata.create_all(bind=engine)
            if verbose:
                print(" ✅ Onboarding tables created")
            return True
        except Exception as e:
            if verbose:
                print(f" ⚠️ Onboarding tables failed: {e}")
            return True  # Non-critical

    def verify_tables(self) -> bool:
        """Verify that essential tables exist."""
        import os
        verbose = os.getenv("ALWRITY_VERBOSE", "false").lower() == "true"

        if self.production_mode:
            if verbose:
                print("⚠️ Skipping table verification in production mode")
            return True

        if verbose:
            print("🔍 Verifying database tables...")

        try:
            from services.database import engine
            from sqlalchemy import inspect

            inspector = inspect(engine)
            tables = inspector.get_table_names()

            essential_tables = [
                'api_monitoring_logs',
                'subscription_plans',
                'user_subscriptions',
                'onboarding_sessions',
                'persona_data'
            ]

            existing_tables = [table for table in essential_tables if table in tables]
            if verbose:
                print(f" ✅ Found tables: {existing_tables}")

            if len(existing_tables) < len(essential_tables):
                missing = [table for table in essential_tables if table not in existing_tables]
                if verbose:
                    print(f" ⚠️ Missing tables: {missing}")

            return True

        except Exception as e:
            print(f" ⚠️ Table verification failed: {e}")
            return True  # Non-critical

    def setup_advanced_tables(self) -> bool:
        """Set up advanced tables (non-critical)."""
        if self.production_mode:
            print("⚠️ Skipping advanced table setup in production mode")
            return True

        print("🔧 Setting up advanced database features...")

        try:
            # Set up monitoring tables
            self._setup_monitoring_tables()

            # Set up billing tables
            self._setup_billing_tables()

            logger.debug("✅ Advanced database features configured")
            return True

        except Exception as e:
            logger.warning(f"Advanced table setup failed: {e}")
            return True  # Non-critical

    def _setup_monitoring_tables(self) -> bool:
        """Set up API monitoring tables."""
        try:
            sys.path.append(str(Path(__file__).parent.parent))
            from scripts.create_monitoring_tables import create_monitoring_tables

            if create_monitoring_tables():
                print(" ✅ API monitoring tables created")
                return True
            else:
                print(" ⚠️ API monitoring setup failed")
                return True  # Non-critical

        except Exception as e:
            print(f" ⚠️ Monitoring setup failed: {e}")
            return True  # Non-critical

    def _setup_billing_tables(self) -> bool:
        """Set up billing and subscription tables."""
        try:
            sys.path.append(str(Path(__file__).parent.parent))
            from scripts.create_billing_tables import create_billing_tables, check_existing_tables
            from services.database import engine

            # Check if tables already exist
            if check_existing_tables(engine):
                logger.debug("✅ Billing tables already exist")
                return True

            if create_billing_tables():
                logger.debug("✅ Billing tables created")
                return True
            else:
                logger.warning("Billing setup failed")
                return True  # Non-critical

        except Exception as e:
            logger.warning(f"Billing setup failed: {e}")
            return True  # Non-critical
183
backend/alwrity_utils/dependency_manager.py
Normal file
@@ -0,0 +1,183 @@
"""
Dependency Management Module
Handles installation and verification of Python dependencies.
"""

import sys
import subprocess
from pathlib import Path
from typing import List, Tuple


class DependencyManager:
    """Manages Python package dependencies for ALwrity backend."""

    def __init__(self, requirements_file: str = "requirements.txt"):
        self.requirements_file = Path(requirements_file)
        self.critical_packages = [
            'fastapi',
            'uvicorn',
            'pydantic',
            'sqlalchemy',
            'loguru'
        ]

        self.optional_packages = [
            'openai',
            'google.generativeai',
            'anthropic',
            'mistralai',
            'spacy',
            'nltk'
        ]

    def install_requirements(self) -> bool:
        """Install packages from requirements.txt."""
        print("📦 Installing required packages...")

        if not self.requirements_file.exists():
            print(f"❌ Requirements file not found: {self.requirements_file}")
            return False

        try:
            subprocess.check_call([
                sys.executable, "-m", "pip", "install", "-r", str(self.requirements_file)
            ])
            print("✅ All packages installed successfully!")
            return True
        except subprocess.CalledProcessError as e:
            print(f"❌ Error installing packages: {e}")
            return False

    def check_critical_dependencies(self) -> Tuple[bool, List[str]]:
        """Check if critical dependencies are available."""
        import os
        verbose = os.getenv("ALWRITY_VERBOSE", "false").lower() == "true"

        if verbose:
            print("🔍 Checking critical dependencies...")

        missing_packages = []

        for package in self.critical_packages:
            try:
                __import__(package.replace('-', '_'))
                if verbose:
                    print(f" ✅ {package}")
            except ImportError:
                if verbose:
                    print(f" ❌ {package} - MISSING")
                missing_packages.append(package)

        if missing_packages:
            if verbose:
                print(f"❌ Missing critical packages: {', '.join(missing_packages)}")
            return False, missing_packages

        if verbose:
            print("✅ All critical dependencies available!")
        return True, []

    def check_optional_dependencies(self) -> Tuple[bool, List[str]]:
        """Check if optional dependencies are available."""
        import os
        verbose = os.getenv("ALWRITY_VERBOSE", "false").lower() == "true"

        if verbose:
            print("🔍 Checking optional dependencies...")

        missing_packages = []

        for package in self.optional_packages:
            try:
                __import__(package.replace('-', '_'))
                if verbose:
                    print(f" ✅ {package}")
            except ImportError:
                if verbose:
                    print(f" ⚠️ {package} - MISSING (optional)")
                missing_packages.append(package)

        if missing_packages and verbose:
            print(f"⚠️ Missing optional packages: {', '.join(missing_packages)}")
            print(" Some features may not be available")

        return len(missing_packages) == 0, missing_packages

    def setup_spacy_model(self) -> bool:
        """Set up spaCy English model."""
        print("🧠 Setting up spaCy model...")

        try:
            import spacy

            model_name = "en_core_web_sm"

            try:
                # Try to load the model
                nlp = spacy.load(model_name)
                test_doc = nlp("This is a test sentence.")
                if test_doc and len(test_doc) > 0:
                    print(f"✅ spaCy model '{model_name}' is available")
                    return True
            except OSError:
                # Model not found - try to download it
                print(f"⚠️ spaCy model '{model_name}' not found, downloading...")
                try:
                    subprocess.check_call([
                        sys.executable, "-m", "spacy", "download", model_name
                    ])
                    print(f"✅ spaCy model '{model_name}' downloaded successfully")
                    return True
                except subprocess.CalledProcessError as e:
                    print(f"❌ Failed to download spaCy model: {e}")
                    print(" Please download manually with: python -m spacy download en_core_web_sm")
                    return False

        except ImportError:
            print("⚠️ spaCy not installed - skipping model setup")
            return True  # Don't fail for missing spaCy package

        return True

    def setup_nltk_data(self) -> bool:
        """Set up NLTK data."""
        print("📚 Setting up NLTK data...")

        try:
            import nltk

            # Essential NLTK data packages
            essential_data = [
                ('punkt_tab', 'tokenizers/punkt_tab'),  # Updated tokenizer
                ('stopwords', 'corpora/stopwords'),
                ('averaged_perceptron_tagger', 'taggers/averaged_perceptron_tagger')
            ]

            for data_package, path in essential_data:
                try:
                    nltk.data.find(path)
                    print(f" ✅ {data_package}")
                except LookupError:
                    print(f" ⚠️ {data_package} - downloading...")
                    try:
                        nltk.download(data_package, quiet=True)
                        print(f" ✅ {data_package} downloaded")
                    except Exception as e:
                        print(f" ⚠️ {data_package} download failed: {e}")
                        # Try fallback for punkt_tab -> punkt
                        if data_package == 'punkt_tab':
                            try:
                                nltk.download('punkt', quiet=True)
                                print(" ✅ punkt (fallback) downloaded")
                            except Exception:
                                pass

            print("✅ NLTK data setup complete")
            return True

        except ImportError:
            print("⚠️ NLTK not installed - skipping data setup")
            return True  # Don't fail for missing NLTK package

        return True
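A possible way the model setup above could be combined with the ProductionOptimizer skip flags defined later in this commit. A sketch only, assuming both classes are created by the same startup path:

# Sketch: model setup runs only when the corresponding skip flag is not set via env vars
from alwrity_utils import DependencyManager, ProductionOptimizer

deps = DependencyManager()
optimizer = ProductionOptimizer()

if not optimizer.skip_spacy_setup():
    deps.setup_spacy_model()   # loads en_core_web_sm, downloading it if absent
if not optimizer.skip_nltk_setup():
    deps.setup_nltk_data()     # verifies punkt_tab/stopwords/tagger, with punkt fallback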
155
backend/alwrity_utils/environment_setup.py
Normal file
@@ -0,0 +1,155 @@
"""
Environment Setup Module
Handles environment configuration and directory setup.
"""

import os
from pathlib import Path
from typing import List, Dict, Any


class EnvironmentSetup:
    """Manages environment setup for ALwrity backend."""

    def __init__(self, production_mode: bool = False):
        self.production_mode = production_mode
        # Use safer directory paths that don't conflict with deployment platforms
        if production_mode:
            # In production, use temp directories or skip directory creation
            self.required_directories = []
        else:
            # In development, use local directories
            self.required_directories = [
                "lib/workspace/alwrity_content",
                "lib/workspace/alwrity_web_research",
                "lib/workspace/alwrity_prompts",
                "lib/workspace/alwrity_config"
            ]

    def setup_directories(self) -> bool:
        """Create necessary directories for ALwrity."""
        import os
        verbose = os.getenv("ALWRITY_VERBOSE", "false").lower() == "true"

        if verbose:
            print("📁 Setting up directories...")

        if not self.required_directories:
            if verbose:
                print(" ⚠️ Skipping directory creation in production mode")
            return True

        for directory in self.required_directories:
            try:
                Path(directory).mkdir(parents=True, exist_ok=True)
                if verbose:
                    print(f" ✅ Created: {directory}")
            except Exception as e:
                if verbose:
                    print(f" ❌ Failed to create {directory}: {e}")
                return False

        if verbose:
            print("✅ All directories created successfully")
        return True

    def setup_environment_variables(self) -> bool:
        """Set up environment variables for the application."""
        print("🔧 Setting up environment variables...")

        # Production environment variables
        if self.production_mode:
            env_vars = {
                "HOST": "0.0.0.0",
                "PORT": "8000",
                "RELOAD": "false",
                "LOG_LEVEL": "INFO",
                "DEBUG": "false"
            }
        else:
            env_vars = {
                "HOST": "0.0.0.0",
                "PORT": "8000",
                "RELOAD": "true",
                "LOG_LEVEL": "DEBUG",
                "DEBUG": "true"
            }

        for key, value in env_vars.items():
            os.environ.setdefault(key, value)
            print(f" ✅ {key}={value}")

        print("✅ Environment variables configured")
        return True

    def create_env_file(self) -> bool:
        """Create .env file with default configuration (development only)."""
        if self.production_mode:
            print("⚠️ Skipping .env file creation in production mode")
            return True

        print("🔧 Creating .env file...")

        env_file = Path(".env")
        if env_file.exists():
            print(" ✅ .env file already exists")
            return True

        env_content = """# ALwrity Backend Configuration

# API Keys (Configure these in the onboarding process)
# OPENAI_API_KEY=your_openai_api_key_here
# GEMINI_API_KEY=your_gemini_api_key_here
# ANTHROPIC_API_KEY=your_anthropic_api_key_here
# MISTRAL_API_KEY=your_mistral_api_key_here

# Research API Keys (Optional)
# TAVILY_API_KEY=your_tavily_api_key_here
# SERPER_API_KEY=your_serper_api_key_here
# EXA_API_KEY=your_exa_api_key_here

# Authentication
# CLERK_SECRET_KEY=your_clerk_secret_key_here

# OAuth Redirect URIs
# GSC_REDIRECT_URI=https://your-frontend.vercel.app/gsc/callback
# WORDPRESS_REDIRECT_URI=https://your-frontend.vercel.app/wp/callback
# WIX_REDIRECT_URI=https://your-frontend.vercel.app/wix/callback

# Server Configuration
HOST=0.0.0.0
PORT=8000
DEBUG=true

# Logging
LOG_LEVEL=INFO
"""

        try:
            with open(env_file, 'w') as f:
                f.write(env_content)
            print("✅ .env file created successfully")
            return True
        except Exception as e:
            print(f"❌ Error creating .env file: {e}")
            return False

    def verify_environment(self) -> bool:
        """Verify that the environment is properly configured."""
        print("🔍 Verifying environment setup...")

        # Check required directories
        for directory in self.required_directories:
            if not Path(directory).exists():
                print(f"❌ Directory missing: {directory}")
                return False

        # Check environment variables
        required_vars = ["HOST", "PORT", "LOG_LEVEL"]
        for var in required_vars:
            if not os.getenv(var):
                print(f"❌ Environment variable missing: {var}")
                return False

        print("✅ Environment verification complete")
        return True
156
backend/alwrity_utils/frontend_serving.py
Normal file
@@ -0,0 +1,156 @@
"""
Frontend Serving Module
Handles React frontend serving and static file mounting with cache headers.
"""

import os
from pathlib import Path
from fastapi import FastAPI, Request
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse, Response
from starlette.middleware.base import BaseHTTPMiddleware
from loguru import logger
from typing import Dict, Any


class CacheHeadersMiddleware(BaseHTTPMiddleware):
    """
    Middleware to add cache headers to static files.

    This improves performance by allowing browsers to cache static assets
    (JS, CSS, images) for 1 year, reducing repeat visit load times.
    """

    async def dispatch(self, request: Request, call_next):
        response = await call_next(request)

        # Only add cache headers to static files
        if request.url.path.startswith("/static/"):
            path = request.url.path.lower()

            # Check if file has a hash in its name (React build pattern: filename.hash.ext)
            # Examples: bundle.abc123.js, main.def456.chunk.js, vendors.789abc.js
            import re
            # Pattern matches: filename.hexhash.ext or filename.hexhash.chunk.ext
            hash_pattern = r'\.[a-f0-9]{8,}\.'
            has_hash = bool(re.search(hash_pattern, path))

            # File extensions that should be cached
            cacheable_extensions = ['.js', '.css', '.woff', '.woff2', '.ttf', '.otf',
                                    '.png', '.jpg', '.jpeg', '.webp', '.svg', '.ico', '.gif']
            is_cacheable_file = any(path.endswith(ext) for ext in cacheable_extensions)

            if is_cacheable_file:
                if has_hash:
                    # Immutable files (with hash) - cache for 1 year
                    # These files never change (new hash = new file)
                    response.headers["Cache-Control"] = "public, max-age=31536000, immutable"
                    # Expires header calculated dynamically to match max-age
                    # Modern browsers prefer Cache-Control, but Expires provides compatibility
                    from datetime import datetime, timedelta
                    expires_date = datetime.utcnow() + timedelta(seconds=31536000)
                    response.headers["Expires"] = expires_date.strftime("%a, %d %b %Y %H:%M:%S GMT")
                else:
                    # Non-hashed files - shorter cache (1 hour)
                    # These might be updated, so cache for shorter time
                    response.headers["Cache-Control"] = "public, max-age=3600"

        # Never cache HTML files (index.html)
        elif request.url.path == "/" or request.url.path.endswith(".html"):
            response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
            response.headers["Pragma"] = "no-cache"
            response.headers["Expires"] = "0"

        return response


class FrontendServing:
    """Manages React frontend serving and static file mounting with cache headers."""

    def __init__(self, app: FastAPI):
        self.app = app
        self.frontend_build_path = os.path.join(os.path.dirname(__file__), "..", "..", "frontend", "build")
        self.static_path = os.path.join(self.frontend_build_path, "static")

    def setup_frontend_serving(self) -> bool:
        """
        Set up React frontend serving and static file mounting with cache headers.

        This method:
        1. Adds cache headers middleware for static files
        2. Mounts static files directory
        3. Configures proper caching for performance
        """
        try:
            logger.info("Setting up frontend serving with cache headers...")

            # Add cache headers middleware BEFORE mounting static files
            self.app.add_middleware(CacheHeadersMiddleware)
            logger.info("Cache headers middleware added")

            # Mount static files for React app (only if directory exists)
            if os.path.exists(self.static_path):
                self.app.mount("/static", StaticFiles(directory=self.static_path), name="static")
                logger.info("Frontend static files mounted successfully with cache headers")
                logger.info("Static files will be cached for 1 year (immutable files) or 1 hour (others)")
                return True
            else:
                logger.info("Frontend build directory not found. Static files not mounted.")
                return False

        except Exception as e:
            logger.error(f"Could not mount static files: {e}")
            return False

    def serve_frontend(self) -> FileResponse | Dict[str, Any]:
        """
        Serve the React frontend index.html.

        Note: index.html is never cached to ensure users always get the latest version.
        Static assets (JS/CSS) are cached separately via middleware.
        """
        try:
            # Check if frontend build exists
            index_html = os.path.join(self.frontend_build_path, "index.html")

            if os.path.exists(index_html):
                # Return FileResponse with no-cache headers for HTML
                response = FileResponse(index_html)
                response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
                response.headers["Pragma"] = "no-cache"
                response.headers["Expires"] = "0"
                return response
            else:
                return {
                    "message": "Frontend not built. Please run 'npm run build' in the frontend directory.",
                    "api_docs": "/api/docs"
                }

        except Exception as e:
            logger.error(f"Error serving frontend: {e}")
            return {
                "message": "Error serving frontend",
                "error": str(e),
                "api_docs": "/api/docs"
            }

    def get_frontend_status(self) -> Dict[str, Any]:
        """Get the status of frontend build and serving."""
        try:
            index_html = os.path.join(self.frontend_build_path, "index.html")
            static_exists = os.path.exists(self.static_path)

            return {
                "frontend_build_path": self.frontend_build_path,
                "static_path": self.static_path,
                "index_html_exists": os.path.exists(index_html),
                "static_files_exist": static_exists,
                "frontend_ready": os.path.exists(index_html) and static_exists
            }

        except Exception as e:
            logger.error(f"Error checking frontend status: {e}")
            return {
                "error": str(e),
                "frontend_ready": False
            }
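A minimal sketch of how FrontendServing might be attached to an application, with a catch-all route that hands non-API paths to serve_frontend so client-side routing keeps working. Not part of this commit; the route path and app wiring are assumptions.

# Sketch: mounting the built React app on a FastAPI instance
from fastapi import FastAPI
from alwrity_utils import FrontendServing

app = FastAPI()
frontend = FrontendServing(app)
frontend.setup_frontend_serving()   # adds CacheHeadersMiddleware and mounts /static if the build exists

@app.get("/{full_path:path}")
async def spa_fallback(full_path: str):
    # Unknown paths fall back to index.html (served with no-cache headers)
    return frontend.serve_frontend()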
129
backend/alwrity_utils/health_checker.py
Normal file
@@ -0,0 +1,129 @@
"""
Health Check Module
Handles health check endpoints and database health verification.
"""

from fastapi import HTTPException
from datetime import datetime
from typing import Dict, Any
from loguru import logger


class HealthChecker:
    """Manages health check functionality for ALwrity backend."""

    def __init__(self):
        self.startup_time = datetime.utcnow()

    def basic_health_check(self) -> Dict[str, Any]:
        """Basic health check endpoint."""
        try:
            return {
                "status": "healthy",
                "message": "ALwrity backend is running",
                "timestamp": datetime.utcnow().isoformat(),
                "uptime": str(datetime.utcnow() - self.startup_time)
            }
        except Exception as e:
            logger.error(f"Health check failed: {e}")
            return {
                "status": "error",
                "message": f"Health check failed: {str(e)}",
                "timestamp": datetime.utcnow().isoformat()
            }

    def database_health_check(self) -> Dict[str, Any]:
        """Database health check endpoint including persona tables verification."""
        try:
            from services.database import get_db_session
            from models.persona_models import (
                WritingPersona,
                PlatformPersona,
                PersonaAnalysisResult,
                PersonaValidationResult
            )

            session = get_db_session()
            if not session:
                return {
                    "status": "error",
                    "message": "Could not get database session",
                    "timestamp": datetime.utcnow().isoformat()
                }

            # Test all persona tables
            tables_status = {}
            try:
                session.query(WritingPersona).first()
                tables_status["writing_personas"] = "ok"
            except Exception as e:
                tables_status["writing_personas"] = f"error: {str(e)}"

            try:
                session.query(PlatformPersona).first()
                tables_status["platform_personas"] = "ok"
            except Exception as e:
                tables_status["platform_personas"] = f"error: {str(e)}"

            try:
                session.query(PersonaAnalysisResult).first()
                tables_status["persona_analysis_results"] = "ok"
            except Exception as e:
                tables_status["persona_analysis_results"] = f"error: {str(e)}"

            try:
                session.query(PersonaValidationResult).first()
                tables_status["persona_validation_results"] = "ok"
            except Exception as e:
                tables_status["persona_validation_results"] = f"error: {str(e)}"

            session.close()

            # Check if all tables are ok
            all_ok = all(status == "ok" for status in tables_status.values())

            return {
                "status": "healthy" if all_ok else "warning",
                "message": "Database connection successful" if all_ok else "Some persona tables may have issues",
                "persona_tables": tables_status,
                "timestamp": datetime.utcnow().isoformat()
            }

        except Exception as e:
            logger.error(f"Database health check failed: {e}")
            return {
                "status": "error",
                "message": f"Database health check failed: {str(e)}",
                "timestamp": datetime.utcnow().isoformat()
            }

    def comprehensive_health_check(self) -> Dict[str, Any]:
        """Comprehensive health check including all services."""
        try:
            # Basic health
            basic_health = self.basic_health_check()

            # Database health
            db_health = self.database_health_check()

            # Determine overall status
            overall_status = "healthy"
            if basic_health["status"] != "healthy" or db_health["status"] == "error":
                overall_status = "unhealthy"
            elif db_health["status"] == "warning":
                overall_status = "degraded"

            return {
                "status": overall_status,
                "basic": basic_health,
                "database": db_health,
                "timestamp": datetime.utcnow().isoformat()
            }

        except Exception as e:
            logger.error(f"Comprehensive health check failed: {e}")
            return {
                "status": "error",
                "message": f"Comprehensive health check failed: {str(e)}",
                "timestamp": datetime.utcnow().isoformat()
            }
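A sketch of how the three checks might be exposed as routes. The exact paths are assumptions, chosen to match the /health and /health/database entries in the rate limiter's exempt list further down.

# Sketch: exposing HealthChecker via FastAPI routes
from fastapi import FastAPI
from alwrity_utils import HealthChecker

app = FastAPI()
checker = HealthChecker()

app.get("/health")(checker.basic_health_check)
app.get("/health/database")(checker.database_health_check)
app.get("/health/comprehensive")(checker.comprehensive_health_check)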
499
backend/alwrity_utils/onboarding_manager.py
Normal file
@@ -0,0 +1,499 @@
"""
Onboarding Manager Module
Handles all onboarding-related endpoints and functionality.
"""

from fastapi import FastAPI, HTTPException, Depends, BackgroundTasks
from fastapi.responses import FileResponse
from typing import Dict, Any, Optional
from loguru import logger

# Import onboarding functions
from api.onboarding import (
    health_check,
    initialize_onboarding,
    get_onboarding_status,
    get_onboarding_progress_full,
    get_step_data,
    complete_step,
    skip_step,
    validate_step_access,
    get_api_keys,
    get_api_keys_for_onboarding,
    save_api_key,
    validate_api_keys,
    start_onboarding,
    complete_onboarding,
    reset_onboarding,
    get_resume_info,
    get_onboarding_config,
    get_provider_setup_info,
    get_all_providers_info,
    validate_provider_key,
    get_enhanced_validation_status,
    get_onboarding_summary,
    get_website_analysis_data,
    get_research_preferences_data,
    save_business_info,
    get_business_info,
    get_business_info_by_user,
    update_business_info,
    generate_writing_personas,
    generate_writing_personas_async,
    get_persona_task_status,
    assess_persona_quality,
    regenerate_persona,
    get_persona_generation_options,
    get_latest_persona,
    save_persona_update,
    StepCompletionRequest,
    APIKeyRequest
)
from middleware.auth_middleware import get_current_user


class OnboardingManager:
    """Manages all onboarding-related endpoints and functionality."""

    def __init__(self, app: FastAPI):
        self.app = app
        self.setup_onboarding_endpoints()

    def setup_onboarding_endpoints(self):
        """Set up all onboarding-related endpoints."""

        # Onboarding initialization - BATCH ENDPOINT (reduces 4 API calls to 1)
        @self.app.get("/api/onboarding/init")
        async def onboarding_init(current_user: dict = Depends(get_current_user)):
            """
            Batch initialization endpoint - combines user info, status, and progress.
            This eliminates 3-4 separate API calls on initial load, reducing latency by 60-75%.
            """
            try:
                return await initialize_onboarding(current_user)
            except HTTPException as he:
                raise he
            except Exception as e:
                logger.error(f"Error in onboarding_init: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        # Onboarding status endpoints
        @self.app.get("/api/onboarding/status")
        async def onboarding_status(current_user: dict = Depends(get_current_user)):
            """Get the current onboarding status."""
            try:
                return await get_onboarding_status(current_user)
            except HTTPException as he:
                raise he
            except Exception as e:
                logger.error(f"Error in onboarding_status: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/progress")
        async def onboarding_progress(current_user: dict = Depends(get_current_user)):
            """Get the full onboarding progress data."""
            try:
                return await get_onboarding_progress_full(current_user)
            except HTTPException as he:
                raise he
            except Exception as e:
                logger.error(f"Error in onboarding_progress: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        # Step management endpoints
        @self.app.get("/api/onboarding/step/{step_number}")
        async def step_data(step_number: int, current_user: dict = Depends(get_current_user)):
            """Get data for a specific step."""
            try:
                return await get_step_data(step_number, current_user)
            except HTTPException as he:
                raise he
            except Exception as e:
                logger.error(f"Error in step_data: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/api/onboarding/step/{step_number}/complete")
        async def step_complete(step_number: int, request: StepCompletionRequest, current_user: dict = Depends(get_current_user)):
            """Mark a step as completed."""
            try:
                return await complete_step(step_number, request, current_user)
            except HTTPException as he:
                raise he
            except Exception as e:
                logger.error(f"Error in step_complete: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/api/onboarding/step/{step_number}/skip")
        async def step_skip(step_number: int, current_user: dict = Depends(get_current_user)):
            """Skip a step (for optional steps)."""
            try:
                return await skip_step(step_number, current_user)
            except HTTPException as he:
                raise he
            except Exception as e:
                logger.error(f"Error in step_skip: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/step/{step_number}/validate")
        async def step_validate(step_number: int, current_user: dict = Depends(get_current_user)):
            """Validate if user can access a specific step."""
            try:
                return await validate_step_access(step_number, current_user)
            except HTTPException as he:
                raise he
            except Exception as e:
                logger.error(f"Error in step_validate: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        # API key management endpoints
        @self.app.get("/api/onboarding/api-keys")
        async def api_keys():
            """Get all configured API keys (masked)."""
            try:
                return await get_api_keys()
            except Exception as e:
                logger.error(f"Error in api_keys: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/api-keys/onboarding")
        async def api_keys_for_onboarding(current_user: dict = Depends(get_current_user)):
            """Get all configured API keys for onboarding (unmasked)."""
            try:
                return await get_api_keys_for_onboarding(current_user)
            except Exception as e:
                logger.error(f"Error in api_keys_for_onboarding: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/api/onboarding/api-keys")
        async def api_key_save(request: APIKeyRequest, current_user: dict = Depends(get_current_user)):
            """Save an API key for a provider."""
            try:
                return await save_api_key(request, current_user)
            except Exception as e:
                logger.error(f"Error in api_key_save: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/api-keys/validate")
        async def api_key_validate():
            """Get API key validation status and configuration."""
            try:
                import os
                from dotenv import load_dotenv

                # Load environment variables
                backend_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
                env_path = os.path.join(backend_dir, ".env")
                load_dotenv(env_path, override=True)

                # Check for required API keys (backend only)
                api_keys = {}
                required_keys = {
                    'GEMINI_API_KEY': 'gemini',
                    'EXA_API_KEY': 'exa'
                    # Note: CopilotKit is frontend-only, validated separately
                }

                missing_keys = []
                configured_providers = []

                for env_var, provider in required_keys.items():
                    key_value = os.getenv(env_var)
                    if key_value and key_value.strip():
                        api_keys[provider] = key_value.strip()
                        configured_providers.append(provider)
                    else:
                        missing_keys.append(provider)

                # Determine if all required keys are present
                required_providers = ['gemini', 'exa']  # Backend keys only
                all_required_present = all(provider in configured_providers for provider in required_providers)

                result = {
                    "api_keys": api_keys,
                    "validation_results": {
                        "gemini": {"valid": 'gemini' in configured_providers, "status": "configured" if 'gemini' in configured_providers else "missing"},
                        "exa": {"valid": 'exa' in configured_providers, "status": "configured" if 'exa' in configured_providers else "missing"}
                    },
                    "all_valid": all_required_present,
                    "total_providers": len(configured_providers),
                    "configured_providers": configured_providers,
                    "missing_keys": missing_keys
                }

                logger.info(f"API Key Validation Result: {result}")
                return result
            except Exception as e:
                logger.error(f"Error in api_key_validate: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        # Onboarding control endpoints
        @self.app.post("/api/onboarding/start")
        async def onboarding_start(current_user: dict = Depends(get_current_user)):
            """Start a new onboarding session."""
            try:
                return await start_onboarding(current_user)
            except HTTPException as he:
                raise he
            except Exception as e:
                logger.error(f"Error in onboarding_start: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/api/onboarding/complete")
        async def onboarding_complete(current_user: dict = Depends(get_current_user)):
            """Complete the onboarding process."""
            try:
                return await complete_onboarding(current_user)
            except HTTPException as he:
                raise he
            except Exception as e:
                logger.error(f"Error in onboarding_complete: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/api/onboarding/reset")
        async def onboarding_reset():
            """Reset the onboarding progress."""
            try:
                return await reset_onboarding()
            except Exception as e:
                logger.error(f"Error in onboarding_reset: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        # Resume functionality
        @self.app.get("/api/onboarding/resume")
        async def onboarding_resume():
            """Get information for resuming onboarding."""
            try:
                return await get_resume_info()
            except Exception as e:
                logger.error(f"Error in onboarding_resume: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        # Configuration endpoints
        @self.app.get("/api/onboarding/config")
        async def onboarding_config():
            """Get onboarding configuration and requirements."""
            try:
                return get_onboarding_config()
            except Exception as e:
                logger.error(f"Error in onboarding_config: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        # Enhanced provider endpoints
        @self.app.get("/api/onboarding/providers/{provider}/setup")
        async def provider_setup_info(provider: str):
            """Get setup information for a specific provider."""
            try:
                return await get_provider_setup_info(provider)
            except Exception as e:
                logger.error(f"Error in provider_setup_info: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/providers")
        async def all_providers_info():
            """Get setup information for all providers."""
            try:
                return await get_all_providers_info()
            except Exception as e:
                logger.error(f"Error in all_providers_info: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/api/onboarding/providers/{provider}/validate")
        async def validate_provider_key_endpoint(provider: str, request: APIKeyRequest):
            """Validate a specific provider's API key."""
            try:
                return await validate_provider_key(provider, request)
            except Exception as e:
                logger.error(f"Error in validate_provider_key: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/validation/enhanced")
        async def enhanced_validation_status():
            """Get enhanced validation status for all configured services."""
            try:
                return await get_enhanced_validation_status()
            except Exception as e:
                logger.error(f"Error in enhanced_validation_status: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        # New endpoints for FinalStep data loading
        @self.app.get("/api/onboarding/summary")
        async def onboarding_summary(current_user: dict = Depends(get_current_user)):
            """Get comprehensive onboarding summary for FinalStep."""
            try:
                return await get_onboarding_summary(current_user)
            except Exception as e:
                logger.error(f"Error in onboarding_summary: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/website-analysis")
        async def website_analysis_data(current_user: dict = Depends(get_current_user)):
            """Get website analysis data for FinalStep."""
            try:
                return await get_website_analysis_data(current_user)
            except Exception as e:
                logger.error(f"Error in website_analysis_data: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/research-preferences")
        async def research_preferences_data(current_user: dict = Depends(get_current_user)):
            """Get research preferences data for FinalStep."""
            try:
                return await get_research_preferences_data(current_user)
            except Exception as e:
                logger.error(f"Error in research_preferences_data: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        # Business Information endpoints
        @self.app.post("/api/onboarding/business-info")
        async def business_info_save(request: dict):
            """Save business information for users without websites."""
            try:
                from models.business_info_request import BusinessInfoRequest
                return await save_business_info(request)
            except Exception as e:
                logger.error(f"Error in business_info_save: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/business-info/{business_info_id}")
        async def business_info_get(business_info_id: int):
            """Get business information by ID."""
            try:
                return await get_business_info(business_info_id)
            except Exception as e:
                logger.error(f"Error in business_info_get: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/business-info/user/{user_id}")
        async def business_info_get_by_user(user_id: int):
            """Get business information by user ID."""
            try:
                return await get_business_info_by_user(user_id)
            except Exception as e:
                logger.error(f"Error in business_info_get_by_user: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.put("/api/onboarding/business-info/{business_info_id}")
        async def business_info_update(business_info_id: int, request: dict):
            """Update business information."""
            try:
                from models.business_info_request import BusinessInfoRequest
                return await update_business_info(business_info_id, request)
            except Exception as e:
                logger.error(f"Error in business_info_update: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        # Persona generation endpoints
        @self.app.post("/api/onboarding/step4/generate-personas")
        async def generate_personas(request: dict, current_user: dict = Depends(get_current_user)):
            """Generate AI writing personas for Step 4."""
            try:
                return await generate_writing_personas(request, current_user)
            except Exception as e:
                logger.error(f"Error in generate_personas: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/api/onboarding/step4/generate-personas-async")
        async def generate_personas_async(request: dict, background_tasks: BackgroundTasks, current_user: dict = Depends(get_current_user)):
            """Start async persona generation task."""
            try:
                return await generate_writing_personas_async(request, current_user, background_tasks)
            except Exception as e:
                logger.error(f"Error in generate_personas_async: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/step4/persona-task/{task_id}")
        async def get_persona_task(task_id: str):
            """Get persona generation task status."""
            try:
                return await get_persona_task_status(task_id)
            except Exception as e:
                logger.error(f"Error in get_persona_task: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/step4/persona-latest")
        async def persona_latest(current_user: dict = Depends(get_current_user)):
            """Get latest cached persona for current user."""
            try:
                return await get_latest_persona(current_user)
            except HTTPException as he:
                raise he
            except Exception as e:
                logger.error(f"Error in persona_latest: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/api/onboarding/step4/persona-save")
        async def persona_save(request: dict, current_user: dict = Depends(get_current_user)):
            """Save edited persona back to cache."""
            try:
                return await save_persona_update(request, current_user)
            except HTTPException as he:
                raise he
            except Exception as e:
                logger.error(f"Error in persona_save: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/api/onboarding/step4/assess-persona-quality")
        async def assess_persona_quality_endpoint(request: dict, current_user: dict = Depends(get_current_user)):
            """Assess the quality of generated personas."""
            try:
                return await assess_persona_quality(request, current_user)
            except Exception as e:
                logger.error(f"Error in assess_persona_quality: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/api/onboarding/step4/regenerate-persona")
        async def regenerate_persona_endpoint(request: dict, current_user: dict = Depends(get_current_user)):
            """Regenerate a specific persona with improvements."""
            try:
                return await regenerate_persona(request, current_user)
            except Exception as e:
                logger.error(f"Error in regenerate_persona: {e}")
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.get("/api/onboarding/step4/persona-options")
        async def get_persona_options(current_user: dict = Depends(get_current_user)):
            """Get persona generation options and configurations."""
            try:
                return await get_persona_generation_options(current_user)
            except Exception as e:
                logger.error(f"Error in get_persona_options: {e}")
                raise HTTPException(status_code=500, detail=str(e))

    def get_onboarding_status(self) -> Dict[str, Any]:
        """Get the status of onboarding endpoints."""
        return {
            "onboarding_endpoints": [
                "/api/onboarding/init",
                "/api/onboarding/status",
                "/api/onboarding/progress",
                "/api/onboarding/step/{step_number}",
                "/api/onboarding/step/{step_number}/complete",
                "/api/onboarding/step/{step_number}/skip",
                "/api/onboarding/step/{step_number}/validate",
                "/api/onboarding/api-keys",
                "/api/onboarding/api-keys/onboarding",
                "/api/onboarding/start",
                "/api/onboarding/complete",
                "/api/onboarding/reset",
                "/api/onboarding/resume",
                "/api/onboarding/config",
                "/api/onboarding/providers/{provider}/setup",
                "/api/onboarding/providers",
                "/api/onboarding/providers/{provider}/validate",
                "/api/onboarding/validation/enhanced",
                "/api/onboarding/summary",
                "/api/onboarding/website-analysis",
                "/api/onboarding/research-preferences",
                "/api/onboarding/business-info",
                "/api/onboarding/step4/generate-personas",
                "/api/onboarding/step4/generate-personas-async",
                "/api/onboarding/step4/persona-task/{task_id}",
                "/api/onboarding/step4/persona-latest",
                "/api/onboarding/step4/persona-save",
                "/api/onboarding/step4/assess-persona-quality",
                "/api/onboarding/step4/regenerate-persona",
                "/api/onboarding/step4/persona-options"
            ],
            "total_endpoints": 30,
            "status": "active"
        }
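Constructing the manager is what registers the routes listed above; a short sketch of the assumed usage:

# Sketch: OnboardingManager registers its routes on construction
from fastapi import FastAPI
from alwrity_utils import OnboardingManager

app = FastAPI()
onboarding = OnboardingManager(app)   # __init__ calls setup_onboarding_endpoints()
print(onboarding.get_onboarding_status()["total_endpoints"])   # reports 30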
133
backend/alwrity_utils/production_optimizer.py
Normal file
@@ -0,0 +1,133 @@
"""
Production Optimizer Module
Handles production-specific optimizations and configurations.
"""

import os
import sys
from typing import List, Dict, Any


class ProductionOptimizer:
    """Optimizes ALwrity backend for production deployment."""

    def __init__(self):
        self.production_optimizations = {
            'disable_spacy_download': False,  # Allow spaCy verification (required for persona generation)
            'disable_nltk_download': False,  # Allow NLTK verification (required for persona generation)
            'skip_linguistic_setup': False,  # Always verify linguistic models are available
            'minimal_database_setup': True,
            'skip_file_creation': True
        }

    def apply_production_optimizations(self) -> bool:
        """Apply production-specific optimizations."""
        print("🚀 Applying production optimizations...")

        # Set production environment variables
        self._set_production_env_vars()

        # Disable heavy operations
        self._disable_heavy_operations()

        # Optimize logging
        self._optimize_logging()

        print("✅ Production optimizations applied")
        return True

    def _set_production_env_vars(self) -> None:
        """Set production-specific environment variables."""
        production_vars = {
            # Note: HOST is not set here - it's auto-detected by start_backend()
            # based on deployment environment (cloud vs local)
            'PORT': '8000',
            'RELOAD': 'false',
            'LOG_LEVEL': 'INFO',
            'DEBUG': 'false',
            'PYTHONUNBUFFERED': '1',  # Ensure logs are flushed immediately
            'PYTHONDONTWRITEBYTECODE': '1'  # Don't create .pyc files
        }

        for key, value in production_vars.items():
            os.environ.setdefault(key, value)
            print(f" ✅ {key}={value}")

    def _disable_heavy_operations(self) -> None:
        """Configure operations for production startup."""
        print(" ⚡ Configuring operations for production...")

        # Note: spaCy and NLTK verification are allowed in production.
        # Models should be pre-installed during the build phase (via render.yaml or similar);
        # the setup will verify models exist without re-downloading.

        print(" ✅ Production operations configured")

    def _optimize_logging(self) -> None:
        """Optimize logging for production."""
        print(" 📝 Optimizing logging for production...")

        # Set appropriate log level
        os.environ.setdefault('LOG_LEVEL', 'INFO')

        # Disable debug logging
        os.environ.setdefault('DEBUG', 'false')

        print(" ✅ Logging optimized")

    def skip_linguistic_setup(self) -> bool:
        """Skip linguistic analysis setup in production."""
        if os.getenv('SKIP_LINGUISTIC_SETUP', 'false').lower() == 'true':
            print("⚠️ Skipping linguistic analysis setup (production mode)")
            return True
        return False

    def skip_spacy_setup(self) -> bool:
        """Skip spaCy model setup in production."""
        if os.getenv('DISABLE_SPACY_DOWNLOAD', 'false').lower() == 'true':
            print("⚠️ Skipping spaCy model setup (production mode)")
            return True
        return False

    def skip_nltk_setup(self) -> bool:
        """Skip NLTK data setup in production."""
        if os.getenv('DISABLE_NLTK_DOWNLOAD', 'false').lower() == 'true':
            print("⚠️ Skipping NLTK data setup (production mode)")
            return True
        return False

    def get_production_config(self) -> Dict[str, Any]:
        """Get production configuration settings."""
        return {
            'host': os.getenv('HOST', '0.0.0.0'),
            'port': int(os.getenv('PORT', '8000')),
            'reload': False,  # Never reload in production
            'log_level': os.getenv('LOG_LEVEL', 'info'),
            'access_log': True,
            'workers': 1,  # Single worker for Render
            'timeout_keep_alive': 30,
            'timeout_graceful_shutdown': 30
        }

    def validate_production_environment(self) -> bool:
        """Validate that the environment is ready for production."""
        print("🔍 Validating production environment...")

        # Check critical environment variables
        required_vars = ['HOST', 'PORT', 'LOG_LEVEL']
        missing_vars = []

        for var in required_vars:
            if not os.getenv(var):
                missing_vars.append(var)

        if missing_vars:
            print(f"❌ Missing environment variables: {missing_vars}")
            return False

        # Check that reload is disabled
        if os.getenv('RELOAD', 'false').lower() == 'true':
            print("⚠️ Warning: RELOAD is enabled in production")

        print("✅ Production environment validated")
        return True
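The dict returned by get_production_config maps onto uvicorn's keyword arguments (host, port, reload, log_level, access_log, workers, timeout_keep_alive, timeout_graceful_shutdown on a recent uvicorn). A hedged sketch of how it might be consumed; the "app:app" import string is an assumption.

# Sketch: feeding ProductionOptimizer's config into uvicorn at an assumed entry point
import uvicorn
from alwrity_utils import ProductionOptimizer

optimizer = ProductionOptimizer()
optimizer.apply_production_optimizations()

cfg = optimizer.get_production_config()
cfg["log_level"] = str(cfg["log_level"]).lower()  # uvicorn expects lowercase level names
uvicorn.run("app:app", **cfg)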
134
backend/alwrity_utils/rate_limiter.py
Normal file
@@ -0,0 +1,134 @@
"""
Rate Limiting Module
Handles rate limiting middleware and request tracking.
"""

import time
from collections import defaultdict
from typing import Any, Dict, List, Optional
from fastapi import Request, Response
from fastapi.responses import JSONResponse
from loguru import logger


class RateLimiter:
    """Manages rate limiting for ALwrity backend."""

    def __init__(self, window_seconds: int = 60, max_requests: int = 1000):  # Increased for development
        self.window_seconds = window_seconds
        self.max_requests = max_requests
        self.request_counts: Dict[str, List[float]] = defaultdict(list)

        # Endpoints exempt from rate limiting
        self.exempt_paths = [
            "/stream/strategies",
            "/stream/strategic-intelligence",
            "/stream/keyword-research",
            "/latest-strategy",
            "/ai-analytics",
            "/gap-analysis",
            "/calendar-events",
            # Research endpoints - exempt from rate limiting
            "/api/research",
            "/api/blog-writer",
            "/api/blog-writer/research",
            "/api/blog-writer/research/",
            "/api/blog/research/status",
            "/calendar-generation/progress",
            "/health",
            "/health/database",
        ]
        # Prefixes to exempt entire route families (keep empty; rely on specific exemptions only)
        self.exempt_prefixes = []
    def is_exempt_path(self, path: str) -> bool:
        """Check if a path is exempt from rate limiting."""
        return any(exempt_path == path or exempt_path in path for exempt_path in self.exempt_paths) or any(
            path.startswith(prefix) for prefix in self.exempt_prefixes
        )

    def clean_old_requests(self, client_ip: str, current_time: float) -> None:
        """Clean old requests from the tracking dictionary."""
        self.request_counts[client_ip] = [
            req_time for req_time in self.request_counts[client_ip]
            if current_time - req_time < self.window_seconds
        ]

    def is_rate_limited(self, client_ip: str, current_time: float) -> bool:
        """Check if a client has exceeded the rate limit."""
        self.clean_old_requests(client_ip, current_time)
        return len(self.request_counts[client_ip]) >= self.max_requests

    def add_request(self, client_ip: str, current_time: float) -> None:
        """Add a request to the tracking dictionary."""
        self.request_counts[client_ip].append(current_time)
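Note that is_exempt_path uses substring matching (exempt_path in path), so an entry like "/health" also exempts "/health/database" and any other path containing it, while the limiter itself is a simple per-IP sliding window over raw timestamps. A small standalone sketch of both behaviours (not part of the commit; the import path is assumed from this package):

import time
from alwrity_utils.rate_limiter import RateLimiter

limiter = RateLimiter(window_seconds=60, max_requests=2)

assert limiter.is_exempt_path("/api/blog-writer/research/start")  # substring match on "/api/blog-writer"

now = time.time()
limiter.add_request("203.0.113.7", now)
limiter.add_request("203.0.113.7", now + 1)
assert limiter.is_rate_limited("203.0.113.7", now + 2)        # third request inside the window is blocked
assert not limiter.is_rate_limited("203.0.113.7", now + 120)  # old timestamps fall out of the 60s window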
    def get_rate_limit_response(self) -> JSONResponse:
        """Get a rate limit exceeded response."""
        return JSONResponse(
            status_code=429,
            content={
                "detail": "Too many requests",
                "retry_after": self.window_seconds
            },
            headers={
                "Access-Control-Allow-Origin": "*",
                "Access-Control-Allow-Methods": "*",
                "Access-Control-Allow-Headers": "*"
            }
        )
    async def rate_limit_middleware(self, request: Request, call_next) -> Response:
        """Rate limiting middleware with exemptions for streaming endpoints."""
        try:
            client_ip = request.client.host if request.client else "unknown"
            current_time = time.time()
            path = request.url.path

            # Check if path is exempt from rate limiting
            if self.is_exempt_path(path):
                response = await call_next(request)
                return response

            # Check rate limit
            if self.is_rate_limited(client_ip, current_time):
                logger.warning(f"Rate limit exceeded for {client_ip}")
                return self.get_rate_limit_response()

            # Add current request
            self.add_request(client_ip, current_time)

            response = await call_next(request)
            return response

        except Exception as e:
            logger.error(f"Error in rate limiting middleware: {e}")
            # Continue without rate limiting if there's an error
            response = await call_next(request)
            return response
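The middleware is written as a bound coroutine, so it can be registered through FastAPI's standard HTTP-middleware hook. The commit does not show where registration happens; a minimal wiring sketch, with the app object and limiter construction assumed:

from fastapi import FastAPI
from alwrity_utils.rate_limiter import RateLimiter

app = FastAPI()
limiter = RateLimiter(window_seconds=60, max_requests=1000)

# Register the bound method as an HTTP middleware; every request flows through it.
app.middleware("http")(limiter.rate_limit_middleware)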
    def get_rate_limit_status(self, client_ip: str) -> Dict[str, Any]:
        """Get current rate limit status for a client."""
        current_time = time.time()
        self.clean_old_requests(client_ip, current_time)

        request_count = len(self.request_counts[client_ip])
        remaining_requests = max(0, self.max_requests - request_count)

        return {
            "client_ip": client_ip,
            "requests_in_window": request_count,
            "max_requests": self.max_requests,
            "remaining_requests": remaining_requests,
            "window_seconds": self.window_seconds,
            "is_limited": request_count >= self.max_requests
        }

    def reset_rate_limit(self, client_ip: Optional[str] = None) -> Dict[str, Any]:
        """Reset rate limit for a specific client or all clients."""
        if client_ip:
            self.request_counts[client_ip] = []
            return {"message": f"Rate limit reset for {client_ip}"}
        else:
            self.request_counts.clear()
            return {"message": "Rate limit reset for all clients"}
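get_rate_limit_status and reset_rate_limit have no route attached in this commit. A hypothetical debug endpoint could surface them like this (the path and shared limiter instance are assumptions, not part of the change):

from fastapi import FastAPI, Request
from alwrity_utils.rate_limiter import RateLimiter

app = FastAPI()
limiter = RateLimiter()

@app.get("/debug/rate-limit")  # hypothetical path, not defined by this commit
async def rate_limit_status(request: Request):
    client_ip = request.client.host if request.client else "unknown"
    return limiter.get_rate_limit_status(client_ip)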
229
backend/alwrity_utils/router_manager.py
Normal file
229
backend/alwrity_utils/router_manager.py
Normal file
@@ -0,0 +1,229 @@
"""
Router Manager Module
Handles FastAPI router inclusion and management.
"""

from fastapi import FastAPI
from loguru import logger
from typing import List, Dict, Any, Optional


class RouterManager:
    """Manages FastAPI router inclusion and organization."""

    def __init__(self, app: FastAPI):
        self.app = app
        self.included_routers = []
        self.failed_routers = []
    def include_router_safely(self, router, router_name: Optional[str] = None) -> bool:
        """Include a router safely with error handling."""
        import os
        verbose = os.getenv("ALWRITY_VERBOSE", "false").lower() == "true"

        try:
            self.app.include_router(router)
            router_name = router_name or getattr(router, 'prefix', 'unknown')
            self.included_routers.append(router_name)
            if verbose:
                logger.info(f"✅ Router included successfully: {router_name}")
            return True
        except Exception as e:
            router_name = router_name or 'unknown'
            self.failed_routers.append({"name": router_name, "error": str(e)})
            if verbose:
                logger.warning(f"❌ Router inclusion failed: {router_name} - {e}")
            return False
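Because include_router_safely swallows registration errors, a failure only lands in failed_routers instead of aborting startup. A small sketch of registering a single router through it (the example router is hypothetical; only RouterManager comes from this commit):

from fastapi import APIRouter, FastAPI
from alwrity_utils.router_manager import RouterManager  # import path assumed from this package

app = FastAPI()
manager = RouterManager(app)

example_router = APIRouter(prefix="/example")  # hypothetical router, not part of the commit

@example_router.get("/ping")
async def ping():
    return {"status": "ok"}

manager.include_router_safely(example_router, "example")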
    def include_core_routers(self) -> bool:
        """Include core application routers."""
        import os
        verbose = os.getenv("ALWRITY_VERBOSE", "false").lower() == "true"

        try:
            if verbose:
                logger.info("Including core routers...")

            # Component logic router
            from api.component_logic import router as component_logic_router
            self.include_router_safely(component_logic_router, "component_logic")

            # Subscription router
            from api.subscription_api import router as subscription_router
            self.include_router_safely(subscription_router, "subscription")

            # Step 3 Research router (core onboarding functionality)
            from api.onboarding_utils.step3_routes import router as step3_research_router
            self.include_router_safely(step3_research_router, "step3_research")

            # GSC router
            from routers.gsc_auth import router as gsc_auth_router
            self.include_router_safely(gsc_auth_router, "gsc_auth")

            # WordPress router
            from routers.wordpress_oauth import router as wordpress_oauth_router
            self.include_router_safely(wordpress_oauth_router, "wordpress_oauth")

            # Bing Webmaster router
            from routers.bing_oauth import router as bing_oauth_router
            self.include_router_safely(bing_oauth_router, "bing_oauth")

            # Bing Analytics router
            from routers.bing_analytics import router as bing_analytics_router
            self.include_router_safely(bing_analytics_router, "bing_analytics")

            # Bing Analytics Storage router
            from routers.bing_analytics_storage import router as bing_analytics_storage_router
            self.include_router_safely(bing_analytics_storage_router, "bing_analytics_storage")

            # SEO tools router
            from routers.seo_tools import router as seo_tools_router
            self.include_router_safely(seo_tools_router, "seo_tools")

            # Facebook Writer router
            from api.facebook_writer.routers import facebook_router
            self.include_router_safely(facebook_router, "facebook_writer")

            # LinkedIn routers
            from routers.linkedin import router as linkedin_router
            self.include_router_safely(linkedin_router, "linkedin")

            from api.linkedin_image_generation import router as linkedin_image_router
            self.include_router_safely(linkedin_image_router, "linkedin_image")

            # Brainstorm router
            from api.brainstorm import router as brainstorm_router
            self.include_router_safely(brainstorm_router, "brainstorm")

            # Hallucination detector and writing assistant
            from api.hallucination_detector import router as hallucination_detector_router
            self.include_router_safely(hallucination_detector_router, "hallucination_detector")

            from api.writing_assistant import router as writing_assistant_router
            self.include_router_safely(writing_assistant_router, "writing_assistant")

            # Content planning and user data
            from api.content_planning.api.router import router as content_planning_router
            self.include_router_safely(content_planning_router, "content_planning")

            from api.user_data import router as user_data_router
            self.include_router_safely(user_data_router, "user_data")

            from api.user_environment import router as user_environment_router
            self.include_router_safely(user_environment_router, "user_environment")

            # Strategy copilot
            from api.content_planning.strategy_copilot import router as strategy_copilot_router
            self.include_router_safely(strategy_copilot_router, "strategy_copilot")

            # Error logging router
            from routers.error_logging import router as error_logging_router
            self.include_router_safely(error_logging_router, "error_logging")

            # Frontend environment manager router
            from routers.frontend_env_manager import router as frontend_env_router
            self.include_router_safely(frontend_env_router, "frontend_env_manager")

            # Platform analytics router
            try:
                from routers.platform_analytics import router as platform_analytics_router
                self.include_router_safely(platform_analytics_router, "platform_analytics")
                logger.info("✅ Platform analytics router included successfully")
            except Exception as e:
                logger.error(f"❌ Failed to include platform analytics router: {e}")
                # Continue with other routers

            # Bing insights router
            try:
                from routers.bing_insights import router as bing_insights_router
                self.include_router_safely(bing_insights_router, "bing_insights")
                logger.info("✅ Bing insights router included successfully")
            except Exception as e:
                logger.error(f"❌ Failed to include Bing insights router: {e}")
                # Continue with other routers

            # Background jobs router
            try:
                from routers.background_jobs import router as background_jobs_router
                self.include_router_safely(background_jobs_router, "background_jobs")
                logger.info("✅ Background jobs router included successfully")
            except Exception as e:
                logger.error(f"❌ Failed to include Background jobs router: {e}")
                # Continue with other routers

            logger.info("✅ Core routers included successfully")
            return True

        except Exception as e:
            logger.error(f"❌ Error including core routers: {e}")
            return False
    def include_optional_routers(self) -> bool:
        """Include optional routers with error handling."""
        try:
            logger.info("Including optional routers...")

            # AI Blog Writer router
            try:
                from api.blog_writer.router import router as blog_writer_router
                self.include_router_safely(blog_writer_router, "blog_writer")
            except Exception as e:
                logger.warning(f"AI Blog Writer router not mounted: {e}")

            # Story Writer router
            try:
                from api.story_writer.router import router as story_writer_router
                self.include_router_safely(story_writer_router, "story_writer")
            except Exception as e:
                logger.warning(f"Story Writer router not mounted: {e}")

            # Wix Integration router
            try:
                from api.wix_routes import router as wix_router
                self.include_router_safely(wix_router, "wix")
            except Exception as e:
                logger.warning(f"Wix Integration router not mounted: {e}")

            # Blog Writer SEO Analysis router
            try:
                from api.blog_writer.seo_analysis import router as blog_seo_analysis_router
                self.include_router_safely(blog_seo_analysis_router, "blog_seo_analysis")
            except Exception as e:
                logger.warning(f"Blog Writer SEO Analysis router not mounted: {e}")

            # Persona router
            try:
                from api.persona_routes import router as persona_router
                self.include_router_safely(persona_router, "persona")
            except Exception as e:
                logger.warning(f"Persona router not mounted: {e}")

            # Stability AI routers
            try:
                from routers.stability import router as stability_router
                self.include_router_safely(stability_router, "stability")

                from routers.stability_advanced import router as stability_advanced_router
                self.include_router_safely(stability_advanced_router, "stability_advanced")

                from routers.stability_admin import router as stability_admin_router
                self.include_router_safely(stability_admin_router, "stability_admin")
            except Exception as e:
                logger.warning(f"Stability AI routers not mounted: {e}")

            logger.info("✅ Optional routers processed")
            return True

        except Exception as e:
            logger.error(f"❌ Error including optional routers: {e}")
            return False
    def get_router_status(self) -> Dict[str, Any]:
        """Get the status of router inclusion."""
        return {
            "included_routers": self.included_routers,
            "failed_routers": self.failed_routers,
            "total_included": len(self.included_routers),
            "total_failed": len(self.failed_routers)
        }
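Taken together, the manager is meant to be driven once at startup and then queried for a summary. A minimal sketch of that flow under stated assumptions (how the real application calls it is not shown in this commit):

from fastapi import FastAPI
from alwrity_utils.router_manager import RouterManager  # import path assumed from this package
from loguru import logger

app = FastAPI()
manager = RouterManager(app)

manager.include_core_routers()
manager.include_optional_routers()

status = manager.get_router_status()
logger.info(f"Routers mounted: {status['total_included']}, failed: {status['total_failed']}")
for failure in status["failed_routers"]:
    logger.warning(f"Router {failure['name']} failed: {failure['error']}")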