Base code
backend/scripts/add_ai_text_generation_limit_column.py (new file, 146 lines)
@@ -0,0 +1,146 @@
"""
Migration Script: Add ai_text_generation_calls_limit column to subscription_plans table.

This adds the unified AI text generation limit column that applies to all LLM providers
(gemini, openai, anthropic, mistral) instead of per-provider limits.
"""

import sys
import os
from pathlib import Path
from datetime import datetime, timezone

# Add the backend directory to Python path
backend_dir = Path(__file__).parent.parent
sys.path.insert(0, str(backend_dir))

from sqlalchemy import create_engine, text, inspect
from sqlalchemy.orm import sessionmaker
from loguru import logger

from models.subscription_models import SubscriptionPlan, SubscriptionTier
from services.database import DATABASE_URL

def add_ai_text_generation_limit_column():
    """Add ai_text_generation_calls_limit column to subscription_plans table."""

    try:
        engine = create_engine(DATABASE_URL, echo=False)
        SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        db = SessionLocal()

        try:
            # Check if column already exists
            inspector = inspect(engine)
            columns = [col['name'] for col in inspector.get_columns('subscription_plans')]

            if 'ai_text_generation_calls_limit' in columns:
                logger.info("✅ Column 'ai_text_generation_calls_limit' already exists. Skipping migration.")
                return True

            logger.info("📋 Adding 'ai_text_generation_calls_limit' column to subscription_plans table...")

            # Add the column (SQLite compatible)
            alter_query = text("""
                ALTER TABLE subscription_plans
                ADD COLUMN ai_text_generation_calls_limit INTEGER DEFAULT 0
            """)

            db.execute(alter_query)
            db.commit()

            logger.info("✅ Column added successfully!")

            # Update existing plans with unified limits based on their current limits
            logger.info("\n🔄 Updating existing subscription plans with unified limits...")

            plans = db.query(SubscriptionPlan).all()
            updated_count = 0

            for plan in plans:
                # Use the first non-zero LLM provider limit as the unified limit,
                # falling back to gemini_calls_limit by default
                unified_limit = (
                    plan.ai_text_generation_calls_limit or
                    plan.gemini_calls_limit or
                    plan.openai_calls_limit or
                    plan.anthropic_calls_limit or
                    plan.mistral_calls_limit or
                    0
                )

                # For the Basic plan, ensure it is set to 10 (from our recent update)
                if plan.tier == SubscriptionTier.BASIC:
                    unified_limit = 10

                if plan.ai_text_generation_calls_limit != unified_limit:
                    plan.ai_text_generation_calls_limit = unified_limit
                    plan.updated_at = datetime.now(timezone.utc)
                    updated_count += 1

                    logger.info(f"   ✅ Updated {plan.name} ({plan.tier.value}): ai_text_generation_calls_limit = {unified_limit}")
                else:
                    logger.info(f"   ℹ️ {plan.name} ({plan.tier.value}): already set to {unified_limit}")

            if updated_count > 0:
                db.commit()
                logger.info(f"\n✅ Updated {updated_count} subscription plan(s)")
            else:
                logger.info("\nℹ️ No plans needed updating")

            # Display summary
            logger.info("\n" + "="*60)
            logger.info("MIGRATION SUMMARY")
            logger.info("="*60)

            all_plans = db.query(SubscriptionPlan).all()
            for plan in all_plans:
                logger.info(f"\n{plan.name} ({plan.tier.value}):")
                logger.info(f"   Unified AI Text Gen Limit: {plan.ai_text_generation_calls_limit if plan.ai_text_generation_calls_limit else 'Not set'}")
                logger.info(f"   Legacy Limits: gemini={plan.gemini_calls_limit}, mistral={plan.mistral_calls_limit}")

            logger.info("\n" + "="*60)
            logger.info("✅ Migration completed successfully!")

            return True

        except Exception as e:
            db.rollback()
            logger.error(f"❌ Error during migration: {e}")
            import traceback
            logger.error(traceback.format_exc())
            raise

        finally:
            db.close()

    except Exception as e:
        logger.error(f"❌ Failed to connect to database: {e}")
        import traceback
        logger.error(traceback.format_exc())
        return False

if __name__ == "__main__":
    logger.info("🚀 Starting ai_text_generation_calls_limit column migration...")
    logger.info("="*60)
    logger.info("This will add the unified AI text generation limit column")
    logger.info("and update existing plans with appropriate values.")
    logger.info("="*60)

    try:
        success = add_ai_text_generation_limit_column()

        if success:
            logger.info("\n✅ Script completed successfully!")
            sys.exit(0)
        else:
            logger.error("\n❌ Script failed!")
            sys.exit(1)

    except KeyboardInterrupt:
        logger.info("\n⚠️ Script cancelled by user")
        sys.exit(1)
    except Exception as e:
        logger.error(f"\n❌ Unexpected error: {e}")
        sys.exit(1)
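Note: a minimal post-migration check, assuming the same DATABASE_URL configuration the script above uses; this is a sketch for reviewers, not part of the commit:

from sqlalchemy import create_engine, inspect
from services.database import DATABASE_URL  # same import the migration uses

engine = create_engine(DATABASE_URL)
columns = [c["name"] for c in inspect(engine).get_columns("subscription_plans")]
print("ai_text_generation_calls_limit present:", "ai_text_generation_calls_limit" in columns)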
backend/scripts/add_brand_analysis_columns.py (new file, 82 lines)
@@ -0,0 +1,82 @@
"""
Add brand_analysis and content_strategy_insights columns to website_analyses table.
These columns store rich brand insights and SWOT analysis from Step 2.
"""

import sys
import os
from pathlib import Path
from loguru import logger

# Add parent directory to path
sys.path.append(str(Path(__file__).parent.parent))

from sqlalchemy import text, inspect
from services.database import SessionLocal, engine


def add_brand_analysis_columns():
    """Add brand_analysis and content_strategy_insights columns if they don't exist."""

    db = SessionLocal()

    try:
        # Check if columns already exist
        inspector = inspect(engine)
        columns = [col['name'] for col in inspector.get_columns('website_analyses')]

        brand_analysis_exists = 'brand_analysis' in columns
        content_strategy_insights_exists = 'content_strategy_insights' in columns

        if brand_analysis_exists and content_strategy_insights_exists:
            logger.info("✅ Columns already exist. No migration needed.")
            return True

        logger.info("🔄 Starting migration to add brand analysis columns...")

        # Add brand_analysis column if missing
        if not brand_analysis_exists:
            logger.info("Adding brand_analysis column...")
            db.execute(text("""
                ALTER TABLE website_analyses
                ADD COLUMN brand_analysis JSON
            """))
            logger.success("✅ Added brand_analysis column")

        # Add content_strategy_insights column if missing
        if not content_strategy_insights_exists:
            logger.info("Adding content_strategy_insights column...")
            db.execute(text("""
                ALTER TABLE website_analyses
                ADD COLUMN content_strategy_insights JSON
            """))
            logger.success("✅ Added content_strategy_insights column")

        db.commit()
        logger.success("🎉 Migration completed successfully!")
        return True

    except Exception as e:
        logger.error(f"❌ Migration failed: {e}")
        db.rollback()
        return False
    finally:
        db.close()


if __name__ == "__main__":
    logger.info("=" * 60)
    logger.info("DATABASE MIGRATION: Add Brand Analysis Columns")
    logger.info("=" * 60)

    success = add_brand_analysis_columns()

    if success:
        logger.success("\n✅ Migration completed successfully!")
        logger.info("The website_analyses table now includes:")
        logger.info("  - brand_analysis: Brand voice, values, positioning")
        logger.info("  - content_strategy_insights: SWOT analysis, recommendations")
    else:
        logger.error("\n❌ Migration failed. Please check the error messages above.")
        sys.exit(1)
backend/scripts/cap_basic_plan_usage.py (new file, 210 lines)
@@ -0,0 +1,210 @@
"""
Standalone script to cap usage counters at new Basic plan limits.

This preserves historical usage data but caps it at the new limits so users
can continue making new calls within their limits.
"""

import sys
import os
from pathlib import Path
from datetime import datetime, timezone

# Add the backend directory to Python path
backend_dir = Path(__file__).parent.parent
sys.path.insert(0, str(backend_dir))

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from loguru import logger

from models.subscription_models import SubscriptionPlan, SubscriptionTier, UserSubscription, UsageSummary, UsageStatus
from services.database import DATABASE_URL
from services.subscription import PricingService

def cap_basic_plan_usage():
    """Cap usage counters at new Basic plan limits."""

    try:
        engine = create_engine(DATABASE_URL, echo=False)
        SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        db = SessionLocal()

        try:
            # Find Basic plan
            basic_plan = db.query(SubscriptionPlan).filter(
                SubscriptionPlan.tier == SubscriptionTier.BASIC
            ).first()

            if not basic_plan:
                logger.error("❌ Basic plan not found in database!")
                return False

            # New limits
            new_call_limit = basic_plan.gemini_calls_limit  # Should be 10
            new_token_limit = basic_plan.gemini_tokens_limit  # Should be 2000
            new_image_limit = basic_plan.stability_calls_limit  # Should be 5

            logger.info("📋 Basic Plan Limits:")
            logger.info(f"   Calls: {new_call_limit}")
            logger.info(f"   Tokens: {new_token_limit}")
            logger.info(f"   Images: {new_image_limit}")

            # Get all Basic plan users
            user_subscriptions = db.query(UserSubscription).filter(
                UserSubscription.plan_id == basic_plan.id,
                UserSubscription.is_active == True
            ).all()

            logger.info(f"\n👥 Found {len(user_subscriptions)} Basic plan user(s)")

            pricing_service = PricingService(db)
            capped_count = 0

            for sub in user_subscriptions:
                try:
                    # Get current billing period for this user
                    current_period = pricing_service.get_current_billing_period(sub.user_id) or datetime.now(timezone.utc).strftime("%Y-%m")

                    # Find usage summary for current period
                    usage_summary = db.query(UsageSummary).filter(
                        UsageSummary.user_id == sub.user_id,
                        UsageSummary.billing_period == current_period
                    ).first()

                    if usage_summary:
                        # Store old values for logging
                        old_gemini = usage_summary.gemini_calls or 0
                        old_mistral = usage_summary.mistral_calls or 0
                        old_openai = usage_summary.openai_calls or 0
                        old_anthropic = usage_summary.anthropic_calls or 0
                        old_tokens = max(
                            usage_summary.gemini_tokens or 0,
                            usage_summary.openai_tokens or 0,
                            usage_summary.anthropic_tokens or 0,
                            usage_summary.mistral_tokens or 0
                        )
                        old_images = usage_summary.stability_calls or 0

                        # Check if capping is needed
                        needs_cap = (
                            old_gemini > new_call_limit or
                            old_mistral > new_call_limit or
                            old_openai > new_call_limit or
                            old_anthropic > new_call_limit or
                            old_images > new_image_limit or
                            old_tokens > new_token_limit
                        )

                        if needs_cap:
                            # Cap LLM provider counters at new limits
                            usage_summary.gemini_calls = min(old_gemini, new_call_limit)
                            usage_summary.mistral_calls = min(old_mistral, new_call_limit)
                            usage_summary.openai_calls = min(old_openai, new_call_limit)
                            usage_summary.anthropic_calls = min(old_anthropic, new_call_limit)

                            # Cap token counters at new limits
                            usage_summary.gemini_tokens = min(usage_summary.gemini_tokens or 0, new_token_limit)
                            usage_summary.openai_tokens = min(usage_summary.openai_tokens or 0, new_token_limit)
                            usage_summary.anthropic_tokens = min(usage_summary.anthropic_tokens or 0, new_token_limit)
                            usage_summary.mistral_tokens = min(usage_summary.mistral_tokens or 0, new_token_limit)

                            # Cap image counter at new limit
                            usage_summary.stability_calls = min(old_images, new_image_limit)

                            # Recalculate totals based on capped values
                            total_capped_calls = (
                                usage_summary.gemini_calls +
                                usage_summary.mistral_calls +
                                usage_summary.openai_calls +
                                usage_summary.anthropic_calls +
                                usage_summary.stability_calls
                            )
                            total_capped_tokens = (
                                usage_summary.gemini_tokens +
                                usage_summary.mistral_tokens +
                                usage_summary.openai_tokens +
                                usage_summary.anthropic_tokens
                            )

                            usage_summary.total_calls = total_capped_calls
                            usage_summary.total_tokens = total_capped_tokens

                            # Reset status to active to allow new calls
                            usage_summary.usage_status = UsageStatus.ACTIVE
                            usage_summary.updated_at = datetime.now(timezone.utc)

                            db.commit()
                            capped_count += 1

                            logger.info(f"\n✅ Capped usage for user {sub.user_id} (period {current_period}):")
                            logger.info(f"   Gemini Calls: {old_gemini} → {usage_summary.gemini_calls} (limit: {new_call_limit})")
                            logger.info(f"   Mistral Calls: {old_mistral} → {usage_summary.mistral_calls} (limit: {new_call_limit})")
                            logger.info(f"   OpenAI Calls: {old_openai} → {usage_summary.openai_calls} (limit: {new_call_limit})")
                            logger.info(f"   Anthropic Calls: {old_anthropic} → {usage_summary.anthropic_calls} (limit: {new_call_limit})")
                            logger.info(f"   Tokens: {old_tokens} → {max(usage_summary.gemini_tokens, usage_summary.mistral_tokens)} (limit: {new_token_limit})")
                            logger.info(f"   Images: {old_images} → {usage_summary.stability_calls} (limit: {new_image_limit})")
                        else:
                            logger.info(f"   ℹ️ User {sub.user_id} usage is within limits - no capping needed")
                    else:
                        logger.info(f"   ℹ️ No usage summary found for user {sub.user_id} (period {current_period})")

                except Exception as cap_error:
                    logger.error(f"   ❌ Error capping usage for user {sub.user_id}: {cap_error}")
                    import traceback
                    logger.error(traceback.format_exc())
                    db.rollback()

            if capped_count > 0:
                logger.info(f"\n✅ Successfully capped usage for {capped_count} user(s)")
                logger.info("   Historical usage preserved, but capped at new limits")
                logger.info("   Users can now make new calls within their limits")
            else:
                logger.info("\nℹ️ No usage counters needed capping")

            logger.info("\n" + "="*60)
            logger.info("CAPPING COMPLETE")
            logger.info("="*60)

            return True

        except Exception as e:
            db.rollback()
            logger.error(f"❌ Error capping usage: {e}")
            import traceback
            logger.error(traceback.format_exc())
            raise

        finally:
            db.close()

    except Exception as e:
        logger.error(f"❌ Failed to connect to database: {e}")
        import traceback
        logger.error(traceback.format_exc())
        return False

if __name__ == "__main__":
    logger.info("🚀 Starting Basic plan usage capping...")
    logger.info("="*60)
    logger.info("This will cap usage counters at new Basic plan limits")
    logger.info("while preserving historical usage data.")
    logger.info("="*60)

    try:
        success = cap_basic_plan_usage()

        if success:
            logger.info("\n✅ Script completed successfully!")
            sys.exit(0)
        else:
            logger.error("\n❌ Script failed!")
            sys.exit(1)

    except KeyboardInterrupt:
        logger.info("\n⚠️ Script cancelled by user")
        sys.exit(1)
    except Exception as e:
        logger.error(f"\n❌ Unexpected error: {e}")
        sys.exit(1)
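Note: every counter above is capped independently with the same rule. A minimal sketch of that rule in isolation (function name and values are illustrative, not from the codebase):

def cap_counter(current_value, limit):
    """Cap a usage counter at a plan limit; missing counters count as zero."""
    return min(current_value or 0, limit)

assert cap_counter(37, 10) == 10   # over the limit -> capped
assert cap_counter(4, 10) == 4     # within the limit -> unchanged
assert cap_counter(None, 10) == 0  # missing counter -> treated as zero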
backend/scripts/check_database_tables.py (new file, 48 lines)
@@ -0,0 +1,48 @@
#!/usr/bin/env python3
"""
Script to check database tables and debug foreign key issues.
"""

import sys
import os

# Add the backend directory to the Python path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from services.database import engine
from sqlalchemy import inspect
from loguru import logger

def check_database_tables():
    """Check what tables exist in the database"""
    try:
        logger.info("Checking database tables...")

        # Get inspector
        inspector = inspect(engine)

        # Get all table names
        table_names = inspector.get_table_names()

        logger.info(f"Found {len(table_names)} tables:")
        for table_name in sorted(table_names):
            logger.info(f"  - {table_name}")

        # Check if enhanced_content_strategies exists
        if 'enhanced_content_strategies' in table_names:
            logger.info("✅ enhanced_content_strategies table exists!")

            # Get columns for this table
            columns = inspector.get_columns('enhanced_content_strategies')
            logger.info("Columns in enhanced_content_strategies:")
            for column in columns:
                logger.info(f"  - {column['name']}: {column['type']}")
        else:
            logger.error("❌ enhanced_content_strategies table does not exist!")

    except Exception as e:
        logger.error(f"❌ Error checking database tables: {e}")
        sys.exit(1)

if __name__ == "__main__":
    check_database_tables()
backend/scripts/check_wix_config.py (new file, 143 lines)
@@ -0,0 +1,143 @@
"""
Quick diagnostic script to check Wix configuration.

Run this to verify your WIX_API_KEY is properly loaded.

Usage:
    python backend/scripts/check_wix_config.py
"""

import os
import sys
from pathlib import Path

# Add parent directory to path
sys.path.insert(0, str(Path(__file__).parent.parent))

def check_wix_config():
    """Check if Wix configuration is properly set up."""

    print("\n" + "="*60)
    print("🔍 WIX CONFIGURATION DIAGNOSTIC")
    print("="*60 + "\n")

    # 1. Check if .env file exists
    env_locations = [
        Path.cwd() / ".env",
        Path.cwd() / "backend" / ".env",
        Path.cwd() / ".env.local",
    ]

    print("📁 Checking for .env files:")
    env_file_found = False
    for env_path in env_locations:
        exists = env_path.exists()
        status = "✅ FOUND" if exists else "❌ NOT FOUND"
        print(f"   {status}: {env_path}")
        if exists:
            env_file_found = True

    if not env_file_found:
        print("\n⚠️ WARNING: No .env file found!")
        print("   Create a .env file in your project root.")

    print("\n" + "-"*60 + "\n")

    # 2. Try loading .env file
    try:
        from dotenv import load_dotenv
        load_dotenv()
        print("✅ dotenv loaded successfully")
    except ImportError:
        print("❌ python-dotenv not installed")
        print("   Install: pip install python-dotenv")
    except Exception as e:
        print(f"⚠️ Error loading .env: {e}")

    print("\n" + "-"*60 + "\n")

    # 3. Check WIX_API_KEY environment variable
    print("🔑 Checking WIX_API_KEY environment variable:")
    api_key = os.getenv('WIX_API_KEY')

    if not api_key:
        print("   ❌ NOT FOUND")
        print("\n⚠️ CRITICAL: WIX_API_KEY is not set!")
        print("\nTo fix:")
        print("   1. Add this line to your .env file:")
        print("      WIX_API_KEY=your_api_key_from_wix_dashboard")
        print("   2. Restart your backend server")
        print("   3. Run this script again to verify")
        return False

    print("   ✅ FOUND")
    print(f"   Length: {len(api_key)} characters")
    print(f"   Preview: {api_key[:30]}...")

    # 4. Validate API key format
    print("\n" + "-"*60 + "\n")
    print("🔍 Validating API key format:")

    if api_key.startswith("JWS."):
        print("   ✅ Starts with 'JWS.' (correct format)")
    else:
        print(f"   ⚠️ Doesn't start with 'JWS.' (got: {api_key[:10]}...)")
        print("      This might not be a valid Wix API key")

    if len(api_key) > 200:
        print(f"   ✅ Length looks correct ({len(api_key)} chars)")
    else:
        print(f"   ⚠️ API key seems too short ({len(api_key)} chars)")
        print("      Wix API keys are typically 500+ characters")

    dot_count = api_key.count('.')
    print(f"   📊 Contains {dot_count} dots (JWT tokens have 2+ dots)")

    # 5. Test import of Wix services
    print("\n" + "-"*60 + "\n")
    print("📦 Testing Wix service imports:")

    try:
        from services.integrations.wix.auth_utils import get_wix_api_key
        test_key = get_wix_api_key()

        if test_key:
            print("   ✅ auth_utils.get_wix_api_key() works")
            print(f"   ✅ Returned key length: {len(test_key)}")
            print(f"   ✅ Keys match: {test_key == api_key}")
        else:
            print("   ❌ auth_utils.get_wix_api_key() returned None")
            print("      Even though os.getenv('WIX_API_KEY') found it!")
            print("      This indicates an environment loading issue.")
    except Exception as e:
        print(f"   ❌ Error importing: {e}")

    # 6. Final summary
    print("\n" + "="*60)
    print("📋 SUMMARY")
    print("="*60 + "\n")

    if api_key and len(api_key) > 200 and api_key.startswith("JWS."):
        print("✅ Configuration looks GOOD!")
        print("\nNext steps:")
        print("   1. Restart your backend server")
        print("   2. Try publishing a blog post")
        print("   3. Check logs for 'Using API key' messages")
        print("   4. Verify no 403 Forbidden errors")
    else:
        print("❌ Configuration has ISSUES!")
        print("\nPlease review the warnings above and:")
        print("   1. Ensure WIX_API_KEY is set in your .env file")
        print("   2. Verify the API key is correct (from Wix Dashboard)")
        print("   3. Restart your backend server")
        print("   4. Run this script again")

    print("\n" + "="*60 + "\n")

    return bool(api_key)


if __name__ == "__main__":
    success = check_wix_config()
    sys.exit(0 if success else 1)
backend/scripts/cleanup_alpha_plans.py (new file, 247 lines)
@@ -0,0 +1,247 @@
"""
Script to remove Alpha subscription plans and update limits for production testing.
Only keeps: Free, Basic, Pro, Enterprise with updated feature limits.
"""

import sys
import os
from pathlib import Path

# Add the backend directory to Python path
backend_dir = Path(__file__).parent.parent
sys.path.insert(0, str(backend_dir))

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from loguru import logger

from models.subscription_models import SubscriptionPlan, SubscriptionTier
from services.database import DATABASE_URL

def cleanup_alpha_plans():
    """Remove alpha subscription plans and update limits."""

    try:
        engine = create_engine(DATABASE_URL, echo=True)
        SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        db = SessionLocal()

        try:
            # Delete all plans with "Alpha" in the name
            alpha_plans = db.query(SubscriptionPlan).filter(
                SubscriptionPlan.name.like('%Alpha%')
            ).all()

            for plan in alpha_plans:
                logger.info(f"Deleting Alpha plan: {plan.name}")
                db.delete(plan)

            db.commit()
            logger.info(f"✅ Deleted {len(alpha_plans)} Alpha plans")

            # Update existing plans with new limits
            logger.info("Updating plan limits...")

            # Free Plan - Blog, LinkedIn, Facebook writers + Text & Image only
            free_plan = db.query(SubscriptionPlan).filter(
                SubscriptionPlan.tier == SubscriptionTier.FREE
            ).first()

            if free_plan:
                free_plan.name = "Free"
                free_plan.description = "Perfect for trying ALwrity with Blog, LinkedIn & Facebook writers"
                free_plan.gemini_calls_limit = 100
                free_plan.openai_calls_limit = 50
                free_plan.anthropic_calls_limit = 0
                free_plan.mistral_calls_limit = 50
                free_plan.tavily_calls_limit = 20
                free_plan.serper_calls_limit = 20
                free_plan.metaphor_calls_limit = 10
                free_plan.firecrawl_calls_limit = 10
                free_plan.stability_calls_limit = 10  # Image generation
                free_plan.gemini_tokens_limit = 100000
                free_plan.monthly_cost_limit = 5.0
                free_plan.features = [
                    "blog_writer",
                    "linkedin_writer",
                    "facebook_writer",
                    "text_generation",
                    "image_generation",
                    "wix_integration",
                    "wordpress_integration",
                    "gsc_integration"
                ]
                logger.info("✅ Updated Free plan")

            # Basic Plan - Blog, LinkedIn, Facebook writers + Text & Image only
            basic_plan = db.query(SubscriptionPlan).filter(
                SubscriptionPlan.tier == SubscriptionTier.BASIC
            ).first()

            if basic_plan:
                basic_plan.name = "Basic"
                basic_plan.description = "Great for solopreneurs with Blog, LinkedIn & Facebook writers"
                basic_plan.price_monthly = 29.0
                basic_plan.price_yearly = 278.0  # ~20% discount
                basic_plan.gemini_calls_limit = 500
                basic_plan.openai_calls_limit = 250
                basic_plan.anthropic_calls_limit = 100
                basic_plan.mistral_calls_limit = 250
                basic_plan.tavily_calls_limit = 100
                basic_plan.serper_calls_limit = 100
                basic_plan.metaphor_calls_limit = 50
                basic_plan.firecrawl_calls_limit = 50
                basic_plan.stability_calls_limit = 50  # Image generation
                basic_plan.gemini_tokens_limit = 500000
                basic_plan.openai_tokens_limit = 250000
                basic_plan.monthly_cost_limit = 25.0
                basic_plan.features = [
                    "blog_writer",
                    "linkedin_writer",
                    "facebook_writer",
                    "text_generation",
                    "image_generation",
                    "wix_integration",
                    "wordpress_integration",
                    "gsc_integration",
                    "priority_support"
                ]
                logger.info("✅ Updated Basic plan")

            # Pro Plan - 6 Social Platforms + Website Management + Text, Image, Audio, Video
            pro_plan = db.query(SubscriptionPlan).filter(
                SubscriptionPlan.tier == SubscriptionTier.PRO
            ).first()

            if pro_plan:
                pro_plan.name = "Pro"
                pro_plan.description = "Perfect for businesses with 6 social platforms & multimodal AI"
                pro_plan.price_monthly = 79.0
                pro_plan.price_yearly = 758.0  # ~20% discount
                pro_plan.gemini_calls_limit = 2000
                pro_plan.openai_calls_limit = 1000
                pro_plan.anthropic_calls_limit = 500
                pro_plan.mistral_calls_limit = 1000
                pro_plan.tavily_calls_limit = 500
                pro_plan.serper_calls_limit = 500
                pro_plan.metaphor_calls_limit = 250
                pro_plan.firecrawl_calls_limit = 250
                pro_plan.stability_calls_limit = 200  # Image generation
                pro_plan.gemini_tokens_limit = 2000000
                pro_plan.openai_tokens_limit = 1000000
                pro_plan.anthropic_tokens_limit = 500000
                pro_plan.monthly_cost_limit = 100.0
                pro_plan.features = [
                    "blog_writer",
                    "linkedin_writer",
                    "facebook_writer",
                    "instagram_writer",
                    "twitter_writer",
                    "tiktok_writer",
                    "youtube_writer",
                    "text_generation",
                    "image_generation",
                    "audio_generation",
                    "video_generation",
                    "wix_integration",
                    "wordpress_integration",
                    "gsc_integration",
                    "website_management",
                    "content_scheduling",
                    "advanced_analytics",
                    "priority_support"
                ]
                logger.info("✅ Updated Pro plan")

            # Enterprise Plan - Unlimited with all features
            enterprise_plan = db.query(SubscriptionPlan).filter(
                SubscriptionPlan.tier == SubscriptionTier.ENTERPRISE
            ).first()

            if enterprise_plan:
                enterprise_plan.name = "Enterprise"
                enterprise_plan.description = "For large teams with unlimited usage & custom integrations"
                enterprise_plan.price_monthly = 199.0
                enterprise_plan.price_yearly = 1908.0  # ~20% discount
                enterprise_plan.gemini_calls_limit = 0  # Unlimited
                enterprise_plan.openai_calls_limit = 0
                enterprise_plan.anthropic_calls_limit = 0
                enterprise_plan.mistral_calls_limit = 0
                enterprise_plan.tavily_calls_limit = 0
                enterprise_plan.serper_calls_limit = 0
                enterprise_plan.metaphor_calls_limit = 0
                enterprise_plan.firecrawl_calls_limit = 0
                enterprise_plan.stability_calls_limit = 0
                enterprise_plan.gemini_tokens_limit = 0
                enterprise_plan.openai_tokens_limit = 0
                enterprise_plan.anthropic_tokens_limit = 0
                enterprise_plan.mistral_tokens_limit = 0
                enterprise_plan.monthly_cost_limit = 0.0  # Unlimited
                enterprise_plan.features = [
                    "blog_writer",
                    "linkedin_writer",
                    "facebook_writer",
                    "instagram_writer",
                    "twitter_writer",
                    "tiktok_writer",
                    "youtube_writer",
                    "text_generation",
                    "image_generation",
                    "audio_generation",
                    "video_generation",
                    "wix_integration",
                    "wordpress_integration",
                    "gsc_integration",
                    "website_management",
                    "content_scheduling",
                    "advanced_analytics",
                    "custom_integrations",
                    "dedicated_account_manager",
                    "white_label",
                    "priority_support"
                ]
                logger.info("✅ Updated Enterprise plan")

            db.commit()
            logger.info("✅ All plans updated successfully!")

            # Display summary
            logger.info("\n" + "="*60)
            logger.info("SUBSCRIPTION PLANS SUMMARY")
            logger.info("="*60)

            all_plans = db.query(SubscriptionPlan).filter(
                SubscriptionPlan.is_active == True
            ).order_by(SubscriptionPlan.price_monthly).all()

            for plan in all_plans:
                logger.info(f"\n{plan.name} ({plan.tier.value})")
                logger.info(f"  Price: ${plan.price_monthly}/mo, ${plan.price_yearly}/yr")
                logger.info(f"  Gemini: {plan.gemini_calls_limit if plan.gemini_calls_limit > 0 else 'Unlimited'} calls/month")
                logger.info(f"  OpenAI: {plan.openai_calls_limit if plan.openai_calls_limit > 0 else 'Unlimited'} calls/month")
                logger.info(f"  Research: {plan.tavily_calls_limit if plan.tavily_calls_limit > 0 else 'Unlimited'} searches/month")
                logger.info(f"  Images: {plan.stability_calls_limit if plan.stability_calls_limit > 0 else 'Unlimited'} images/month")
                logger.info(f"  Features: {', '.join(plan.features or [])}")

            logger.info("\n" + "="*60)

        finally:
            db.close()

    except Exception as e:
        logger.error(f"❌ Error cleaning up plans: {e}")
        import traceback
        logger.error(traceback.format_exc())
        raise

if __name__ == "__main__":
    logger.info("🚀 Starting subscription plans cleanup...")

    try:
        cleanup_alpha_plans()
        logger.info("✅ Cleanup completed successfully!")

    except Exception as e:
        logger.error(f"❌ Cleanup failed: {e}")
        sys.exit(1)
backend/scripts/cleanup_onboarding_json_files.py (new file, 109 lines)
@@ -0,0 +1,109 @@
#!/usr/bin/env python3
"""
Cleanup Onboarding JSON Files Script

This script removes any stale .onboarding_progress*.json files from the backend root.
These files were used in the old file-based onboarding system and are no longer needed
since we've migrated to database-only storage.

Usage:
    python backend/scripts/cleanup_onboarding_json_files.py [--dry-run] [--force]

Options:
    --dry-run    Show what would be deleted without actually deleting
    --force      Skip confirmation prompt (use with caution)
"""

import os
import sys
import glob
import argparse
from pathlib import Path
from loguru import logger

# Add backend to path for imports
backend_dir = Path(__file__).parent.parent
sys.path.insert(0, str(backend_dir))

def find_onboarding_json_files(backend_root: Path) -> list:
    """Find all .onboarding_progress*.json files in backend root."""
    pattern = str(backend_root / ".onboarding_progress*.json")
    files = glob.glob(pattern)
    return [Path(f) for f in files]

def cleanup_json_files(backend_root: Path, dry_run: bool = False, force: bool = False) -> int:
    """
    Clean up onboarding JSON files.

    Args:
        backend_root: Path to backend directory
        dry_run: If True, only show what would be deleted
        force: If True, skip confirmation prompt

    Returns:
        Number of files processed
    """
    files = find_onboarding_json_files(backend_root)

    if not files:
        logger.info("✅ No onboarding JSON files found to clean up")
        return 0

    logger.info(f"Found {len(files)} onboarding JSON file(s):")
    for file in files:
        logger.info(f"  - {file.name}")

    if dry_run:
        logger.info("🔍 DRY RUN: Would delete the above files")
        return len(files)

    if not force:
        response = input(f"\nDelete {len(files)} onboarding JSON file(s)? (y/N): ").strip().lower()
        if response not in ['y', 'yes']:
            logger.info("❌ Cleanup cancelled by user")
            return 0

    deleted_count = 0
    for file in files:
        try:
            file.unlink()
            logger.info(f"🗑️ Deleted: {file.name}")
            deleted_count += 1
        except Exception as e:
            logger.error(f"❌ Failed to delete {file.name}: {e}")

    logger.info(f"✅ Cleanup complete: {deleted_count}/{len(files)} files deleted")
    return deleted_count

def main():
    """Main function."""
    parser = argparse.ArgumentParser(description="Clean up onboarding JSON files")
    parser.add_argument("--dry-run", action="store_true", help="Show what would be deleted without actually deleting")
    parser.add_argument("--force", action="store_true", help="Skip confirmation prompt")

    args = parser.parse_args()

    # Get backend root directory
    script_dir = Path(__file__).parent
    backend_root = script_dir.parent

    logger.info("🧹 Onboarding JSON Cleanup Script")
    logger.info(f"Backend root: {backend_root}")

    if args.dry_run:
        logger.info("🔍 Running in DRY RUN mode")

    try:
        deleted_count = cleanup_json_files(backend_root, args.dry_run, args.force)

        if deleted_count > 0:
            logger.info("✅ Cleanup completed successfully")
        else:
            logger.info("ℹ️ No files needed cleanup")

    except Exception as e:
        logger.error(f"❌ Cleanup failed: {e}")
        sys.exit(1)

if __name__ == "__main__":
    main()
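Note: a minimal dry-run sketch reusing the helpers above; the import path and backend_root value are illustrative assumptions, not part of the commit:

from pathlib import Path
from cleanup_onboarding_json_files import cleanup_json_files  # hypothetical import, run from backend/scripts

backend_root = Path("backend")  # adjust to your checkout layout
would_delete = cleanup_json_files(backend_root, dry_run=True, force=False)
print(f"{would_delete} stale onboarding JSON file(s) would be removed")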
backend/scripts/create_all_tables.py (new file, 46 lines)
@@ -0,0 +1,46 @@
#!/usr/bin/env python3
"""
Script to create all database tables in the correct order.
This ensures foreign key dependencies are satisfied.
"""

import sys
import os

# Add the backend directory to the Python path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from services.database import engine
from models.enhanced_strategy_models import Base as EnhancedStrategyBase
from models.monitoring_models import Base as MonitoringBase
from models.persona_models import Base as PersonaBase
from loguru import logger

def create_all_tables():
    """Create all tables in the correct order"""
    try:
        logger.info("Creating all database tables...")

        # Step 1: Create enhanced strategy tables first
        logger.info("Step 1: Creating enhanced strategy tables...")
        EnhancedStrategyBase.metadata.create_all(bind=engine)
        logger.info("✅ Enhanced strategy tables created!")

        # Step 2: Create monitoring tables
        logger.info("Step 2: Creating monitoring tables...")
        MonitoringBase.metadata.create_all(bind=engine)
        logger.info("✅ Monitoring tables created!")

        # Step 3: Create persona tables
        logger.info("Step 3: Creating persona tables...")
        PersonaBase.metadata.create_all(bind=engine)
        logger.info("✅ Persona tables created!")

        logger.info("✅ All tables created successfully!")

    except Exception as e:
        logger.error(f"❌ Error creating tables: {e}")
        sys.exit(1)

if __name__ == "__main__":
    create_all_tables()
backend/scripts/create_billing_tables.py (new file, 217 lines)
@@ -0,0 +1,217 @@
"""
Database Migration Script for Billing System
Creates all tables needed for billing, usage tracking, and subscription management.
"""

import sys
import os
from pathlib import Path

# Add the backend directory to Python path
backend_dir = Path(__file__).parent.parent
sys.path.insert(0, str(backend_dir))

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from loguru import logger
import traceback

# Import models
from models.subscription_models import Base as SubscriptionBase
from services.database import DATABASE_URL
from services.subscription.pricing_service import PricingService

def create_billing_tables():
    """Create all billing and subscription-related tables."""

    try:
        # Create engine
        engine = create_engine(DATABASE_URL, echo=False)

        # Create all tables
        logger.debug("Creating billing and subscription system tables...")
        SubscriptionBase.metadata.create_all(bind=engine)
        logger.debug("✅ Billing and subscription tables created successfully")

        # Create session for data initialization
        SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        db = SessionLocal()

        try:
            # Initialize pricing and plans
            pricing_service = PricingService(db)

            logger.debug("Initializing default API pricing...")
            pricing_service.initialize_default_pricing()
            logger.debug("✅ Default API pricing initialized")

            logger.debug("Initializing default subscription plans...")
            pricing_service.initialize_default_plans()
            logger.debug("✅ Default subscription plans initialized")

        except Exception as e:
            logger.error(f"Error initializing default data: {e}")
            logger.error(traceback.format_exc())
            db.rollback()
            raise
        finally:
            db.close()

        logger.info("✅ Billing system setup completed successfully!")

        # Display summary
        display_setup_summary(engine)

    except Exception as e:
        logger.error(f"❌ Error creating billing tables: {e}")
        logger.error(traceback.format_exc())
        raise

def display_setup_summary(engine):
    """Display a summary of the created tables and data."""

    try:
        with engine.connect() as conn:
            logger.info("\n" + "="*60)
            logger.info("BILLING SYSTEM SETUP SUMMARY")
            logger.info("="*60)

            # Check tables
            tables_query = text("""
                SELECT name FROM sqlite_master
                WHERE type='table' AND (
                    name LIKE '%subscription%' OR
                    name LIKE '%usage%' OR
                    name LIKE '%billing%' OR
                    name LIKE '%pricing%' OR
                    name LIKE '%alert%'
                )
                ORDER BY name
            """)

            result = conn.execute(tables_query)
            tables = result.fetchall()

            logger.info(f"\n📊 Created Tables ({len(tables)}):")
            for table in tables:
                logger.debug(f"  • {table[0]}")

            # Check subscription plans
            try:
                plans_query = text("SELECT COUNT(*) FROM subscription_plans")
                result = conn.execute(plans_query)
                plan_count = result.fetchone()[0]
                logger.info(f"\n💳 Subscription Plans: {plan_count}")

                if plan_count > 0:
                    plans_detail_query = text("""
                        SELECT name, tier, price_monthly, price_yearly
                        FROM subscription_plans
                        ORDER BY price_monthly
                    """)
                    result = conn.execute(plans_detail_query)
                    plans = result.fetchall()

                    for plan in plans:
                        name, tier, monthly, yearly = plan
                        logger.debug(f"  • {name} ({tier}): ${monthly}/month, ${yearly}/year")
            except Exception as e:
                logger.warning(f"Could not check subscription plans: {e}")

            # Check API pricing
            try:
                pricing_query = text("SELECT COUNT(*) FROM api_provider_pricing")
                result = conn.execute(pricing_query)
                pricing_count = result.fetchone()[0]
                logger.info(f"\n💰 API Pricing Entries: {pricing_count}")

                if pricing_count > 0:
                    pricing_detail_query = text("""
                        SELECT provider, model_name, cost_per_input_token, cost_per_output_token
                        FROM api_provider_pricing
                        WHERE cost_per_input_token > 0 OR cost_per_output_token > 0
                        ORDER BY provider, model_name
                        LIMIT 10
                    """)
                    result = conn.execute(pricing_detail_query)
                    pricing_entries = result.fetchall()

                    logger.info("\n  LLM Pricing (per token) - Top 10:")
                    for entry in pricing_entries:
                        provider, model, input_cost, output_cost = entry
                        logger.debug(f"  • {provider}/{model}: ${input_cost:.8f} in, ${output_cost:.8f} out")
            except Exception as e:
                logger.warning(f"Could not check API pricing: {e}")

            logger.info("\n" + "="*60)
            logger.info("NEXT STEPS:")
            logger.info("="*60)
            logger.info("1. Billing system is ready for use")
            logger.info("2. API endpoints are available at:")
            logger.info("   GET /api/subscription/plans")
            logger.info("   GET /api/subscription/usage/{user_id}")
            logger.info("   GET /api/subscription/dashboard/{user_id}")
            logger.info("   GET /api/subscription/pricing")
            logger.info("\n3. Frontend billing dashboard is integrated")
            logger.info("4. Usage tracking middleware is active")
            logger.info("5. Real-time cost monitoring is enabled")
            logger.info("="*60)

    except Exception as e:
        logger.error(f"Error displaying summary: {e}")

def check_existing_tables(engine):
    """Check if billing tables already exist."""

    try:
        with engine.connect() as conn:
            # Check for billing tables
            check_query = text("""
                SELECT name FROM sqlite_master
                WHERE type='table' AND (
                    name = 'subscription_plans' OR
                    name = 'user_subscriptions' OR
                    name = 'api_usage_logs' OR
                    name = 'usage_summaries' OR
                    name = 'api_provider_pricing' OR
                    name = 'usage_alerts'
                )
            """)

            result = conn.execute(check_query)
            existing_tables = result.fetchall()

            if existing_tables:
                logger.warning(f"Found existing billing tables: {[t[0] for t in existing_tables]}")
                logger.debug("Tables already exist. Skipping creation to preserve data.")
                return False

            return True

    except Exception as e:
        logger.error(f"Error checking existing tables: {e}")
        return True  # Proceed anyway

if __name__ == "__main__":
    logger.debug("🚀 Starting billing system database migration...")

    try:
        # Create engine to check existing tables
        engine = create_engine(DATABASE_URL, echo=False)

        # Check existing tables
        if not check_existing_tables(engine):
            logger.debug("✅ Billing tables already exist, skipping creation")
            sys.exit(0)

        # Create tables and initialize data
        create_billing_tables()

        logger.info("✅ Billing system migration completed successfully!")

    except KeyboardInterrupt:
        logger.warning("Migration cancelled by user")
        sys.exit(0)
    except Exception as e:
        logger.error(f"❌ Migration failed: {e}")
        sys.exit(1)
backend/scripts/create_cache_table.py (new file, 140 lines)
@@ -0,0 +1,140 @@
"""
Database migration script to create comprehensive user data cache table.
Run this script to add the cache table to your database.
"""

import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from loguru import logger

def create_cache_table():
    """Create the comprehensive user data cache table."""
    try:
        # Get database URL from environment or use default
        database_url = os.getenv('DATABASE_URL', 'sqlite:///alwrity.db')

        # Create engine
        engine = create_engine(database_url)

        # Create session
        SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        db = SessionLocal()

        # SQL to create the cache table
        create_table_sql = """
        CREATE TABLE IF NOT EXISTS comprehensive_user_data_cache (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            user_id INTEGER NOT NULL,
            strategy_id INTEGER,
            data_hash VARCHAR(64) NOT NULL,
            comprehensive_data JSON NOT NULL,
            created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
            expires_at DATETIME NOT NULL,
            last_accessed DATETIME DEFAULT CURRENT_TIMESTAMP,
            access_count INTEGER DEFAULT 0
        );
        """

        # Create indexes
        create_indexes_sql = [
            "CREATE INDEX IF NOT EXISTS idx_user_strategy ON comprehensive_user_data_cache(user_id, strategy_id);",
            "CREATE INDEX IF NOT EXISTS idx_expires_at ON comprehensive_user_data_cache(expires_at);",
            "CREATE INDEX IF NOT EXISTS idx_data_hash ON comprehensive_user_data_cache(data_hash);"
        ]

        # Execute table creation
        logger.info("Creating comprehensive_user_data_cache table...")
        db.execute(text(create_table_sql))

        # Execute index creation
        logger.info("Creating indexes...")
        for index_sql in create_indexes_sql:
            db.execute(text(index_sql))

        # Commit changes
        db.commit()

        # Verify table creation
        result = db.execute(text("SELECT name FROM sqlite_master WHERE type='table' AND name='comprehensive_user_data_cache';"))
        table_exists = result.fetchone()

        if table_exists:
            logger.info("✅ Comprehensive user data cache table created successfully!")

            # Show table structure
            result = db.execute(text("PRAGMA table_info(comprehensive_user_data_cache);"))
            columns = result.fetchall()

            logger.info("Table structure:")
            for column in columns:
                logger.info(f"  - {column[1]} ({column[2]})")

        else:
            logger.error("❌ Failed to create comprehensive_user_data_cache table")
            return False

        db.close()
        return True

    except Exception as e:
        logger.error(f"❌ Error creating cache table: {str(e)}")
        if 'db' in locals():
            db.close()
        return False

def drop_cache_table():
    """Drop the comprehensive user data cache table (for testing)."""
    try:
        # Get database URL from environment or use default
        database_url = os.getenv('DATABASE_URL', 'sqlite:///alwrity.db')

        # Create engine
        engine = create_engine(database_url)

        # Create session
        SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
        db = SessionLocal()

        # Drop table
        logger.info("Dropping comprehensive_user_data_cache table...")
        db.execute(text("DROP TABLE IF EXISTS comprehensive_user_data_cache;"))
        db.commit()

        logger.info("✅ Comprehensive user data cache table dropped successfully!")
        db.close()
        return True

    except Exception as e:
        logger.error(f"❌ Error dropping cache table: {str(e)}")
        if 'db' in locals():
            db.close()
        return False

if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Manage comprehensive user data cache table")
    parser.add_argument("--action", choices=["create", "drop"], default="create",
                        help="Action to perform (create or drop table)")

    args = parser.parse_args()

    if args.action == "create":
        success = create_cache_table()
        if success:
            logger.info("🎉 Cache table setup completed successfully!")
        else:
            logger.error("💥 Cache table setup failed!")
            sys.exit(1)
    elif args.action == "drop":
        success = drop_cache_table()
        if success:
            logger.info("🗑️ Cache table dropped successfully!")
        else:
            logger.error("💥 Failed to drop cache table!")
            sys.exit(1)
backend/scripts/create_enhanced_strategy_tables.py (new file, 32 lines)
@@ -0,0 +1,32 @@
#!/usr/bin/env python3
"""
Script to create enhanced strategy tables in the database.
Run this script to ensure all enhanced strategy tables are created before monitoring tables.
"""

import sys
import os

# Add the backend directory to the Python path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from services.database import engine
from models.enhanced_strategy_models import Base as EnhancedStrategyBase
from loguru import logger

def create_enhanced_strategy_tables():
    """Create all enhanced strategy tables"""
    try:
        logger.info("Creating enhanced strategy tables...")

        # Create enhanced strategy tables first
        EnhancedStrategyBase.metadata.create_all(bind=engine)

        logger.info("✅ Enhanced strategy tables created successfully!")

    except Exception as e:
        logger.error(f"❌ Error creating enhanced strategy tables: {e}")
        sys.exit(1)

if __name__ == "__main__":
    create_enhanced_strategy_tables()
backend/scripts/create_monitoring_tables.py (new file, 195 lines)
@@ -0,0 +1,195 @@
|
||||
"""
|
||||
Database migration script to create API monitoring tables.
|
||||
Run this script to add the monitoring tables to your database.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from sqlalchemy import create_engine, text
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from loguru import logger
|
||||
import os
|
||||
|
||||
def create_monitoring_tables():
|
||||
"""Create the API monitoring tables."""
|
||||
try:
|
||||
# Get database URL from environment or use default
|
||||
database_url = os.getenv('DATABASE_URL', 'sqlite:///alwrity.db')
|
||||
|
||||
# Create engine
|
||||
engine = create_engine(database_url)
|
||||
|
||||
# Create session
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
db = SessionLocal()
|
||||
|
||||
# SQL to create the monitoring tables
|
||||
create_tables_sql = [
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS api_requests (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
timestamp DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
path VARCHAR(500) NOT NULL,
|
||||
method VARCHAR(10) NOT NULL,
|
||||
status_code INTEGER NOT NULL,
|
||||
duration FLOAT NOT NULL,
|
||||
user_id VARCHAR(50),
|
||||
cache_hit BOOLEAN,
|
||||
request_size INTEGER,
|
||||
response_size INTEGER,
|
||||
user_agent VARCHAR(500),
|
||||
ip_address VARCHAR(45)
|
||||
);
|
||||
""",
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS api_endpoint_stats (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
endpoint VARCHAR(500) NOT NULL UNIQUE,
|
||||
total_requests INTEGER DEFAULT 0,
|
||||
total_errors INTEGER DEFAULT 0,
|
||||
total_duration FLOAT DEFAULT 0.0,
|
||||
avg_duration FLOAT DEFAULT 0.0,
|
||||
min_duration FLOAT,
|
||||
max_duration FLOAT,
|
||||
last_called DATETIME,
|
||||
cache_hits INTEGER DEFAULT 0,
|
||||
cache_misses INTEGER DEFAULT 0,
|
||||
cache_hit_rate FLOAT DEFAULT 0.0,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
""",
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS system_health (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
timestamp DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
status VARCHAR(20) NOT NULL,
|
||||
total_requests INTEGER DEFAULT 0,
|
||||
total_errors INTEGER DEFAULT 0,
|
||||
error_rate FLOAT DEFAULT 0.0,
|
||||
avg_response_time FLOAT DEFAULT 0.0,
|
||||
cache_hit_rate FLOAT DEFAULT 0.0,
|
||||
active_endpoints INTEGER DEFAULT 0,
|
||||
metrics JSON
|
||||
);
|
||||
""",
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS cache_performance (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
timestamp DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
|
||||
cache_type VARCHAR(50) NOT NULL,
|
||||
hits INTEGER DEFAULT 0,
|
||||
misses INTEGER DEFAULT 0,
|
||||
hit_rate FLOAT DEFAULT 0.0,
|
||||
avg_response_time FLOAT DEFAULT 0.0,
|
||||
total_requests INTEGER DEFAULT 0
|
||||
);
|
||||
"""
|
||||
]
|
||||
|
||||
# Create indexes
|
||||
create_indexes_sql = [
|
||||
"CREATE INDEX IF NOT EXISTS idx_api_requests_timestamp ON api_requests(timestamp);",
|
||||
"CREATE INDEX IF NOT EXISTS idx_api_requests_path_method ON api_requests(path, method);",
|
||||
"CREATE INDEX IF NOT EXISTS idx_api_requests_status_code ON api_requests(status_code);",
|
||||
"CREATE INDEX IF NOT EXISTS idx_api_requests_user_id ON api_requests(user_id);",
|
||||
"CREATE INDEX IF NOT EXISTS idx_api_endpoint_stats_endpoint ON api_endpoint_stats(endpoint);",
|
||||
"CREATE INDEX IF NOT EXISTS idx_api_endpoint_stats_total_requests ON api_endpoint_stats(total_requests);",
|
||||
"CREATE INDEX IF NOT EXISTS idx_api_endpoint_stats_avg_duration ON api_endpoint_stats(avg_duration);",
|
||||
"CREATE INDEX IF NOT EXISTS idx_system_health_timestamp ON system_health(timestamp);",
|
||||
"CREATE INDEX IF NOT EXISTS idx_system_health_status ON system_health(status);",
|
||||
"CREATE INDEX IF NOT EXISTS idx_cache_performance_timestamp ON cache_performance(timestamp);",
|
||||
"CREATE INDEX IF NOT EXISTS idx_cache_performance_cache_type ON cache_performance(cache_type);"
|
||||
]
|
||||
|
||||
# Execute table creation
|
||||
logger.info("Creating API monitoring tables...")
|
||||
for table_sql in create_tables_sql:
|
||||
db.execute(text(table_sql))
|
||||
|
||||
# Execute index creation
|
||||
logger.info("Creating indexes...")
|
||||
for index_sql in create_indexes_sql:
|
||||
db.execute(text(index_sql))
|
||||
|
||||
# Commit changes
|
||||
db.commit()
|
||||
|
||||
# Verify table creation
|
||||
tables_to_check = ['api_requests', 'api_endpoint_stats', 'system_health', 'cache_performance']
|
||||
for table_name in tables_to_check:
|
||||
result = db.execute(text(f"SELECT name FROM sqlite_master WHERE type='table' AND name='{table_name}';"))
|
||||
table_exists = result.fetchone()
|
||||
|
||||
if table_exists:
|
||||
logger.info(f"✅ {table_name} table created successfully!")
|
||||
else:
|
||||
logger.error(f"❌ Failed to create {table_name} table")
|
||||
return False
|
||||
|
||||
logger.info("🎉 All API monitoring tables created successfully!")
|
||||
db.close()
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error creating monitoring tables: {str(e)}")
|
||||
if 'db' in locals():
|
||||
db.close()
|
||||
return False
|
||||
|
||||
def drop_monitoring_tables():
|
||||
"""Drop the API monitoring tables (for testing)."""
|
||||
try:
|
||||
# Get database URL from environment or use default
|
||||
database_url = os.getenv('DATABASE_URL', 'sqlite:///alwrity.db')
|
||||
|
||||
# Create engine
|
||||
engine = create_engine(database_url)
|
||||
|
||||
# Create session
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
db = SessionLocal()
|
||||
|
||||
# Drop tables
|
||||
tables_to_drop = ['api_requests', 'api_endpoint_stats', 'system_health', 'cache_performance']
|
||||
logger.info("Dropping API monitoring tables...")
|
||||
|
||||
for table_name in tables_to_drop:
|
||||
db.execute(text(f"DROP TABLE IF EXISTS {table_name};"))
|
||||
|
||||
db.commit()
|
||||
|
||||
logger.info("✅ API monitoring tables dropped successfully!")
|
||||
db.close()
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error dropping monitoring tables: {str(e)}")
|
||||
if 'db' in locals():
|
||||
db.close()
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser(description="Manage API monitoring tables")
|
||||
parser.add_argument("--action", choices=["create", "drop"], default="create",
|
||||
help="Action to perform (create or drop tables)")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.action == "create":
|
||||
success = create_monitoring_tables()
|
||||
if success:
|
||||
logger.info("🎉 API monitoring tables setup completed successfully!")
|
||||
else:
|
||||
logger.error("💥 API monitoring tables setup failed!")
|
||||
sys.exit(1)
|
||||
elif args.action == "drop":
|
||||
success = drop_monitoring_tables()
|
||||
if success:
|
||||
logger.info("🗑️ API monitoring tables dropped successfully!")
|
||||
else:
|
||||
logger.error("💥 Failed to drop API monitoring tables!")
|
||||
sys.exit(1)
|
||||
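For reference, a minimal sketch of how application code could record a request into the api_requests table created above; the real middleware is not part of this diff, so the connection string and the choice of columns filled in here are illustrative assumptions:

from sqlalchemy import create_engine, text

# Assumes the same SQLite database the migration scripts default to.
engine = create_engine("sqlite:///alwrity.db")

def record_request(path: str, method: str, status_code: int, duration: float) -> None:
    """Insert one row into api_requests; timestamp falls back to the column default."""
    with engine.begin() as conn:
        conn.execute(
            text(
                "INSERT INTO api_requests (path, method, status_code, duration) "
                "VALUES (:path, :method, :status_code, :duration)"
            ),
            {"path": path, "method": method, "status_code": status_code, "duration": duration},
        )

record_request("/api/content-planning/health", "GET", 200, 0.12)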
41
backend/scripts/create_monitoring_tables_direct.py
Normal file
@@ -0,0 +1,41 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Script to create monitoring tables directly.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Add the backend directory to the Python path
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from services.database import engine
|
||||
from models.monitoring_models import (
|
||||
StrategyMonitoringPlan,
|
||||
MonitoringTask,
|
||||
TaskExecutionLog,
|
||||
StrategyPerformanceMetrics,
|
||||
StrategyActivationStatus
|
||||
)
|
||||
from loguru import logger
|
||||
|
||||
def create_monitoring_tables_direct():
|
||||
"""Create monitoring tables directly"""
|
||||
try:
|
||||
logger.info("Creating monitoring tables directly...")
|
||||
|
||||
# Create tables directly
|
||||
StrategyMonitoringPlan.__table__.create(engine, checkfirst=True)
|
||||
MonitoringTask.__table__.create(engine, checkfirst=True)
|
||||
TaskExecutionLog.__table__.create(engine, checkfirst=True)
|
||||
StrategyPerformanceMetrics.__table__.create(engine, checkfirst=True)
|
||||
StrategyActivationStatus.__table__.create(engine, checkfirst=True)
|
||||
|
||||
logger.info("✅ Monitoring tables created successfully!")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error creating monitoring tables: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
if __name__ == "__main__":
|
||||
create_monitoring_tables_direct()
|
||||
124
backend/scripts/create_persona_data_table.py
Normal file
@@ -0,0 +1,124 @@
|
||||
"""
|
||||
Script to create the persona_data table for onboarding step 4.
|
||||
This migration adds support for storing persona generation data.
|
||||
|
||||
Usage:
|
||||
python backend/scripts/create_persona_data_table.py
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Add backend directory to path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
from loguru import logger
|
||||
from sqlalchemy import inspect
|
||||
|
||||
def create_persona_data_table():
|
||||
"""Create the persona_data table."""
|
||||
try:
|
||||
# Import after path is set
|
||||
from services.database import engine
|
||||
from models.onboarding import Base as OnboardingBase, PersonaData
|
||||
|
||||
logger.info("🔍 Checking if persona_data table exists...")
|
||||
|
||||
# Check if table already exists
|
||||
inspector = inspect(engine)
|
||||
existing_tables = inspector.get_table_names()
|
||||
|
||||
if 'persona_data' in existing_tables:
|
||||
logger.info("✅ persona_data table already exists")
|
||||
return True
|
||||
|
||||
logger.info("📊 Creating persona_data table...")
|
||||
|
||||
# Create only the persona_data table
|
||||
PersonaData.__table__.create(bind=engine, checkfirst=True)
|
||||
|
||||
logger.info("✅ persona_data table created successfully")
|
||||
|
||||
# Verify creation
|
||||
inspector = inspect(engine)
|
||||
existing_tables = inspector.get_table_names()
|
||||
|
||||
if 'persona_data' in existing_tables:
|
||||
logger.info("✅ Verification successful - persona_data table exists")
|
||||
|
||||
# Show table structure
|
||||
columns = inspector.get_columns('persona_data')
|
||||
logger.info(f"📋 Table structure ({len(columns)} columns):")
|
||||
for col in columns:
|
||||
logger.info(f" - {col['name']}: {col['type']}")
|
||||
|
||||
return True
|
||||
else:
|
||||
logger.error("❌ Table creation verification failed")
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error creating persona_data table: {e}")
|
||||
import traceback
|
||||
logger.error(traceback.format_exc())
|
||||
return False
|
||||
|
||||
def check_onboarding_tables():
|
||||
"""Check all onboarding-related tables."""
|
||||
try:
|
||||
from services.database import engine
|
||||
from sqlalchemy import inspect
|
||||
|
||||
inspector = inspect(engine)
|
||||
existing_tables = inspector.get_table_names()
|
||||
|
||||
onboarding_tables = [
|
||||
'onboarding_sessions',
|
||||
'api_keys',
|
||||
'website_analyses',
|
||||
'research_preferences',
|
||||
'persona_data'
|
||||
]
|
||||
|
||||
logger.info("📋 Onboarding Tables Status:")
|
||||
for table in onboarding_tables:
|
||||
status = "✅" if table in existing_tables else "❌"
|
||||
logger.info(f" {status} {table}")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error checking tables: {e}")
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("=" * 60)
|
||||
logger.info("Persona Data Table Migration")
|
||||
logger.info("=" * 60)
|
||||
|
||||
# Check existing tables
|
||||
check_onboarding_tables()
|
||||
|
||||
logger.info("")
|
||||
|
||||
# Create persona_data table
|
||||
if create_persona_data_table():
|
||||
logger.info("")
|
||||
logger.info("=" * 60)
|
||||
logger.info("✅ Migration completed successfully!")
|
||||
logger.info("=" * 60)
|
||||
|
||||
# Check again to confirm
|
||||
logger.info("")
|
||||
check_onboarding_tables()
|
||||
|
||||
sys.exit(0)
|
||||
else:
|
||||
logger.error("")
|
||||
logger.error("=" * 60)
|
||||
logger.error("❌ Migration failed!")
|
||||
logger.error("=" * 60)
|
||||
sys.exit(1)
|
||||
|
||||
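The check-then-create pattern used by this script (inspect for the table, create it, inspect again) recurs in several sibling migrations; a small helper that captures it, sketched for any declarative model that exposes __table__:

from sqlalchemy import inspect
from sqlalchemy.engine import Engine

def ensure_table(engine: Engine, model) -> bool:
    """Create the model's table if it is missing; return True once it exists."""
    table_name = model.__table__.name
    if table_name in inspect(engine).get_table_names():
        return True
    model.__table__.create(bind=engine, checkfirst=True)
    return table_name in inspect(engine).get_table_names()

Called as ensure_table(engine, PersonaData), this reproduces the behaviour of create_persona_data_table() minus the logging.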
53
backend/scripts/create_persona_tables.py
Normal file
@@ -0,0 +1,53 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Script to create persona database tables.
|
||||
This script creates the new persona-related tables for storing writing personas.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Add the backend directory to the Python path
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from services.database import engine
|
||||
from models.persona_models import Base as PersonaBase
|
||||
from loguru import logger
|
||||
|
||||
def create_persona_tables():
|
||||
"""Create all persona-related tables"""
|
||||
try:
|
||||
logger.info("Creating persona database tables...")
|
||||
|
||||
# Create persona tables
|
||||
logger.info("Creating persona tables...")
|
||||
PersonaBase.metadata.create_all(bind=engine)
|
||||
logger.info("✅ Persona tables created!")
|
||||
|
||||
logger.info("✅ All persona tables created successfully!")
|
||||
|
||||
# Verify tables were created
|
||||
from sqlalchemy import inspect
|
||||
inspector = inspect(engine)
|
||||
tables = inspector.get_table_names()
|
||||
|
||||
persona_tables = [
|
||||
'writing_personas',
|
||||
'platform_personas',
|
||||
'persona_analysis_results',
|
||||
'persona_validation_results'
|
||||
]
|
||||
|
||||
created_tables = [table for table in persona_tables if table in tables]
|
||||
logger.info(f"✅ Verified tables created: {created_tables}")
|
||||
|
||||
if len(created_tables) != len(persona_tables):
|
||||
missing = [table for table in persona_tables if table not in created_tables]
|
||||
logger.warning(f"⚠️ Missing tables: {missing}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error creating persona tables: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
if __name__ == "__main__":
|
||||
create_persona_tables()
|
||||
149
backend/scripts/create_podcast_tables.py
Normal file
@@ -0,0 +1,149 @@
|
||||
"""
|
||||
Database Migration Script for Podcast Maker
|
||||
Creates the podcast_projects table for cross-device project persistence.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Add the backend directory to Python path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
from sqlalchemy import create_engine, text
|
||||
from loguru import logger
|
||||
import traceback
|
||||
|
||||
# Import models - PodcastProject uses SubscriptionBase
|
||||
from models.subscription_models import Base as SubscriptionBase
|
||||
from models.podcast_models import PodcastProject
|
||||
from services.database import DATABASE_URL
|
||||
|
||||
def create_podcast_tables():
|
||||
"""Create podcast-related tables."""
|
||||
|
||||
try:
|
||||
# Create engine
|
||||
engine = create_engine(DATABASE_URL, echo=False)
|
||||
|
||||
# Create all tables (PodcastProject uses SubscriptionBase, so it will be created)
|
||||
logger.info("Creating podcast maker tables...")
|
||||
SubscriptionBase.metadata.create_all(bind=engine)
|
||||
logger.info("✅ Podcast tables created successfully")
|
||||
|
||||
# Verify table was created
|
||||
display_setup_summary(engine)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error creating podcast tables: {e}")
|
||||
logger.error(traceback.format_exc())
|
||||
raise
|
||||
|
||||
def display_setup_summary(engine):
|
||||
"""Display a summary of the created tables."""
|
||||
|
||||
try:
|
||||
with engine.connect() as conn:
|
||||
logger.info("\n" + "="*60)
|
||||
logger.info("PODCAST MAKER SETUP SUMMARY")
|
||||
logger.info("="*60)
|
||||
|
||||
# Check if table exists
|
||||
check_query = text("""
|
||||
SELECT name FROM sqlite_master
|
||||
WHERE type='table' AND name='podcast_projects'
|
||||
""")
|
||||
|
||||
result = conn.execute(check_query)
|
||||
table_exists = result.fetchone()
|
||||
|
||||
if table_exists:
|
||||
logger.info("✅ Table 'podcast_projects' created successfully")
|
||||
|
||||
# Get table schema
|
||||
schema_query = text("""
|
||||
SELECT sql FROM sqlite_master
|
||||
WHERE type='table' AND name='podcast_projects'
|
||||
""")
|
||||
result = conn.execute(schema_query)
|
||||
schema = result.fetchone()
|
||||
if schema:
|
||||
logger.info("\n📋 Table Schema:")
|
||||
logger.info(schema[0])
|
||||
|
||||
# Check indexes
|
||||
indexes_query = text("""
|
||||
SELECT name FROM sqlite_master
|
||||
WHERE type='index' AND tbl_name='podcast_projects'
|
||||
""")
|
||||
result = conn.execute(indexes_query)
|
||||
indexes = result.fetchall()
|
||||
|
||||
if indexes:
|
||||
logger.info(f"\n📊 Indexes ({len(indexes)}):")
|
||||
for idx in indexes:
|
||||
logger.info(f" • {idx[0]}")
|
||||
|
||||
else:
|
||||
logger.warning("⚠️ Table 'podcast_projects' not found after creation")
|
||||
|
||||
logger.info("\n" + "="*60)
|
||||
logger.info("NEXT STEPS:")
|
||||
logger.info("="*60)
|
||||
logger.info("1. The podcast_projects table is ready for use")
|
||||
logger.info("2. Projects will automatically sync to database after major steps")
|
||||
logger.info("3. Users can resume projects from any device")
|
||||
logger.info("4. Use the 'My Projects' button in the Podcast Dashboard to view saved projects")
|
||||
logger.info("="*60)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error displaying summary: {e}")
|
||||
|
||||
def check_existing_table(engine):
|
||||
"""Check if podcast_projects table already exists."""
|
||||
|
||||
try:
|
||||
with engine.connect() as conn:
|
||||
check_query = text("""
|
||||
SELECT name FROM sqlite_master
|
||||
WHERE type='table' AND name='podcast_projects'
|
||||
""")
|
||||
|
||||
result = conn.execute(check_query)
|
||||
table_exists = result.fetchone()
|
||||
|
||||
if table_exists:
|
||||
logger.info("ℹ️ Table 'podcast_projects' already exists")
|
||||
logger.info(" Running migration will ensure schema is up to date...")
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error checking existing table: {e}")
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("🚀 Starting podcast maker database migration...")
|
||||
|
||||
try:
|
||||
# Create engine to check existing table
|
||||
engine = create_engine(DATABASE_URL, echo=False)
|
||||
|
||||
# Check existing table
|
||||
table_exists = check_existing_table(engine)
|
||||
|
||||
# Create tables (idempotent - won't recreate if exists)
|
||||
create_podcast_tables()
|
||||
|
||||
logger.info("✅ Migration completed successfully!")
|
||||
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Migration cancelled by user")
|
||||
sys.exit(0)
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Migration failed: {e}")
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
|
||||
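The verification above queries sqlite_master directly, which ties the check to SQLite. A dialect-agnostic sketch of the same table and index checks using the SQLAlchemy inspector (the names are the ones this migration creates):

from sqlalchemy import create_engine, inspect
from services.database import DATABASE_URL

engine = create_engine(DATABASE_URL, echo=False)
inspector = inspect(engine)

if "podcast_projects" in inspector.get_table_names():
    indexes = inspector.get_indexes("podcast_projects")
    print(f"podcast_projects exists with {len(indexes)} index(es)")
else:
    print("podcast_projects is missing")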
88
backend/scripts/create_product_asset_tables.py
Normal file
@@ -0,0 +1,88 @@
|
||||
"""
|
||||
Database Migration Script for Product Asset Tables
|
||||
Creates all tables needed for Product Marketing Suite (product asset creation).
|
||||
These tables are separate from campaign-related tables and focus on product-specific assets.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Add the backend directory to Python path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
from sqlalchemy import create_engine, text, inspect
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from loguru import logger
|
||||
import traceback
|
||||
|
||||
# Import models - Product Asset models use SubscriptionBase
|
||||
from models.subscription_models import Base as SubscriptionBase
|
||||
from models.product_asset_models import ProductAsset, ProductStyleTemplate, EcommerceExport
|
||||
from services.database import DATABASE_URL
|
||||
|
||||
|
||||
def create_product_asset_tables():
|
||||
"""Create all product asset tables."""
|
||||
|
||||
try:
|
||||
# Create engine
|
||||
engine = create_engine(DATABASE_URL, echo=False)
|
||||
|
||||
# Create all tables (product asset models share SubscriptionBase)
|
||||
logger.info("Creating product asset tables for Product Marketing Suite...")
|
||||
SubscriptionBase.metadata.create_all(bind=engine)
|
||||
logger.info("✅ Product asset tables created successfully")
|
||||
|
||||
# Verify tables were created
|
||||
with engine.connect() as conn:
|
||||
# Check if tables exist
|
||||
inspector = inspect(engine)
|
||||
tables = inspector.get_table_names()
|
||||
|
||||
expected_tables = [
|
||||
'product_assets',
|
||||
'product_style_templates',
|
||||
'product_ecommerce_exports'
|
||||
]
|
||||
|
||||
created_tables = [t for t in expected_tables if t in tables]
|
||||
missing_tables = [t for t in expected_tables if t not in tables]
|
||||
|
||||
if created_tables:
|
||||
logger.info(f"✅ Created tables: {', '.join(created_tables)}")
|
||||
|
||||
if missing_tables:
|
||||
logger.warning(f"⚠️ Missing tables: {', '.join(missing_tables)}")
|
||||
else:
|
||||
logger.info("🎉 All product asset tables verified!")
|
||||
|
||||
# Verify indexes were created
|
||||
with engine.connect() as conn:
|
||||
inspector = inspect(engine)
|
||||
|
||||
# Check ProductAsset indexes
|
||||
product_asset_indexes = inspector.get_indexes('product_assets')
|
||||
logger.info(f"✅ ProductAsset indexes: {len(product_asset_indexes)} indexes created")
|
||||
|
||||
# Check ProductStyleTemplate indexes
|
||||
style_template_indexes = inspector.get_indexes('product_style_templates')
|
||||
logger.info(f"✅ ProductStyleTemplate indexes: {len(style_template_indexes)} indexes created")
|
||||
|
||||
# Check EcommerceExport indexes
|
||||
ecommerce_export_indexes = inspector.get_indexes('product_ecommerce_exports')
|
||||
logger.info(f"✅ EcommerceExport indexes: {len(ecommerce_export_indexes)} indexes created")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error creating product asset tables: {e}")
|
||||
logger.error(traceback.format_exc())
|
||||
return False
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
success = create_product_asset_tables()
|
||||
sys.exit(0 if success else 1)
|
||||
|
||||
71
backend/scripts/create_product_marketing_tables.py
Normal file
@@ -0,0 +1,71 @@
|
||||
"""
|
||||
Database Migration Script for Product Marketing Suite
|
||||
Creates all tables needed for campaigns, proposals, and generated assets.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Add the backend directory to Python path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
from sqlalchemy import create_engine, text, inspect
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from loguru import logger
|
||||
import traceback
|
||||
|
||||
# Import models - Product Marketing uses SubscriptionBase
|
||||
# Import the Base first, then import product marketing models to register them
|
||||
from models.subscription_models import Base as SubscriptionBase
|
||||
from models.product_marketing_models import Campaign, CampaignProposal, CampaignAsset
|
||||
from services.database import DATABASE_URL
|
||||
|
||||
def create_product_marketing_tables():
|
||||
"""Create all product marketing tables."""
|
||||
|
||||
try:
|
||||
# Create engine
|
||||
engine = create_engine(DATABASE_URL, echo=False)
|
||||
|
||||
# Create all tables (product marketing models share SubscriptionBase)
|
||||
logger.info("Creating product marketing tables...")
|
||||
SubscriptionBase.metadata.create_all(bind=engine)
|
||||
logger.info("✅ Product marketing tables created successfully")
|
||||
|
||||
# Verify tables were created
|
||||
with engine.connect() as conn:
|
||||
# Check if tables exist
|
||||
from sqlalchemy import inspect as sqlalchemy_inspect
|
||||
inspector = sqlalchemy_inspect(engine)
|
||||
tables = inspector.get_table_names()
|
||||
|
||||
expected_tables = [
|
||||
'product_marketing_campaigns',
|
||||
'product_marketing_proposals',
|
||||
'product_marketing_assets'
|
||||
]
|
||||
|
||||
created_tables = [t for t in expected_tables if t in tables]
|
||||
missing_tables = [t for t in expected_tables if t not in tables]
|
||||
|
||||
if created_tables:
|
||||
logger.info(f"✅ Created tables: {', '.join(created_tables)}")
|
||||
|
||||
if missing_tables:
|
||||
logger.warning(f"⚠️ Missing tables: {', '.join(missing_tables)}")
|
||||
else:
|
||||
logger.info("🎉 All product marketing tables verified!")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error creating product marketing tables: {e}")
|
||||
logger.error(traceback.format_exc())
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
success = create_product_marketing_tables()
|
||||
sys.exit(0 if success else 1)
|
||||
|
||||
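Both product scripts above call SubscriptionBase.metadata.create_all, which creates every table registered on that shared Base, not only the product tables; the verification step is what confirms the expected subset exists. If a migration ever needs to touch only its own tables, create_all accepts an explicit table list, sketched here under that assumption:

from sqlalchemy import create_engine
from models.subscription_models import Base as SubscriptionBase
from models.product_marketing_models import Campaign, CampaignProposal, CampaignAsset
from services.database import DATABASE_URL

engine = create_engine(DATABASE_URL, echo=False)

# Restrict creation to the three campaign tables; checkfirst keeps it idempotent.
SubscriptionBase.metadata.create_all(
    bind=engine,
    tables=[Campaign.__table__, CampaignProposal.__table__, CampaignAsset.__table__],
    checkfirst=True,
)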
206
backend/scripts/create_subscription_tables.py
Normal file
@@ -0,0 +1,206 @@
|
||||
"""
|
||||
Database Migration Script for Subscription System
|
||||
Creates all tables needed for usage-based subscription and monitoring.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Add the backend directory to Python path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
from sqlalchemy import create_engine, text
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from loguru import logger
|
||||
import traceback
|
||||
|
||||
# Import models
|
||||
from models.subscription_models import Base as SubscriptionBase
|
||||
from services.database import DATABASE_URL
|
||||
from services.subscription.pricing_service import PricingService
|
||||
|
||||
def create_subscription_tables():
|
||||
"""Create all subscription-related tables."""
|
||||
|
||||
try:
|
||||
# Create engine
|
||||
engine = create_engine(DATABASE_URL, echo=True)
|
||||
|
||||
# Create all tables
|
||||
logger.info("Creating subscription system tables...")
|
||||
SubscriptionBase.metadata.create_all(bind=engine)
|
||||
logger.info("✅ Subscription tables created successfully")
|
||||
|
||||
# Create session for data initialization
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
db = SessionLocal()
|
||||
|
||||
try:
|
||||
# Initialize pricing and plans
|
||||
pricing_service = PricingService(db)
|
||||
|
||||
logger.info("Initializing default API pricing...")
|
||||
pricing_service.initialize_default_pricing()
|
||||
logger.info("✅ Default API pricing initialized")
|
||||
|
||||
logger.info("Initializing default subscription plans...")
|
||||
pricing_service.initialize_default_plans()
|
||||
logger.info("✅ Default subscription plans initialized")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error initializing default data: {e}")
|
||||
logger.error(traceback.format_exc())
|
||||
db.rollback()
|
||||
raise
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
logger.info("🎉 Subscription system setup completed successfully!")
|
||||
|
||||
# Display summary
|
||||
display_setup_summary(engine)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error creating subscription tables: {e}")
|
||||
logger.error(traceback.format_exc())
|
||||
raise
|
||||
|
||||
def display_setup_summary(engine):
|
||||
"""Display a summary of the created tables and data."""
|
||||
|
||||
try:
|
||||
with engine.connect() as conn:
|
||||
logger.info("\n" + "="*60)
|
||||
logger.info("SUBSCRIPTION SYSTEM SETUP SUMMARY")
|
||||
logger.info("="*60)
|
||||
|
||||
# Check tables
|
||||
tables_query = text("""
|
||||
SELECT name FROM sqlite_master
|
||||
WHERE type='table' AND (name LIKE '%subscription%' OR name LIKE '%usage%' OR name LIKE '%pricing%')
|
||||
ORDER BY name
|
||||
""")
|
||||
|
||||
result = conn.execute(tables_query)
|
||||
tables = result.fetchall()
|
||||
|
||||
logger.info(f"\n📊 Created Tables ({len(tables)}):")
|
||||
for table in tables:
|
||||
logger.info(f" • {table[0]}")
|
||||
|
||||
# Check subscription plans
|
||||
plans_query = text("SELECT COUNT(*) FROM subscription_plans")
|
||||
result = conn.execute(plans_query)
|
||||
plan_count = result.fetchone()[0]
|
||||
logger.info(f"\n💳 Subscription Plans: {plan_count}")
|
||||
|
||||
if plan_count > 0:
|
||||
plans_detail_query = text("""
|
||||
SELECT name, tier, price_monthly, price_yearly
|
||||
FROM subscription_plans
|
||||
ORDER BY price_monthly
|
||||
""")
|
||||
result = conn.execute(plans_detail_query)
|
||||
plans = result.fetchall()
|
||||
|
||||
for plan in plans:
|
||||
name, tier, monthly, yearly = plan
|
||||
logger.info(f" • {name} ({tier}): ${monthly}/month, ${yearly}/year")
|
||||
|
||||
# Check API pricing
|
||||
pricing_query = text("SELECT COUNT(*) FROM api_provider_pricing")
|
||||
result = conn.execute(pricing_query)
|
||||
pricing_count = result.fetchone()[0]
|
||||
logger.info(f"\n💰 API Pricing Entries: {pricing_count}")
|
||||
|
||||
if pricing_count > 0:
|
||||
pricing_detail_query = text("""
|
||||
SELECT provider, model_name, cost_per_input_token, cost_per_output_token
|
||||
FROM api_provider_pricing
|
||||
WHERE cost_per_input_token > 0 OR cost_per_output_token > 0
|
||||
ORDER BY provider, model_name
|
||||
""")
|
||||
result = conn.execute(pricing_detail_query)
|
||||
pricing_entries = result.fetchall()
|
||||
|
||||
logger.info("\n LLM Pricing (per token):")
|
||||
for entry in pricing_entries:
|
||||
provider, model, input_cost, output_cost = entry
|
||||
logger.info(f" • {provider}/{model}: ${input_cost:.8f} in, ${output_cost:.8f} out")
|
||||
|
||||
logger.info("\n" + "="*60)
|
||||
logger.info("NEXT STEPS:")
|
||||
logger.info("="*60)
|
||||
logger.info("1. Update your FastAPI app to include subscription routes:")
|
||||
logger.info(" from api.subscription_api import router as subscription_router")
|
||||
logger.info(" app.include_router(subscription_router)")
|
||||
logger.info("\n2. Update database service to include subscription models:")
|
||||
logger.info(" Add SubscriptionBase.metadata.create_all(bind=engine) to init_database()")
|
||||
logger.info("\n3. Test the API endpoints:")
|
||||
logger.info(" GET /api/subscription/plans")
|
||||
logger.info(" GET /api/subscription/usage/{user_id}")
|
||||
logger.info(" GET /api/subscription/dashboard/{user_id}")
|
||||
logger.info("\n4. Configure user identification in middleware")
|
||||
logger.info(" Ensure user_id is properly extracted from requests")
|
||||
logger.info("\n5. Set up monitoring dashboard frontend integration")
|
||||
logger.info("="*60)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error displaying summary: {e}")
|
||||
|
||||
def check_existing_tables(engine):
|
||||
"""Check if subscription tables already exist."""
|
||||
|
||||
try:
|
||||
with engine.connect() as conn:
|
||||
# Check for subscription tables
|
||||
check_query = text("""
|
||||
SELECT name FROM sqlite_master
|
||||
WHERE type='table' AND (
|
||||
name = 'subscription_plans' OR
|
||||
name = 'user_subscriptions' OR
|
||||
name = 'api_usage_logs' OR
|
||||
name = 'usage_summaries'
|
||||
)
|
||||
""")
|
||||
|
||||
result = conn.execute(check_query)
|
||||
existing_tables = result.fetchall()
|
||||
|
||||
if existing_tables:
|
||||
logger.warning(f"Found existing subscription tables: {[t[0] for t in existing_tables]}")
|
||||
response = input("Tables already exist. Do you want to continue and potentially overwrite data? (y/N): ")
|
||||
if response.lower() != 'y':
|
||||
logger.info("Migration cancelled by user")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error checking existing tables: {e}")
|
||||
return True # Proceed anyway
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("🚀 Starting subscription system database migration...")
|
||||
|
||||
try:
|
||||
# Create engine to check existing tables
|
||||
engine = create_engine(DATABASE_URL, echo=False)
|
||||
|
||||
# Check existing tables
|
||||
if not check_existing_tables(engine):
|
||||
sys.exit(0)
|
||||
|
||||
# Create tables and initialize data
|
||||
create_subscription_tables()
|
||||
|
||||
logger.info("✅ Migration completed successfully!")
|
||||
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Migration cancelled by user")
|
||||
sys.exit(0)
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Migration failed: {e}")
|
||||
sys.exit(1)
|
||||
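The NEXT STEPS output above describes the FastAPI wiring only in log text; a minimal sketch of that wiring, assuming api.subscription_api exposes the router exactly as those log lines suggest:

from fastapi import FastAPI
from api.subscription_api import router as subscription_router

app = FastAPI()
app.include_router(subscription_router)

# After this, endpoints such as GET /api/subscription/plans and
# GET /api/subscription/usage/{user_id} are served by this app instance.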
90
backend/scripts/fix_website_analysis_indexes.py
Normal file
@@ -0,0 +1,90 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Fix website analysis index name conflicts.
|
||||
Drops old conflicting indexes and ensures proper index names.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
from loguru import logger
|
||||
|
||||
# Add the backend directory to the Python path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
def fix_indexes():
|
||||
"""Fix index name conflicts."""
|
||||
db_path = backend_dir / "alwrity.db"
|
||||
|
||||
if not db_path.exists():
|
||||
logger.error(f"Database not found at {db_path}")
|
||||
return False
|
||||
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
try:
|
||||
# Check for old conflicting indexes
|
||||
cursor.execute("""
|
||||
SELECT name, tbl_name
|
||||
FROM sqlite_master
|
||||
WHERE type='index'
|
||||
AND name = 'idx_status'
|
||||
AND tbl_name IN ('website_analysis_tasks', 'website_analysis_execution_logs')
|
||||
""")
|
||||
|
||||
conflicting = cursor.fetchall()
|
||||
|
||||
if conflicting:
|
||||
logger.warning(f"Found {len(conflicting)} conflicting indexes:")
|
||||
for name, tbl_name in conflicting:
|
||||
logger.warning(f" - {name} on {tbl_name}")
|
||||
|
||||
# Drop old indexes
|
||||
for name, tbl_name in conflicting:
|
||||
try:
|
||||
cursor.execute(f"DROP INDEX IF EXISTS {name}")
|
||||
logger.info(f"✅ Dropped old index: {name} on {tbl_name}")
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error dropping index {name}: {e}")
|
||||
|
||||
conn.commit()
|
||||
logger.info("✅ Index conflicts resolved")
|
||||
else:
|
||||
logger.info("✅ No conflicting indexes found")
|
||||
|
||||
# Verify correct indexes exist
|
||||
cursor.execute("""
|
||||
SELECT name, tbl_name
|
||||
FROM sqlite_master
|
||||
WHERE type='index'
|
||||
AND (name LIKE '%website_analysis%' OR name LIKE '%competitor_analyses%')
|
||||
ORDER BY tbl_name, name
|
||||
""")
|
||||
|
||||
indexes = cursor.fetchall()
|
||||
logger.info(f"\n📋 Current website analysis indexes ({len(indexes)}):")
|
||||
for name, tbl_name in indexes:
|
||||
logger.info(f" - {name} on {tbl_name}")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error fixing indexes: {e}")
|
||||
conn.rollback()
|
||||
return False
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("🔧 Fixing website analysis index conflicts...")
|
||||
success = fix_indexes()
|
||||
if success:
|
||||
logger.info("✅ Index fix complete. You can now restart the backend.")
|
||||
sys.exit(0)
|
||||
else:
|
||||
logger.error("❌ Index fix failed")
|
||||
sys.exit(1)
|
||||
|
||||
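The conflict being repaired comes from two tables declaring an index under the same bare name (idx_status). A sketch of table-qualified index naming in the model definition that keeps the clash from returning; the model and column shown are assumptions, since those models are not part of this diff:

from sqlalchemy import Column, Integer, String, Index
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class WebsiteAnalysisTask(Base):
    __tablename__ = "website_analysis_tasks"

    id = Column(Integer, primary_key=True)
    status = Column(String(50))

    # Prefix the index name with the table so each table owns a unique index name.
    __table_args__ = (Index("idx_website_analysis_tasks_status", "status"),)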
203
backend/scripts/generate_test_monitoring_data.py
Normal file
@@ -0,0 +1,203 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Generate Test Monitoring Data
|
||||
Creates sample API monitoring data to demonstrate the dashboard charts and animations.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import random
|
||||
from datetime import datetime, timedelta
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import text
|
||||
|
||||
# Add the backend directory to the path
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from services.database import get_db
|
||||
from models.api_monitoring import APIRequest, APIEndpointStats
|
||||
from loguru import logger
|
||||
|
||||
def generate_test_monitoring_data():
|
||||
"""Generate test monitoring data for demonstration."""
|
||||
logger.info("🎯 Generating test monitoring data...")
|
||||
|
||||
db = next(get_db())
|
||||
|
||||
try:
|
||||
# Sample endpoints
|
||||
endpoints = [
|
||||
("GET", "/api/content-planning/strategies"),
|
||||
("POST", "/api/content-planning/calendar-generation/start"),
|
||||
("GET", "/api/content-planning/monitoring/lightweight-stats"),
|
||||
("GET", "/api/content-planning/health"),
|
||||
("POST", "/api/content-planning/ai-analytics/analyze"),
|
||||
("GET", "/api/content-planning/gap-analysis"),
|
||||
("PUT", "/api/content-planning/strategies/1"),
|
||||
("DELETE", "/api/content-planning/strategies/2"),
|
||||
]
|
||||
|
||||
# Generate requests for the last 30 minutes
|
||||
now = datetime.utcnow()
|
||||
start_time = now - timedelta(minutes=30)
|
||||
|
||||
logger.info(f"📊 Generating data from {start_time} to {now}")
|
||||
|
||||
for i in range(100): # Generate 100 requests
|
||||
# Random time within the last 30 minutes
|
||||
timestamp = start_time + timedelta(
|
||||
seconds=random.randint(0, 30 * 60)
|
||||
)
|
||||
|
||||
# Random endpoint
|
||||
method, path = random.choice(endpoints)
|
||||
|
||||
# Random status code (mostly 200, some errors)
|
||||
if random.random() < 0.9: # 90% success rate
|
||||
status_code = 200
|
||||
else:
|
||||
status_code = random.choice([400, 401, 403, 404, 500, 502, 503])
|
||||
|
||||
# Random duration (0.1 to 2.0 seconds)
|
||||
duration = random.uniform(0.1, 2.0)
|
||||
|
||||
# Random cache hit
|
||||
cache_hit = random.choice([True, False, None])
|
||||
|
||||
# Create API request
|
||||
api_request = APIRequest(
|
||||
path=path,
|
||||
method=method,
|
||||
status_code=status_code,
|
||||
duration=duration,
|
||||
user_id=f"user_{random.randint(1, 10)}",
|
||||
cache_hit=cache_hit,
|
||||
request_size=random.randint(100, 5000),
|
||||
response_size=random.randint(500, 10000),
|
||||
user_agent="Mozilla/5.0 (Test Browser)",
|
||||
ip_address=f"192.168.1.{random.randint(1, 255)}",
|
||||
timestamp=timestamp
|
||||
)
|
||||
db.add(api_request)
|
||||
|
||||
# Generate endpoint stats
|
||||
for method, path in endpoints:
|
||||
endpoint_key = f"{method} {path}"
|
||||
|
||||
# Check if stats already exist
|
||||
existing_stats = db.query(APIEndpointStats).filter(
|
||||
APIEndpointStats.endpoint == endpoint_key
|
||||
).first()
|
||||
|
||||
if existing_stats:
|
||||
# Update existing stats
|
||||
total_requests = random.randint(50, 200)
|
||||
total_errors = random.randint(0, total_requests // 10)
|
||||
total_duration = random.uniform(10.0, 100.0)
|
||||
|
||||
existing_stats.total_requests = total_requests
|
||||
existing_stats.total_errors = total_errors
|
||||
existing_stats.total_duration = total_duration
|
||||
existing_stats.avg_duration = total_duration / total_requests
|
||||
existing_stats.min_duration = random.uniform(0.05, 0.5)
|
||||
existing_stats.max_duration = random.uniform(1.0, 3.0)
|
||||
existing_stats.cache_hits = random.randint(0, total_requests // 2)
|
||||
existing_stats.cache_misses = random.randint(0, total_requests // 3)
|
||||
existing_stats.last_called = now
|
||||
|
||||
if existing_stats.cache_hits + existing_stats.cache_misses > 0:
|
||||
existing_stats.cache_hit_rate = (
|
||||
existing_stats.cache_hits /
|
||||
(existing_stats.cache_hits + existing_stats.cache_misses)
|
||||
) * 100
|
||||
else:
|
||||
# Create new stats
|
||||
total_requests = random.randint(50, 200)
|
||||
total_errors = random.randint(0, total_requests // 10)
|
||||
total_duration = random.uniform(10.0, 100.0)
|
||||
cache_hits = random.randint(0, total_requests // 2)
|
||||
cache_misses = random.randint(0, total_requests // 3)
|
||||
|
||||
endpoint_stats = APIEndpointStats(
|
||||
endpoint=endpoint_key,
|
||||
total_requests=total_requests,
|
||||
total_errors=total_errors,
|
||||
total_duration=total_duration,
|
||||
avg_duration=total_duration / total_requests,
|
||||
min_duration=random.uniform(0.05, 0.5),
|
||||
max_duration=random.uniform(1.0, 3.0),
|
||||
cache_hits=cache_hits,
|
||||
cache_misses=cache_misses,
|
||||
cache_hit_rate=(cache_hits / (cache_hits + cache_misses)) * 100 if (cache_hits + cache_misses) > 0 else 0,
|
||||
last_called=now
|
||||
)
|
||||
db.add(endpoint_stats)
|
||||
|
||||
db.commit()
|
||||
logger.info("✅ Test monitoring data generated successfully!")
|
||||
|
||||
# Show summary
|
||||
total_requests = db.query(APIRequest).count()
|
||||
total_errors = db.query(APIRequest).filter(APIRequest.status_code >= 400).count()
|
||||
total_endpoints = db.query(APIEndpointStats).count()
|
||||
|
||||
logger.info(f"📈 Generated {total_requests} API requests")
|
||||
logger.info(f"❌ Generated {total_errors} error requests")
|
||||
logger.info(f"🔗 Generated stats for {total_endpoints} endpoints")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error generating test data: {str(e)}")
|
||||
db.rollback()
|
||||
return False
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
def clear_test_data():
|
||||
"""Clear all test monitoring data."""
|
||||
logger.info("🗑️ Clearing test monitoring data...")
|
||||
|
||||
db = next(get_db())
|
||||
|
||||
try:
|
||||
# Clear all data
|
||||
db.execute(text("DELETE FROM api_requests"))
|
||||
db.execute(text("DELETE FROM api_endpoint_stats"))
|
||||
db.execute(text("DELETE FROM system_health"))
|
||||
db.execute(text("DELETE FROM cache_performance"))
|
||||
|
||||
db.commit()
|
||||
logger.info("✅ Test monitoring data cleared successfully!")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error clearing test data: {str(e)}")
|
||||
db.rollback()
|
||||
return False
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
if __name__ == "__main__":
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser(description="Generate test monitoring data")
|
||||
parser.add_argument("--action", choices=["generate", "clear"], default="generate",
|
||||
help="Action to perform (generate or clear test data)")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.action == "generate":
|
||||
success = generate_test_monitoring_data()
|
||||
if success:
|
||||
logger.info("🎉 Test data generation completed successfully!")
|
||||
else:
|
||||
logger.error("💥 Test data generation failed!")
|
||||
sys.exit(1)
|
||||
elif args.action == "clear":
|
||||
success = clear_test_data()
|
||||
if success:
|
||||
logger.info("🗑️ Test data cleared successfully!")
|
||||
else:
|
||||
logger.error("💥 Failed to clear test data!")
|
||||
sys.exit(1)
|
||||
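After the generator runs, the data can be sanity-checked with a quick aggregate; a sketch using the same session helper and model imported by the script above:

from sqlalchemy import case, func
from services.database import get_db
from models.api_monitoring import APIRequest

db = next(get_db())
try:
    rows = (
        db.query(
            APIRequest.path,
            func.count(APIRequest.path).label("requests"),
            func.sum(case((APIRequest.status_code >= 400, 1), else_=0)).label("errors"),
        )
        .group_by(APIRequest.path)
        .all()
    )
    for path, requests, errors in rows:
        print(f"{path}: {requests} requests, {errors} errors")
finally:
    db.close()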
304
backend/scripts/init_alpha_subscription_tiers.py
Normal file
@@ -0,0 +1,304 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Initialize Alpha Tester Subscription Tiers
|
||||
Creates subscription plans for alpha testing with appropriate limits.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
from models.subscription_models import (
|
||||
SubscriptionPlan, SubscriptionTier, APIProviderPricing, APIProvider
|
||||
)
|
||||
from services.database import get_db_session
|
||||
from datetime import datetime
|
||||
from loguru import logger
|
||||
|
||||
def create_alpha_subscription_tiers():
|
||||
"""Create subscription tiers for alpha testers."""
|
||||
if os.getenv('ENABLE_ALPHA', 'false').lower() not in {'1','true','yes','on'}:
|
||||
logger.info("Alpha tier initialization is disabled (ENABLE_ALPHA is false)")
|
||||
return False
|
||||
|
||||
db = get_db_session()
|
||||
if not db:
|
||||
logger.error("❌ Could not get database session")
|
||||
return False
|
||||
|
||||
try:
|
||||
# Define alpha subscription tiers
|
||||
alpha_tiers = [
|
||||
{
|
||||
"name": "Free Alpha",
|
||||
"tier": SubscriptionTier.FREE,
|
||||
"price_monthly": 0.0,
|
||||
"price_yearly": 0.0,
|
||||
"description": "Free tier for alpha testing - Limited usage",
|
||||
"features": ["blog_writer", "basic_seo", "content_planning"],
|
||||
"limits": {
|
||||
"gemini_calls_limit": 50, # 50 calls per day
|
||||
"gemini_tokens_limit": 10000, # 10k tokens per day
|
||||
"tavily_calls_limit": 20, # 20 searches per day
|
||||
"serper_calls_limit": 10, # 10 SEO searches per day
|
||||
"stability_calls_limit": 5, # 5 images per day
|
||||
"monthly_cost_limit": 5.0 # $5 monthly limit
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Basic Alpha",
|
||||
"tier": SubscriptionTier.BASIC,
|
||||
"price_monthly": 29.0,
|
||||
"price_yearly": 290.0,
|
||||
"description": "Basic alpha tier - Moderate usage for testing",
|
||||
"features": ["blog_writer", "seo_analysis", "content_planning", "strategy_copilot"],
|
||||
"limits": {
|
||||
"gemini_calls_limit": 200, # 200 calls per day
|
||||
"gemini_tokens_limit": 50000, # 50k tokens per day
|
||||
"tavily_calls_limit": 100, # 100 searches per day
|
||||
"serper_calls_limit": 50, # 50 SEO searches per day
|
||||
"stability_calls_limit": 25, # 25 images per day
|
||||
"monthly_cost_limit": 25.0 # $25 monthly limit
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Pro Alpha",
|
||||
"tier": SubscriptionTier.PRO,
|
||||
"price_monthly": 99.0,
|
||||
"price_yearly": 990.0,
|
||||
"description": "Pro alpha tier - High usage for power users",
|
||||
"features": ["blog_writer", "seo_analysis", "content_planning", "strategy_copilot", "advanced_analytics"],
|
||||
"limits": {
|
||||
"gemini_calls_limit": 500, # 500 calls per day
|
||||
"gemini_tokens_limit": 150000, # 150k tokens per day
|
||||
"tavily_calls_limit": 300, # 300 searches per day
|
||||
"serper_calls_limit": 150, # 150 SEO searches per day
|
||||
"stability_calls_limit": 100, # 100 images per day
|
||||
"monthly_cost_limit": 100.0 # $100 monthly limit
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Enterprise Alpha",
|
||||
"tier": SubscriptionTier.ENTERPRISE,
|
||||
"price_monthly": 299.0,
|
||||
"price_yearly": 2990.0,
|
||||
"description": "Enterprise alpha tier - Unlimited usage for enterprise testing",
|
||||
"features": ["blog_writer", "seo_analysis", "content_planning", "strategy_copilot", "advanced_analytics", "custom_integrations"],
|
||||
"limits": {
|
||||
"gemini_calls_limit": 0, # Unlimited calls
|
||||
"gemini_tokens_limit": 0, # Unlimited tokens
|
||||
"tavily_calls_limit": 0, # Unlimited searches
|
||||
"serper_calls_limit": 0, # Unlimited SEO searches
|
||||
"stability_calls_limit": 0, # Unlimited images
|
||||
"monthly_cost_limit": 500.0 # $500 monthly limit
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
# Create subscription plans
|
||||
for tier_data in alpha_tiers:
|
||||
# Check if plan already exists
|
||||
existing_plan = db.query(SubscriptionPlan).filter(
|
||||
SubscriptionPlan.name == tier_data["name"]
|
||||
).first()
|
||||
|
||||
if existing_plan:
|
||||
logger.info(f"✅ Plan '{tier_data['name']}' already exists, updating...")
|
||||
# Update existing plan
|
||||
for key, value in tier_data["limits"].items():
|
||||
setattr(existing_plan, key, value)
|
||||
existing_plan.description = tier_data["description"]
|
||||
existing_plan.features = tier_data["features"]
|
||||
existing_plan.updated_at = datetime.utcnow()
|
||||
else:
|
||||
logger.info(f"🆕 Creating new plan: {tier_data['name']}")
|
||||
# Create new plan
|
||||
plan = SubscriptionPlan(
|
||||
name=tier_data["name"],
|
||||
tier=tier_data["tier"],
|
||||
price_monthly=tier_data["price_monthly"],
|
||||
price_yearly=tier_data["price_yearly"],
|
||||
description=tier_data["description"],
|
||||
features=tier_data["features"],
|
||||
**tier_data["limits"]
|
||||
)
|
||||
db.add(plan)
|
||||
|
||||
db.commit()
|
||||
logger.info("✅ Alpha subscription tiers created/updated successfully!")
|
||||
|
||||
# Create API provider pricing
|
||||
create_api_pricing(db)
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error creating alpha subscription tiers: {e}")
|
||||
db.rollback()
|
||||
return False
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
def create_api_pricing(db: Session):
|
||||
"""Create API provider pricing configuration."""
|
||||
|
||||
try:
|
||||
# Gemini pricing (based on current Google AI pricing)
|
||||
gemini_pricing = [
|
||||
{
|
||||
"model_name": "gemini-2.0-flash-exp",
|
||||
"cost_per_input_token": 0.00000075, # $0.75 per 1M tokens
|
||||
"cost_per_output_token": 0.000003, # $3 per 1M tokens
|
||||
"description": "Gemini 2.0 Flash Experimental"
|
||||
},
|
||||
{
|
||||
"model_name": "gemini-1.5-flash",
|
||||
"cost_per_input_token": 0.00000075, # $0.75 per 1M tokens
|
||||
"cost_per_output_token": 0.000003, # $3 per 1M tokens
|
||||
"description": "Gemini 1.5 Flash"
|
||||
},
|
||||
{
|
||||
"model_name": "gemini-1.5-pro",
|
||||
"cost_per_input_token": 0.00000125, # $1.25 per 1M tokens
|
||||
"cost_per_output_token": 0.000005, # $5 per 1M tokens
|
||||
"description": "Gemini 1.5 Pro"
|
||||
}
|
||||
]
|
||||
|
||||
# Tavily pricing
|
||||
tavily_pricing = [
|
||||
{
|
||||
"model_name": "search",
|
||||
"cost_per_search": 0.001, # $0.001 per search
|
||||
"description": "Tavily Search API"
|
||||
}
|
||||
]
|
||||
|
||||
# Serper pricing
|
||||
serper_pricing = [
|
||||
{
|
||||
"model_name": "search",
|
||||
"cost_per_search": 0.001, # $0.001 per search
|
||||
"description": "Serper Google Search API"
|
||||
}
|
||||
]
|
||||
|
||||
# Stability AI pricing
|
||||
stability_pricing = [
|
||||
{
|
||||
"model_name": "stable-diffusion-xl",
|
||||
"cost_per_image": 0.01, # $0.01 per image
|
||||
"description": "Stable Diffusion XL"
|
||||
}
|
||||
]
|
||||
|
||||
# Create pricing records
|
||||
pricing_configs = [
|
||||
(APIProvider.GEMINI, gemini_pricing),
|
||||
(APIProvider.TAVILY, tavily_pricing),
|
||||
(APIProvider.SERPER, serper_pricing),
|
||||
(APIProvider.STABILITY, stability_pricing)
|
||||
]
|
||||
|
||||
for provider, pricing_list in pricing_configs:
|
||||
for pricing_data in pricing_list:
|
||||
# Check if pricing already exists
|
||||
existing_pricing = db.query(APIProviderPricing).filter(
|
||||
APIProviderPricing.provider == provider,
|
||||
APIProviderPricing.model_name == pricing_data["model_name"]
|
||||
).first()
|
||||
|
||||
if existing_pricing:
|
||||
logger.info(f"✅ Pricing for {provider.value}/{pricing_data['model_name']} already exists")
|
||||
else:
|
||||
logger.info(f"🆕 Creating pricing for {provider.value}/{pricing_data['model_name']}")
|
||||
pricing = APIProviderPricing(
|
||||
provider=provider,
|
||||
**pricing_data
|
||||
)
|
||||
db.add(pricing)
|
||||
|
||||
db.commit()
|
||||
logger.info("✅ API provider pricing created successfully!")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error creating API pricing: {e}")
|
||||
db.rollback()
|
||||
|
||||
def assign_default_plan_to_users():
|
||||
"""Assign Free Alpha plan to all existing users."""
|
||||
if os.getenv('ENABLE_ALPHA', 'false').lower() not in {'1','true','yes','on'}:
|
||||
logger.info("Alpha default plan assignment is disabled (ENABLE_ALPHA is false)")
|
||||
return False
|
||||
|
||||
db = get_db_session()
|
||||
if not db:
|
||||
logger.error("❌ Could not get database session")
|
||||
return False
|
||||
|
||||
try:
|
||||
# Get Free Alpha plan
|
||||
free_plan = db.query(SubscriptionPlan).filter(
|
||||
SubscriptionPlan.name == "Free Alpha"
|
||||
).first()
|
||||
|
||||
if not free_plan:
|
||||
logger.error("❌ Free Alpha plan not found")
|
||||
return False
|
||||
|
||||
# For now, we'll create a default user subscription
|
||||
# In a real system, you'd query actual users
|
||||
from models.subscription_models import UserSubscription, BillingCycle, UsageStatus
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
# Create default user subscription for testing
|
||||
default_user_id = "default_user"
|
||||
existing_subscription = db.query(UserSubscription).filter(
|
||||
UserSubscription.user_id == default_user_id
|
||||
).first()
|
||||
|
||||
if not existing_subscription:
|
||||
logger.info(f"🆕 Creating default subscription for {default_user_id}")
|
||||
subscription = UserSubscription(
|
||||
user_id=default_user_id,
|
||||
plan_id=free_plan.id,
|
||||
billing_cycle=BillingCycle.MONTHLY,
|
||||
current_period_start=datetime.utcnow(),
|
||||
current_period_end=datetime.utcnow() + timedelta(days=30),
|
||||
status=UsageStatus.ACTIVE,
|
||||
is_active=True,
|
||||
auto_renew=True
|
||||
)
|
||||
db.add(subscription)
|
||||
db.commit()
|
||||
logger.info(f"✅ Default subscription created for {default_user_id}")
|
||||
else:
|
||||
logger.info(f"✅ Default subscription already exists for {default_user_id}")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error assigning default plan: {e}")
|
||||
db.rollback()
|
||||
return False
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("🚀 Initializing Alpha Subscription Tiers...")
|
||||
|
||||
success = create_alpha_subscription_tiers()
|
||||
if success:
|
||||
logger.info("✅ Subscription tiers created successfully!")
|
||||
|
||||
# Assign default plan
|
||||
assign_success = assign_default_plan_to_users()
|
||||
if assign_success:
|
||||
logger.info("✅ Default plan assigned successfully!")
|
||||
else:
|
||||
logger.error("❌ Failed to assign default plan")
|
||||
else:
|
||||
logger.error("❌ Failed to create subscription tiers")
|
||||
|
||||
logger.info("🎉 Alpha subscription system initialization complete!")
|
||||
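A limit of 0 is used throughout these tiers to mean unlimited (see the Enterprise Alpha comments), so any enforcement code has to special-case zero. A sketch of that check; where the per-day usage count comes from is an assumption, since tracking is not part of this script:

def within_daily_limit(calls_made_today: int, calls_limit: int) -> bool:
    """Return True when another call is allowed; a limit of 0 means unlimited."""
    if calls_limit == 0:
        return True
    return calls_made_today < calls_limit

# Against the Basic Alpha gemini limit of 200 calls/day:
assert within_daily_limit(150, 200) is True
assert within_daily_limit(200, 200) is False
assert within_daily_limit(10_000, 0) is True  # unlimited tier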
265
backend/scripts/init_stability_service.py
Normal file
@@ -0,0 +1,265 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Initialization script for Stability AI service."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
|
||||
# Add backend directory to path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
from services.stability_service import StabilityAIService
|
||||
from config.stability_config import get_stability_config
|
||||
from loguru import logger
|
||||
|
||||
|
||||
async def test_stability_connection():
|
||||
"""Test connection to Stability AI API."""
|
||||
try:
|
||||
print("🔧 Initializing Stability AI service...")
|
||||
|
||||
# Get configuration
|
||||
config = get_stability_config()
|
||||
print(f"✅ Configuration loaded")
|
||||
print(f" - API Key: {config.api_key[:8]}..." if config.api_key else " - API Key: Not set")
|
||||
print(f" - Base URL: {config.base_url}")
|
||||
print(f" - Timeout: {config.timeout}s")
|
||||
|
||||
# Initialize service
|
||||
service = StabilityAIService(api_key=config.api_key)
|
||||
print("✅ Service initialized")
|
||||
|
||||
# Test API connection
|
||||
print("\n🌐 Testing API connection...")
|
||||
|
||||
async with service:
|
||||
# Test account endpoint
|
||||
try:
|
||||
account_info = await service.get_account_details()
|
||||
print("✅ Account API test successful")
|
||||
print(f" - Account ID: {account_info.get('id', 'Unknown')}")
|
||||
print(f" - Email: {account_info.get('email', 'Unknown')}")
|
||||
|
||||
# Get balance
|
||||
balance_info = await service.get_account_balance()
|
||||
credits = balance_info.get('credits', 0)
|
||||
print(f" - Credits: {credits}")
|
||||
|
||||
if credits < 10:
|
||||
print("⚠️ Warning: Low credit balance")
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ Account API test failed: {str(e)}")
|
||||
return False
|
||||
|
||||
# Test engines endpoint
|
||||
try:
|
||||
engines = await service.list_engines()
|
||||
print("✅ Engines API test successful")
|
||||
print(f" - Available engines: {len(engines)}")
|
||||
|
||||
# List some engines
|
||||
for engine in engines[:3]:
|
||||
print(f" - {engine.get('name', 'Unknown')}: {engine.get('id', 'Unknown')}")
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ Engines API test failed: {str(e)}")
|
||||
return False
|
||||
|
||||
print("\n🎉 Stability AI service initialization completed successfully!")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ Initialization failed: {str(e)}")
|
||||
return False
|
||||
|
||||
|
||||
async def validate_service_setup():
|
||||
"""Validate complete service setup."""
|
||||
print("\n🔍 Validating service setup...")
|
||||
|
||||
validation_results = {
|
||||
"api_key": False,
|
||||
"dependencies": False,
|
||||
"file_permissions": False,
|
||||
"network_access": False
|
||||
}
|
||||
|
||||
# Check API key
|
||||
api_key = os.getenv("STABILITY_API_KEY")
|
||||
if api_key and api_key.startswith("sk-"):
|
||||
validation_results["api_key"] = True
|
||||
print("✅ API key format valid")
|
||||
else:
|
||||
print("❌ Invalid or missing API key")
|
||||
|
||||
# Check dependencies
|
||||
try:
|
||||
import aiohttp
|
||||
import PIL
|
||||
from pydantic import BaseModel
|
||||
validation_results["dependencies"] = True
|
||||
print("✅ Required dependencies available")
|
||||
except ImportError as e:
|
||||
print(f"❌ Missing dependency: {e}")
|
||||
|
||||
# Check file permissions
|
||||
try:
|
||||
test_dir = backend_dir / "temp_test"
|
||||
test_dir.mkdir(exist_ok=True)
|
||||
test_file = test_dir / "test.txt"
|
||||
test_file.write_text("test")
|
||||
test_file.unlink()
|
||||
test_dir.rmdir()
|
||||
validation_results["file_permissions"] = True
|
||||
print("✅ File system permissions OK")
|
||||
except Exception as e:
|
||||
print(f"❌ File permission error: {e}")
|
||||
|
||||
# Check network access
|
||||
try:
|
||||
import aiohttp
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get("https://api.stability.ai", timeout=aiohttp.ClientTimeout(total=10)) as response:
|
||||
validation_results["network_access"] = True
|
||||
print("✅ Network access to Stability AI API OK")
|
||||
except Exception as e:
|
||||
print(f"❌ Network access error: {e}")
|
||||
|
||||
# Summary
|
||||
passed = sum(validation_results.values())
|
||||
total = len(validation_results)
|
||||
|
||||
print(f"\n📊 Validation Summary: {passed}/{total} checks passed")
|
||||
|
||||
if passed == total:
|
||||
print("🎉 All validations passed! Service is ready to use.")
|
||||
else:
|
||||
print("⚠️ Some validations failed. Please address the issues above.")
|
||||
|
||||
return passed == total
|
||||
|
||||
|
||||
def setup_environment():
|
||||
"""Set up environment for Stability AI service."""
|
||||
print("🔧 Setting up environment...")
|
||||
|
||||
# Create necessary directories
|
||||
directories = [
|
||||
backend_dir / "generated_content",
|
||||
backend_dir / "generated_content" / "images",
|
||||
backend_dir / "generated_content" / "audio",
|
||||
backend_dir / "generated_content" / "3d_models",
|
||||
backend_dir / "logs",
|
||||
backend_dir / "cache"
|
||||
]
|
||||
|
||||
for directory in directories:
|
||||
directory.mkdir(parents=True, exist_ok=True)
|
||||
print(f"✅ Created directory: {directory}")
|
||||
|
||||
# Copy example environment file if .env doesn't exist
|
||||
env_file = backend_dir / ".env"
|
||||
example_env = backend_dir / ".env.stability.example"
|
||||
|
||||
if not env_file.exists() and example_env.exists():
|
||||
import shutil
|
||||
shutil.copy(example_env, env_file)
|
||||
print("✅ Created .env file from example")
|
||||
print("⚠️ Please edit .env file and add your Stability AI API key")
|
||||
|
||||
print("✅ Environment setup completed")
|
||||
|
||||
|
||||
def print_usage_examples():
|
||||
"""Print usage examples."""
|
||||
print("\n📚 Usage Examples:")
|
||||
print("\n1. Generate an image:")
|
||||
print("""
|
||||
curl -X POST "http://localhost:8000/api/stability/generate/ultra" \\
|
||||
-F "prompt=A majestic mountain landscape at sunset" \\
|
||||
-F "aspect_ratio=16:9" \\
|
||||
-F "style_preset=photographic" \\
|
||||
-o generated_image.png
|
||||
""")
|
||||
|
||||
print("2. Upscale an image:")
|
||||
print("""
|
||||
curl -X POST "http://localhost:8000/api/stability/upscale/fast" \\
|
||||
-F "image=@input_image.png" \\
|
||||
-o upscaled_image.png
|
||||
""")
|
||||
|
||||
print("3. Edit an image with inpainting:")
|
||||
print("""
|
||||
curl -X POST "http://localhost:8000/api/stability/edit/inpaint" \\
|
||||
-F "image=@input_image.png" \\
|
||||
-F "mask=@mask_image.png" \\
|
||||
-F "prompt=a beautiful garden" \\
|
||||
-o edited_image.png
|
||||
""")
|
||||
|
||||
print("4. Generate 3D model:")
|
||||
print("""
|
||||
curl -X POST "http://localhost:8000/api/stability/3d/stable-fast-3d" \\
|
||||
-F "image=@object_image.png" \\
|
||||
-o model.glb
|
||||
""")
|
||||
|
||||
print("5. Generate audio:")
|
||||
print("""
|
||||
curl -X POST "http://localhost:8000/api/stability/audio/text-to-audio" \\
|
||||
-F "prompt=Peaceful piano music with nature sounds" \\
|
||||
-F "duration=60" \\
|
||||
-o generated_audio.mp3
|
||||
""")
|
||||
|
||||
|
||||
def main():
|
||||
"""Main initialization function."""
|
||||
print("🚀 Stability AI Service Initialization")
|
||||
print("=" * 50)
|
||||
|
||||
# Setup environment
|
||||
setup_environment()
|
||||
|
||||
# Load environment variables
|
||||
from dotenv import load_dotenv
|
||||
load_dotenv()
|
||||
|
||||
# Run async validation
|
||||
async def run_validation():
|
||||
# Test connection
|
||||
connection_ok = await test_stability_connection()
|
||||
|
||||
# Validate setup
|
||||
setup_ok = await validate_service_setup()
|
||||
|
||||
return connection_ok and setup_ok
|
||||
|
||||
# Run validation
|
||||
success = asyncio.run(run_validation())
|
||||
|
||||
if success:
|
||||
print("\n🎉 Initialization completed successfully!")
|
||||
print("\n📋 Next steps:")
|
||||
print("1. Start the FastAPI server: python app.py")
|
||||
print("2. Visit http://localhost:8000/docs for API documentation")
|
||||
print("3. Test the endpoints using the examples below")
|
||||
|
||||
print_usage_examples()
|
||||
else:
|
||||
print("\n❌ Initialization failed!")
|
||||
print("\n🔧 Troubleshooting steps:")
|
||||
print("1. Check your STABILITY_API_KEY in .env file")
|
||||
print("2. Verify network connectivity to api.stability.ai")
|
||||
print("3. Ensure all dependencies are installed: pip install -r requirements.txt")
|
||||
print("4. Check account balance at https://platform.stability.ai/account")
|
||||
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
141
backend/scripts/migrate_all_tables_to_string.py
Normal file
@@ -0,0 +1,141 @@
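"""
Migration Script: Convert user_id columns from INTEGER to VARCHAR(255) across the
content planning and enhanced calendar tables so they can store Clerk user IDs.
"""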
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from loguru import logger
|
||||
from sqlalchemy import text
|
||||
from services.database import SessionLocal, engine
|
||||
|
||||
# Import models to ensure they are registered and we can recreate them
|
||||
from models.content_planning import (
|
||||
ContentStrategy, ContentGapAnalysis, ContentRecommendation, AIAnalysisResult,
|
||||
Base as ContentPlanningBase
|
||||
)
|
||||
from models.enhanced_calendar_models import (
|
||||
ContentCalendarTemplate, AICalendarRecommendation, ContentPerformanceTracking,
|
||||
ContentTrendAnalysis, ContentOptimization, CalendarGenerationSession,
|
||||
Base as EnhancedCalendarBase
|
||||
)
|
||||
|
||||
def migrate_table(db, table_name, base_metadata):
|
||||
"""Migrate user_id column for a specific table from INTEGER to VARCHAR(255)."""
|
||||
try:
|
||||
logger.info(f"Checking table: {table_name}")
|
||||
|
||||
# Check if table exists
|
||||
check_table_query = f"SELECT name FROM sqlite_master WHERE type='table' AND name='{table_name}';"
|
||||
result = db.execute(text(check_table_query))
|
||||
if not result.scalar():
|
||||
logger.warning(f"Table '{table_name}' does not exist. Skipping check, but will try to create it.")
|
||||
# If it doesn't exist, we can just create it with the new schema
|
||||
try:
|
||||
base_metadata.create_all(bind=engine, tables=[base_metadata.tables[table_name]], checkfirst=True)
|
||||
logger.success(f"✅ Created {table_name} with new schema")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create {table_name}: {e}")
|
||||
return True
|
||||
|
||||
# Check current column type
|
||||
check_column_query = f"SELECT type FROM pragma_table_info('{table_name}') WHERE name = 'user_id';"
|
||||
result = db.execute(text(check_column_query))
|
||||
current_type = result.scalar()
|
||||
|
||||
if not current_type:
|
||||
logger.info(f"Table {table_name} does not have user_id column. Skipping.")
|
||||
return True
|
||||
|
||||
if 'varchar' in current_type.lower() or 'text' in current_type.lower():
|
||||
logger.info(f"✅ {table_name}.user_id is already {current_type}. No migration needed.")
|
||||
return True
|
||||
|
||||
logger.info(f"Migrating {table_name}.user_id from {current_type} to VARCHAR...")
|
||||
|
||||
# Backup data
|
||||
backup_table = f"{table_name}_backup"
|
||||
db.execute(text(f"DROP TABLE IF EXISTS {backup_table}")) # Ensure clean state
|
||||
db.execute(text(f"CREATE TABLE {backup_table} AS SELECT * FROM {table_name}"))
|
||||
|
||||
# Drop old table
|
||||
db.execute(text(f"DROP TABLE {table_name}"))
|
||||
|
||||
# Recreate table
|
||||
# We need to find the Table object in metadata
|
||||
table_obj = base_metadata.tables.get(table_name)
|
||||
if table_obj is not None:
|
||||
base_metadata.create_all(bind=engine, tables=[table_obj], checkfirst=False)
|
||||
else:
|
||||
logger.error(f"Could not find Table object for {table_name} in metadata")
|
||||
# Restore backup and abort
|
||||
db.execute(text(f"ALTER TABLE {backup_table} RENAME TO {table_name}"))
|
||||
return False
|
||||
|
||||
# Restore data
|
||||
# Rebuild the INSERT from the backup table's column list, casting user_id to TEXT
# so existing integer IDs are restored as strings in the new schema.
|
||||
|
||||
# Get columns from backup
|
||||
columns_result = db.execute(text(f"PRAGMA table_info({backup_table})"))
|
||||
columns = [row[1] for row in columns_result]
|
||||
|
||||
cols_str = ", ".join(columns)
|
||||
|
||||
# Construct select list with cast
|
||||
select_parts = []
|
||||
for col in columns:
|
||||
if col == 'user_id':
|
||||
select_parts.append("CAST(user_id AS TEXT)")
|
||||
else:
|
||||
select_parts.append(col)
|
||||
select_str = ", ".join(select_parts)
|
||||
|
||||
restore_query = f"INSERT INTO {table_name} ({cols_str}) SELECT {select_str} FROM {backup_table}"
|
||||
db.execute(text(restore_query))
|
||||
|
||||
# Drop backup
|
||||
db.execute(text(f"DROP TABLE {backup_table}"))
|
||||
|
||||
db.commit()
|
||||
logger.success(f"✅ Migrated {table_name} successfully")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Failed to migrate {table_name}: {e}")
|
||||
db.rollback()
|
||||
return False
|
||||
|
||||
def migrate_all():
|
||||
db = SessionLocal()
|
||||
try:
|
||||
# Content Planning Tables
|
||||
cp_tables = [
|
||||
"content_strategies",
|
||||
"content_gap_analyses",
|
||||
"content_recommendations",
|
||||
"ai_analysis_results"
|
||||
]
|
||||
|
||||
for table in cp_tables:
|
||||
migrate_table(db, table, ContentPlanningBase.metadata)
|
||||
|
||||
# Enhanced Calendar Tables
|
||||
ec_tables = [
|
||||
"content_calendar_templates",
|
||||
"ai_calendar_recommendations",
|
||||
"content_performance_tracking",
|
||||
"content_trend_analysis",
|
||||
"content_optimizations",
|
||||
"calendar_generation_sessions"
|
||||
]
|
||||
|
||||
for table in ec_tables:
|
||||
migrate_table(db, table, EnhancedCalendarBase.metadata)
|
||||
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("Starting comprehensive user_id migration...")
|
||||
migrate_all()
|
||||
logger.info("Migration finished.")
|
||||
129
backend/scripts/migrate_user_id_to_string.py
Normal file
@@ -0,0 +1,129 @@
|
||||
"""
|
||||
Migration Script: Update onboarding_sessions.user_id from INTEGER to STRING
|
||||
This script updates the database schema to support Clerk user IDs (strings)
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from loguru import logger
|
||||
from sqlalchemy import text
|
||||
from services.database import SessionLocal, engine
|
||||
|
||||
def migrate_user_id_column():
|
||||
"""Migrate user_id column from INTEGER to VARCHAR(255)."""
|
||||
db = None  # ensure the name exists for the except/finally handlers below
try:
db = SessionLocal()
|
||||
|
||||
logger.info("Starting migration: user_id INTEGER -> VARCHAR(255)")
|
||||
|
||||
# Check if table exists (SQLite compatible)
|
||||
check_table_query = """
|
||||
SELECT name FROM sqlite_master
|
||||
WHERE type='table' AND name='onboarding_sessions';
|
||||
"""
|
||||
|
||||
result = db.execute(text(check_table_query))
|
||||
table_exists = result.scalar()
|
||||
|
||||
if not table_exists:
|
||||
logger.warning("Table 'onboarding_sessions' does not exist. Creating it instead.")
|
||||
# Create tables using the updated models
|
||||
from models.onboarding import Base
|
||||
Base.metadata.create_all(bind=engine, checkfirst=True)
|
||||
logger.success("✅ Created onboarding_sessions table with VARCHAR user_id")
|
||||
return True
|
||||
|
||||
# Check current column type (SQLite compatible)
|
||||
check_column_query = """
|
||||
SELECT type FROM pragma_table_info('onboarding_sessions')
|
||||
WHERE name = 'user_id';
|
||||
"""
|
||||
|
||||
result = db.execute(text(check_column_query))
|
||||
current_type = result.scalar()
|
||||
|
||||
if current_type and 'varchar' in current_type.lower():
|
||||
logger.info(f"✅ Column user_id is already VARCHAR ({current_type}). No migration needed.")
|
||||
return True
|
||||
|
||||
logger.info(f"Current user_id type: {current_type}")
|
||||
|
||||
# Backup existing data count
|
||||
count_query = "SELECT COUNT(*) FROM onboarding_sessions;"
|
||||
result = db.execute(text(count_query))
|
||||
record_count = result.scalar()
|
||||
logger.info(f"Found {record_count} existing records")
|
||||
|
||||
if record_count > 0:
|
||||
logger.warning("⚠️ Found existing records. Backing up data...")
|
||||
# You may want to add backup logic here if needed
|
||||
|
||||
# SQLite doesn't support ALTER COLUMN TYPE directly
|
||||
# We need to recreate the table
|
||||
logger.info("Recreating table with VARCHAR user_id (SQLite limitation)...")
|
||||
|
||||
# Backup data
|
||||
logger.info("Backing up existing data...")
|
||||
backup_query = """
|
||||
CREATE TABLE onboarding_sessions_backup AS
|
||||
SELECT * FROM onboarding_sessions;
|
||||
"""
|
||||
db.execute(text(backup_query))
|
||||
db.commit()
|
||||
|
||||
# Drop old table
|
||||
logger.info("Dropping old table...")
|
||||
db.execute(text("DROP TABLE onboarding_sessions;"))
|
||||
db.commit()
|
||||
|
||||
# Recreate table with correct schema
|
||||
logger.info("Creating new table with VARCHAR user_id...")
|
||||
from models.onboarding import Base
|
||||
Base.metadata.create_all(bind=engine, tables=[Base.metadata.tables['onboarding_sessions']], checkfirst=False)
|
||||
db.commit()
|
||||
|
||||
# Restore data (converting integers to strings)
|
||||
logger.info("Restoring data...")
|
||||
restore_query = """
|
||||
INSERT INTO onboarding_sessions (id, user_id, current_step, progress, started_at, updated_at)
|
||||
SELECT id, CAST(user_id AS TEXT), current_step, progress, started_at, updated_at
|
||||
FROM onboarding_sessions_backup;
|
||||
"""
|
||||
db.execute(text(restore_query))
|
||||
db.commit()
|
||||
|
||||
# Drop backup table
|
||||
logger.info("Cleaning up backup table...")
|
||||
db.execute(text("DROP TABLE onboarding_sessions_backup;"))
|
||||
db.commit()
|
||||
|
||||
logger.success("✅ Table recreated successfully")
|
||||
|
||||
logger.success("🎉 Migration completed successfully!")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Migration failed: {e}")
|
||||
if db:
|
||||
db.rollback()
|
||||
return False
|
||||
finally:
|
||||
if db:
|
||||
db.close()
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("="*60)
|
||||
logger.info("DATABASE MIGRATION: user_id INTEGER -> VARCHAR(255)")
|
||||
logger.info("="*60)
|
||||
|
||||
success = migrate_user_id_column()
|
||||
|
||||
if success:
|
||||
logger.success("\n✅ Migration completed successfully!")
|
||||
logger.info("The onboarding system now supports Clerk user IDs (strings)")
|
||||
else:
|
||||
logger.error("\n❌ Migration failed. Please check the logs above.")
|
||||
sys.exit(1)
|
||||
|
||||
168
backend/scripts/reset_basic_plan_usage.py
Normal file
@@ -0,0 +1,168 @@
|
||||
"""
|
||||
Quick script to reset usage counters for Basic plan users.
|
||||
|
||||
This fixes the issue where plan limits were updated but old usage data remained.
|
||||
Resets all usage counters (calls, tokens, images) to 0 for the current billing period.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timezone
|
||||
|
||||
# Add the backend directory to Python path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from loguru import logger
|
||||
|
||||
from models.subscription_models import SubscriptionPlan, SubscriptionTier, UserSubscription, UsageSummary, UsageStatus
|
||||
from services.database import DATABASE_URL
|
||||
from services.subscription import PricingService
|
||||
|
||||
def reset_basic_plan_usage():
|
||||
"""Reset usage counters for all Basic plan users."""
|
||||
|
||||
try:
|
||||
engine = create_engine(DATABASE_URL, echo=False)
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
db = SessionLocal()
|
||||
|
||||
try:
|
||||
# Find Basic plan
|
||||
basic_plan = db.query(SubscriptionPlan).filter(
|
||||
SubscriptionPlan.tier == SubscriptionTier.BASIC
|
||||
).first()
|
||||
|
||||
if not basic_plan:
|
||||
logger.error("❌ Basic plan not found in database!")
|
||||
return False
|
||||
|
||||
# Get all Basic plan users
|
||||
user_subscriptions = db.query(UserSubscription).filter(
|
||||
UserSubscription.plan_id == basic_plan.id,
|
||||
UserSubscription.is_active == True
|
||||
).all()
|
||||
|
||||
logger.info(f"Found {len(user_subscriptions)} Basic plan user(s)")
|
||||
|
||||
pricing_service = PricingService(db)
|
||||
reset_count = 0
|
||||
|
||||
for sub in user_subscriptions:
|
||||
try:
|
||||
# Get current billing period for this user
|
||||
current_period = pricing_service.get_current_billing_period(sub.user_id) or datetime.now(timezone.utc).strftime("%Y-%m")
|
||||
|
||||
# Find usage summary for current period
|
||||
usage_summary = db.query(UsageSummary).filter(
|
||||
UsageSummary.user_id == sub.user_id,
|
||||
UsageSummary.billing_period == current_period
|
||||
).first()
|
||||
|
||||
if usage_summary:
|
||||
# Store old values for logging
|
||||
old_gemini = usage_summary.gemini_calls or 0
|
||||
old_mistral = usage_summary.mistral_calls or 0
|
||||
old_tokens = (usage_summary.mistral_tokens or 0) + (usage_summary.gemini_tokens or 0)
|
||||
old_images = usage_summary.stability_calls or 0
|
||||
old_total_calls = usage_summary.total_calls or 0
|
||||
old_total_tokens = usage_summary.total_tokens or 0
|
||||
|
||||
# Reset all LLM provider counters
|
||||
usage_summary.gemini_calls = 0
|
||||
usage_summary.openai_calls = 0
|
||||
usage_summary.anthropic_calls = 0
|
||||
usage_summary.mistral_calls = 0
|
||||
|
||||
# Reset all token counters
|
||||
usage_summary.gemini_tokens = 0
|
||||
usage_summary.openai_tokens = 0
|
||||
usage_summary.anthropic_tokens = 0
|
||||
usage_summary.mistral_tokens = 0
|
||||
|
||||
# Reset image counter
|
||||
usage_summary.stability_calls = 0
|
||||
|
||||
# Reset totals
|
||||
usage_summary.total_calls = 0
|
||||
usage_summary.total_tokens = 0
|
||||
usage_summary.total_cost = 0.0
|
||||
|
||||
# Reset status to active
|
||||
usage_summary.usage_status = UsageStatus.ACTIVE
|
||||
usage_summary.updated_at = datetime.now(timezone.utc)
|
||||
|
||||
db.commit()
|
||||
reset_count += 1
|
||||
|
||||
logger.info(f"\n✅ Reset usage for user {sub.user_id} (period {current_period}):")
|
||||
logger.info(f" Calls: {old_gemini + old_mistral} (gemini: {old_gemini}, mistral: {old_mistral}) → 0")
|
||||
logger.info(f" Tokens: {old_tokens} → 0")
|
||||
logger.info(f" Images: {old_images} → 0")
|
||||
logger.info(f" Total Calls: {old_total_calls} → 0")
|
||||
logger.info(f" Total Tokens: {old_total_tokens} → 0")
|
||||
else:
|
||||
logger.info(f" ℹ️ No usage summary found for user {sub.user_id} (period {current_period}) - nothing to reset")
|
||||
|
||||
except Exception as reset_error:
|
||||
logger.error(f" ❌ Error resetting usage for user {sub.user_id}: {reset_error}")
|
||||
import traceback
|
||||
logger.error(traceback.format_exc())
|
||||
db.rollback()
|
||||
|
||||
if reset_count > 0:
|
||||
logger.info(f"\n✅ Successfully reset usage counters for {reset_count} user(s)")
|
||||
else:
|
||||
logger.info("\nℹ️ No usage counters to reset")
|
||||
|
||||
logger.info("\n" + "="*60)
|
||||
logger.info("RESET COMPLETE")
|
||||
logger.info("="*60)
|
||||
logger.info("\n💡 Usage counters have been reset. Users can now use their new limits.")
|
||||
logger.info(" Next API call will start counting from 0.")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
logger.error(f"❌ Error resetting usage: {e}")
|
||||
import traceback
|
||||
logger.error(traceback.format_exc())
|
||||
raise
|
||||
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Failed to connect to database: {e}")
|
||||
import traceback
|
||||
logger.error(traceback.format_exc())
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("🚀 Starting Basic plan usage counter reset...")
|
||||
logger.info("="*60)
|
||||
logger.info("This will reset all usage counters (calls, tokens, images) to 0")
|
||||
logger.info("for all Basic plan users in their current billing period.")
|
||||
logger.info("="*60)
|
||||
|
||||
try:
|
||||
success = reset_basic_plan_usage()
|
||||
|
||||
if success:
|
||||
logger.info("\n✅ Script completed successfully!")
|
||||
sys.exit(0)
|
||||
else:
|
||||
logger.error("\n❌ Script failed!")
|
||||
sys.exit(1)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
logger.info("\n⚠️ Script cancelled by user")
|
||||
sys.exit(1)
|
||||
except Exception as e:
|
||||
logger.error(f"\n❌ Unexpected error: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
100
backend/scripts/run_business_info_migration.py
Normal file
@@ -0,0 +1,100 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Migration script to create the user_business_info table.
|
||||
This script should be run once to set up the database schema.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
from loguru import logger
|
||||
|
||||
# Add the backend directory to the Python path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
def run_migration():
|
||||
"""Run the business info table migration."""
|
||||
try:
|
||||
# Get the database path
|
||||
db_path = backend_dir / "alwrity.db"
|
||||
|
||||
logger.info(f"🔄 Starting business info table migration...")
|
||||
logger.info(f"📁 Database path: {db_path}")
|
||||
|
||||
# Check if database exists
|
||||
if not db_path.exists():
|
||||
logger.warning(f"⚠️ Database file not found at {db_path}")
|
||||
logger.info("📝 Creating new database file...")
|
||||
|
||||
# Read the migration SQL
|
||||
migration_file = backend_dir / "database" / "migrations" / "add_business_info_table.sql"
|
||||
|
||||
if not migration_file.exists():
|
||||
logger.error(f"❌ Migration file not found: {migration_file}")
|
||||
return False
|
||||
|
||||
with open(migration_file, 'r') as f:
|
||||
migration_sql = f.read()
|
||||
|
||||
logger.info("📋 Migration SQL loaded successfully")
|
||||
|
||||
# Connect to database and run migration
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Check if table already exists
|
||||
cursor.execute("""
|
||||
SELECT name FROM sqlite_master
|
||||
WHERE type='table' AND name='user_business_info'
|
||||
""")
|
||||
|
||||
if cursor.fetchone():
|
||||
logger.info("ℹ️ Table 'user_business_info' already exists, skipping migration")
|
||||
conn.close()
|
||||
return True
|
||||
|
||||
# Execute the migration
|
||||
cursor.executescript(migration_sql)
|
||||
conn.commit()
|
||||
|
||||
# Verify the table was created
|
||||
cursor.execute("""
|
||||
SELECT name FROM sqlite_master
|
||||
WHERE type='table' AND name='user_business_info'
|
||||
""")
|
||||
|
||||
if cursor.fetchone():
|
||||
logger.success("✅ Migration completed successfully!")
|
||||
logger.info("📊 Table 'user_business_info' created with the following structure:")
|
||||
|
||||
# Show table structure
|
||||
cursor.execute("PRAGMA table_info(user_business_info)")
|
||||
columns = cursor.fetchall()
|
||||
|
||||
for col in columns:
|
||||
logger.info(f" - {col[1]} ({col[2]}) {'NOT NULL' if col[3] else 'NULL'}")
|
||||
|
||||
conn.close()
|
||||
return True
|
||||
else:
|
||||
logger.error("❌ Migration failed - table was not created")
|
||||
conn.close()
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Migration failed with error: {str(e)}")
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("🚀 Starting ALwrity Business Info Migration")
|
||||
|
||||
success = run_migration()
|
||||
|
||||
if success:
|
||||
logger.success("🎉 Migration completed successfully!")
|
||||
sys.exit(0)
|
||||
else:
|
||||
logger.error("💥 Migration failed!")
|
||||
sys.exit(1)
|
||||
35
backend/scripts/run_cumulative_stats_migration.py
Normal file
@@ -0,0 +1,35 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Script to run the cumulative stats migration.
|
||||
This creates the scheduler_cumulative_stats table.
|
||||
"""
|
||||
|
||||
import sqlite3
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Get the database path
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
backend_dir = os.path.dirname(script_dir)
|
||||
db_path = os.path.join(backend_dir, 'alwrity.db')
|
||||
migration_path = os.path.join(backend_dir, 'database', 'migrations', 'create_scheduler_cumulative_stats.sql')
|
||||
|
||||
if not os.path.exists(db_path):
|
||||
print(f"❌ Database not found at {db_path}")
|
||||
sys.exit(1)
|
||||
|
||||
if not os.path.exists(migration_path):
|
||||
print(f"❌ Migration file not found at {migration_path}")
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
conn = sqlite3.connect(db_path)
|
||||
with open(migration_path, 'r') as f:
|
||||
conn.executescript(f.read())
|
||||
conn.commit()
|
||||
print("✅ Migration executed successfully")
|
||||
conn.close()
|
||||
except Exception as e:
|
||||
print(f"❌ Error running migration: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
85
backend/scripts/run_failure_tracking_migration.py
Normal file
@@ -0,0 +1,85 @@
|
||||
"""
|
||||
Script to run the failure tracking migration.
|
||||
Adds consecutive_failures and failure_pattern columns to task tables.
|
||||
"""
|
||||
|
||||
import sqlite3
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Add parent directory to path to import migration
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
def run_migration():
|
||||
"""Run the failure tracking migration."""
|
||||
# Get database path
|
||||
db_path = os.getenv('DATABASE_URL', 'sqlite:///alwrity.db')
|
||||
|
||||
# Extract path from SQLite URL if needed
|
||||
if db_path.startswith('sqlite:///'):
|
||||
db_path = db_path.replace('sqlite:///', '')
|
||||
|
||||
if not os.path.exists(db_path):
|
||||
print(f"Database not found at {db_path}")
|
||||
return False
|
||||
|
||||
print(f"Running migration on database: {db_path}")
|
||||
|
||||
try:
|
||||
conn = sqlite3.connect(db_path)
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Read migration SQL
|
||||
migration_file = os.path.join(
|
||||
os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
|
||||
'database',
|
||||
'migrations',
|
||||
'add_failure_tracking_to_tasks.sql'
|
||||
)
|
||||
|
||||
if not os.path.exists(migration_file):
|
||||
print(f"Migration file not found: {migration_file}")
|
||||
return False
|
||||
|
||||
with open(migration_file, 'r') as f:
|
||||
migration_sql = f.read()
|
||||
|
||||
# Execute migration (SQLite doesn't support multiple statements in execute, so split)
|
||||
statements = [s.strip() for s in migration_sql.split(';') if s.strip()]
|
||||
|
||||
for statement in statements:
|
||||
try:
|
||||
cursor.execute(statement)
|
||||
print(f"✓ Executed: {statement[:50]}...")
|
||||
except sqlite3.OperationalError as e:
|
||||
# Column might already exist - that's okay
|
||||
if 'duplicate column name' in str(e).lower() or 'already exists' in str(e).lower():
|
||||
print(f"⚠ Column already exists (skipping): {statement[:50]}...")
|
||||
else:
|
||||
raise
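# sqlite3's executescript() could run the whole file in one call, but it aborts on
# the first error; executing statement-by-statement lets "duplicate column" errors
# be skipped so the migration stays safe to re-run.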
|
||||
|
||||
conn.commit()
|
||||
print("\n✅ Migration completed successfully!")
|
||||
|
||||
# Verify columns were added
|
||||
cursor.execute("PRAGMA table_info(oauth_token_monitoring_tasks)")
|
||||
columns = [row[1] for row in cursor.fetchall()]
|
||||
|
||||
if 'consecutive_failures' in columns and 'failure_pattern' in columns:
|
||||
print("✓ Verified: consecutive_failures and failure_pattern columns exist")
|
||||
else:
|
||||
print("⚠ Warning: Could not verify columns were added")
|
||||
|
||||
conn.close()
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ Error running migration: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
success = run_migration()
|
||||
sys.exit(0 if success else 1)
|
||||
|
||||
91
backend/scripts/run_final_video_url_migration.py
Normal file
@@ -0,0 +1,91 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Migration script to add final_video_url column to podcast_projects table.
|
||||
This script should be run once to add the column to existing databases.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
from loguru import logger
|
||||
|
||||
# Add the backend directory to the Python path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
def run_migration():
|
||||
"""Run the final_video_url column migration."""
|
||||
try:
|
||||
# Get the database path
|
||||
db_path = backend_dir / "alwrity.db"
|
||||
|
||||
logger.info(f"🔄 Starting final_video_url column migration...")
|
||||
logger.info(f"📁 Database path: {db_path}")
|
||||
|
||||
# Check if database exists
|
||||
if not db_path.exists():
|
||||
logger.warning(f"⚠️ Database file not found at {db_path}")
|
||||
logger.info("ℹ️ New databases will have this column created automatically by SQLAlchemy")
|
||||
return True
|
||||
|
||||
# Read the migration SQL
|
||||
migration_file = backend_dir / "database" / "migrations" / "009_add_final_video_url_to_podcast_projects.sql"
|
||||
|
||||
if not migration_file.exists():
|
||||
logger.error(f"❌ Migration file not found: {migration_file}")
|
||||
return False
|
||||
|
||||
with open(migration_file, 'r') as f:
|
||||
migration_sql = f.read()
|
||||
|
||||
logger.info("📋 Migration SQL loaded successfully")
|
||||
|
||||
# Connect to database and run migration
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Check if column already exists
|
||||
cursor.execute("PRAGMA table_info(podcast_projects)")
|
||||
columns = [row[1] for row in cursor.fetchall()]
|
||||
|
||||
if 'final_video_url' in columns:
|
||||
logger.info("ℹ️ Column 'final_video_url' already exists, skipping migration")
|
||||
conn.close()
|
||||
return True
|
||||
|
||||
# Execute the migration
|
||||
logger.info("🔧 Adding final_video_url column...")
|
||||
cursor.execute("ALTER TABLE podcast_projects ADD COLUMN final_video_url VARCHAR(1000) NULL")
|
||||
conn.commit()
|
||||
|
||||
# Verify the column was added
|
||||
cursor.execute("PRAGMA table_info(podcast_projects)")
|
||||
columns_after = [row[1] for row in cursor.fetchall()]
|
||||
|
||||
if 'final_video_url' in columns_after:
|
||||
logger.info("✅ Migration completed successfully! Column 'final_video_url' added to podcast_projects table")
|
||||
conn.close()
|
||||
return True
|
||||
else:
|
||||
logger.error("❌ Migration failed: Column was not added")
|
||||
conn.close()
|
||||
return False
|
||||
|
||||
except sqlite3.OperationalError as e:
|
||||
if "duplicate column name" in str(e).lower():
|
||||
logger.info("ℹ️ Column 'final_video_url' already exists, skipping migration")
|
||||
return True
|
||||
else:
|
||||
logger.error(f"❌ Database error: {e}")
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error running migration: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
success = run_migration()
|
||||
sys.exit(0 if success else 1)
|
||||
|
||||
165
backend/scripts/seed_seo_action_types.py
Normal file
@@ -0,0 +1,165 @@
|
||||
"""
|
||||
Seed the seo_action_types table with the canonical set of SEO actions.
|
||||
|
||||
Run (from backend/):
|
||||
python scripts/seed_seo_action_types.py
|
||||
"""
|
||||
|
||||
from typing import List, Dict
|
||||
from loguru import logger
|
||||
import sys, os
|
||||
|
||||
# Ensure backend/ is on sys.path when running as a script
|
||||
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
BACKEND_ROOT = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir))
|
||||
if BACKEND_ROOT not in sys.path:
|
||||
sys.path.insert(0, BACKEND_ROOT)
|
||||
|
||||
from services.database import init_database, get_db_session
|
||||
from models.seo_analysis import SEOActionType
|
||||
|
||||
|
||||
def get_actions() -> List[Dict]:
|
||||
return [
|
||||
{
|
||||
"code": "analyze_seo_comprehensive",
|
||||
"name": "Analyze SEO (Comprehensive)",
|
||||
"category": "analysis",
|
||||
"description": "Perform a comprehensive SEO analysis across technical, on-page, and performance.",
|
||||
},
|
||||
{
|
||||
"code": "generate_meta_descriptions",
|
||||
"name": "Generate Meta Descriptions",
|
||||
"category": "content",
|
||||
"description": "Generate optimized meta description suggestions for pages.",
|
||||
},
|
||||
{
|
||||
"code": "analyze_page_speed",
|
||||
"name": "Analyze Page Speed",
|
||||
"category": "performance",
|
||||
"description": "Run page speed and Core Web Vitals checks for mobile/desktop.",
|
||||
},
|
||||
{
|
||||
"code": "analyze_sitemap",
|
||||
"name": "Analyze Sitemap",
|
||||
"category": "discovery",
|
||||
"description": "Analyze sitemap structure, coverage, and publishing patterns.",
|
||||
},
|
||||
{
|
||||
"code": "generate_image_alt_text",
|
||||
"name": "Generate Image Alt Text",
|
||||
"category": "content",
|
||||
"description": "Propose SEO-friendly alt text for images.",
|
||||
},
|
||||
{
|
||||
"code": "generate_opengraph_tags",
|
||||
"name": "Generate OpenGraph Tags",
|
||||
"category": "content",
|
||||
"description": "Create OpenGraph/Twitter meta tags for better social previews.",
|
||||
},
|
||||
{
|
||||
"code": "analyze_on_page_seo",
|
||||
"name": "Analyze On-Page SEO",
|
||||
"category": "on_page",
|
||||
"description": "Audit titles, headings, keyword usage, and internal links.",
|
||||
},
|
||||
{
|
||||
"code": "analyze_technical_seo",
|
||||
"name": "Analyze Technical SEO",
|
||||
"category": "technical",
|
||||
"description": "Audit crawlability, canonicals, schema, security, and redirects.",
|
||||
},
|
||||
{
|
||||
"code": "analyze_enterprise_seo",
|
||||
"name": "Analyze Enterprise SEO",
|
||||
"category": "enterprise",
|
||||
"description": "Advanced enterprise-level audits and recommendations.",
|
||||
},
|
||||
{
|
||||
"code": "analyze_content_strategy",
|
||||
"name": "Analyze Content Strategy",
|
||||
"category": "content",
|
||||
"description": "Analyze content themes, gaps, and strategy effectiveness.",
|
||||
},
|
||||
{
|
||||
"code": "perform_website_audit",
|
||||
"name": "Perform Website Audit",
|
||||
"category": "analysis",
|
||||
"description": "Holistic website audit with prioritized issues and actions.",
|
||||
},
|
||||
{
|
||||
"code": "analyze_content_comprehensive",
|
||||
"name": "Analyze Content (Comprehensive)",
|
||||
"category": "content",
|
||||
"description": "Deep content analysis including readability and structure.",
|
||||
},
|
||||
{
|
||||
"code": "check_seo_health",
|
||||
"name": "Check SEO Health",
|
||||
"category": "analysis",
|
||||
"description": "Quick health check and score snapshot.",
|
||||
},
|
||||
{
|
||||
"code": "explain_seo_concept",
|
||||
"name": "Explain SEO Concept",
|
||||
"category": "education",
|
||||
"description": "Explain SEO concepts in simple terms with examples.",
|
||||
},
|
||||
{
|
||||
"code": "update_seo_charts",
|
||||
"name": "Update SEO Charts",
|
||||
"category": "visualization",
|
||||
"description": "Update dashboard charts and visualizations per user request.",
|
||||
},
|
||||
{
|
||||
"code": "customize_seo_dashboard",
|
||||
"name": "Customize SEO Dashboard",
|
||||
"category": "visualization",
|
||||
"description": "Modify dashboard layout, widgets, and focus areas.",
|
||||
},
|
||||
{
|
||||
"code": "analyze_seo_full",
|
||||
"name": "Analyze SEO (Full)",
|
||||
"category": "analysis",
|
||||
"description": "Full analysis variant (alternate flow or endpoint).",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
def seed_action_types():
|
||||
init_database()
|
||||
db = get_db_session()
|
||||
if db is None:
|
||||
raise RuntimeError("Could not get DB session")
|
||||
|
||||
try:
|
||||
actions = get_actions()
|
||||
created, updated, skipped = 0, 0, 0
|
||||
for action in actions:
|
||||
existing = db.query(SEOActionType).filter(SEOActionType.code == action["code"]).one_or_none()
|
||||
if existing:
|
||||
# Update name/category/description if changed
|
||||
changed = False
|
||||
if existing.name != action["name"]:
|
||||
existing.name = action["name"]; changed = True
|
||||
if existing.category != action["category"]:
|
||||
existing.category = action["category"]; changed = True
|
||||
if existing.description != action["description"]:
|
||||
existing.description = action["description"]; changed = True
|
||||
if changed:
|
||||
updated += 1
|
||||
else:
|
||||
skipped += 1
|
||||
else:
|
||||
db.add(SEOActionType(**action))
|
||||
created += 1
|
||||
db.commit()
|
||||
logger.info(f"SEO action types seeding done. created={created}, updated={updated}, unchanged={skipped}")
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
seed_action_types()
|
||||
|
||||
|
||||
197
backend/scripts/setup_gsc.py
Normal file
@@ -0,0 +1,197 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Google Search Console Setup Script for ALwrity
|
||||
|
||||
This script helps set up the GSC integration by:
|
||||
1. Checking if credentials file exists
|
||||
2. Validating database tables
|
||||
3. Testing OAuth flow
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import sqlite3
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
def check_credentials_file():
|
||||
"""Check if GSC credentials file exists and is valid."""
|
||||
credentials_path = Path("gsc_credentials.json")
|
||||
|
||||
if not credentials_path.exists():
|
||||
print("❌ GSC credentials file not found!")
|
||||
print("📝 Please create gsc_credentials.json with your Google OAuth credentials.")
|
||||
print("📋 Use gsc_credentials_template.json as a template.")
|
||||
return False
|
||||
|
||||
try:
|
||||
with open(credentials_path, 'r') as f:
|
||||
credentials = json.load(f)
|
||||
|
||||
required_fields = ['client_id', 'client_secret']
web_config = credentials.get('web', {})

if not all(field in web_config for field in required_fields):
|
||||
print("❌ GSC credentials file is missing required fields!")
|
||||
print("📝 Please ensure client_id and client_secret are present.")
|
||||
return False
|
||||
|
||||
if 'YOUR_GOOGLE_CLIENT_ID' in web_config.get('client_id', ''):
|
||||
print("❌ GSC credentials file contains placeholder values!")
|
||||
print("📝 Please replace placeholder values with actual Google OAuth credentials.")
|
||||
return False
|
||||
|
||||
print("✅ GSC credentials file is valid!")
|
||||
return True
|
||||
|
||||
except json.JSONDecodeError:
|
||||
print("❌ GSC credentials file is not valid JSON!")
|
||||
return False
|
||||
except Exception as e:
|
||||
print(f"❌ Error reading credentials file: {e}")
|
||||
return False
|
||||
|
||||
def check_database_tables():
|
||||
"""Check if GSC database tables exist."""
|
||||
db_path = "alwrity.db"
|
||||
|
||||
if not os.path.exists(db_path):
|
||||
print("❌ Database file not found!")
|
||||
print("📝 Please ensure the database is initialized.")
|
||||
return False
|
||||
|
||||
try:
|
||||
with sqlite3.connect(db_path) as conn:
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Check for GSC tables
|
||||
tables = [
|
||||
'gsc_credentials',
|
||||
'gsc_data_cache',
|
||||
'gsc_oauth_states'
|
||||
]
|
||||
|
||||
for table in tables:
|
||||
cursor.execute(f"SELECT name FROM sqlite_master WHERE type='table' AND name='{table}'")
|
||||
if not cursor.fetchone():
|
||||
print(f"❌ Table '{table}' not found!")
|
||||
return False
|
||||
|
||||
print("✅ All GSC database tables exist!")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ Error checking database: {e}")
|
||||
return False
|
||||
|
||||
def check_environment_variables():
|
||||
"""Check if required environment variables are set."""
|
||||
required_vars = ['GSC_REDIRECT_URI']
|
||||
missing_vars = []
|
||||
|
||||
for var in required_vars:
|
||||
if not os.getenv(var):
|
||||
missing_vars.append(var)
|
||||
|
||||
if missing_vars:
|
||||
print(f"❌ Missing environment variables: {', '.join(missing_vars)}")
|
||||
print("📝 Please set these in your .env file:")
|
||||
for var in missing_vars:
|
||||
if var == 'GSC_REDIRECT_URI':
|
||||
print(f" {var}=http://localhost:8000/gsc/callback")
|
||||
return False
|
||||
|
||||
print("✅ All required environment variables are set!")
|
||||
return True
|
||||
|
||||
def create_database_tables():
|
||||
"""Create GSC database tables if they don't exist."""
|
||||
db_path = "alwrity.db"
|
||||
|
||||
try:
|
||||
with sqlite3.connect(db_path) as conn:
|
||||
cursor = conn.cursor()
|
||||
|
||||
# GSC credentials table
|
||||
cursor.execute('''
|
||||
CREATE TABLE IF NOT EXISTS gsc_credentials (
|
||||
user_id TEXT PRIMARY KEY,
|
||||
credentials_json TEXT NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
''')
|
||||
|
||||
# GSC data cache table
|
||||
cursor.execute('''
|
||||
CREATE TABLE IF NOT EXISTS gsc_data_cache (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id TEXT NOT NULL,
|
||||
site_url TEXT NOT NULL,
|
||||
data_type TEXT NOT NULL,
|
||||
data_json TEXT NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
expires_at TIMESTAMP NOT NULL,
|
||||
FOREIGN KEY (user_id) REFERENCES gsc_credentials (user_id)
|
||||
)
|
||||
''')
|
||||
|
||||
# GSC OAuth states table
|
||||
cursor.execute('''
|
||||
CREATE TABLE IF NOT EXISTS gsc_oauth_states (
|
||||
state TEXT PRIMARY KEY,
|
||||
user_id TEXT NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
''')
|
||||
|
||||
conn.commit()
|
||||
print("✅ GSC database tables created successfully!")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ Error creating database tables: {e}")
|
||||
return False
|
||||
|
||||
def main():
|
||||
"""Main setup function."""
|
||||
print("🔧 Google Search Console Setup Check")
|
||||
print("=" * 50)
|
||||
|
||||
# Change to backend directory
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
os.chdir(backend_dir)
|
||||
|
||||
all_good = True
|
||||
|
||||
# Check credentials file
|
||||
print("\n1. Checking GSC credentials file...")
|
||||
if not check_credentials_file():
|
||||
all_good = False
|
||||
|
||||
# Check environment variables
|
||||
print("\n2. Checking environment variables...")
|
||||
if not check_environment_variables():
|
||||
all_good = False
|
||||
|
||||
# Check/create database tables
|
||||
print("\n3. Checking database tables...")
|
||||
if not check_database_tables():
|
||||
print("📝 Creating missing database tables...")
|
||||
if not create_database_tables():
|
||||
all_good = False
|
||||
|
||||
# Summary
|
||||
print("\n" + "=" * 50)
|
||||
if all_good:
|
||||
print("✅ GSC setup is complete!")
|
||||
print("🚀 You can now test the GSC integration in onboarding step 5.")
|
||||
else:
|
||||
print("❌ GSC setup is incomplete!")
|
||||
print("📝 Please fix the issues above before testing.")
|
||||
print("📖 See GSC_SETUP_GUIDE.md for detailed instructions.")
|
||||
|
||||
return 0 if all_good else 1
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
279
backend/scripts/update_basic_plan_limits.py
Normal file
@@ -0,0 +1,279 @@
|
||||
"""
|
||||
Script to update Basic plan subscription limits for testing rate limits and renewal flows.
|
||||
|
||||
Updates:
|
||||
- LLM Calls (all providers): 10 calls (was 500-1000)
|
||||
- LLM Tokens (all providers): 20000 tokens (increased from 5000 to support research workflows)
|
||||
- Images: 5 images (was 50)
|
||||
|
||||
This script updates the SubscriptionPlan table, which automatically applies to all users
|
||||
who have a Basic plan subscription via the plan_id foreign key.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timezone
|
||||
|
||||
# Add the backend directory to Python path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from loguru import logger
|
||||
|
||||
from models.subscription_models import SubscriptionPlan, SubscriptionTier, UserSubscription, UsageStatus
|
||||
from services.database import DATABASE_URL
|
||||
|
||||
def update_basic_plan_limits():
|
||||
"""Update Basic plan limits for testing rate limits and renewal."""
|
||||
|
||||
try:
|
||||
engine = create_engine(DATABASE_URL, echo=False)
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
db = SessionLocal()
|
||||
|
||||
try:
|
||||
# Find Basic plan
|
||||
basic_plan = db.query(SubscriptionPlan).filter(
|
||||
SubscriptionPlan.tier == SubscriptionTier.BASIC
|
||||
).first()
|
||||
|
||||
if not basic_plan:
|
||||
logger.error("❌ Basic plan not found in database!")
|
||||
return False
|
||||
|
||||
# Store old values for logging
|
||||
old_limits = {
|
||||
'gemini_calls': basic_plan.gemini_calls_limit,
|
||||
'mistral_calls': basic_plan.mistral_calls_limit,
|
||||
'gemini_tokens': basic_plan.gemini_tokens_limit,
|
||||
'mistral_tokens': basic_plan.mistral_tokens_limit,
|
||||
'stability_calls': basic_plan.stability_calls_limit,
|
||||
}
|
||||
|
||||
logger.info(f"📋 Current Basic plan limits:")
|
||||
logger.info(f" Gemini Calls: {old_limits['gemini_calls']}")
|
||||
logger.info(f" Mistral Calls: {old_limits['mistral_calls']}")
|
||||
logger.info(f" Gemini Tokens: {old_limits['gemini_tokens']}")
|
||||
logger.info(f" Mistral Tokens: {old_limits['mistral_tokens']}")
|
||||
logger.info(f" Images (Stability): {old_limits['stability_calls']}")
|
||||
|
||||
# Update unified AI text generation limit to 10
|
||||
basic_plan.ai_text_generation_calls_limit = 10
|
||||
|
||||
# Legacy per-provider limits (kept for backwards compatibility, but not used for enforcement)
|
||||
basic_plan.gemini_calls_limit = 1000
|
||||
basic_plan.openai_calls_limit = 500
|
||||
basic_plan.anthropic_calls_limit = 200
|
||||
basic_plan.mistral_calls_limit = 500
|
||||
|
||||
# Update all LLM provider token limits to 20000 (increased from 5000 for better stability)
|
||||
basic_plan.gemini_tokens_limit = 20000
|
||||
basic_plan.openai_tokens_limit = 20000
|
||||
basic_plan.anthropic_tokens_limit = 20000
|
||||
basic_plan.mistral_tokens_limit = 20000
|
||||
|
||||
# Update image generation limit to 5
|
||||
basic_plan.stability_calls_limit = 5
|
||||
|
||||
# Update timestamp
|
||||
basic_plan.updated_at = datetime.now(timezone.utc)
|
||||
|
||||
logger.info("\n📝 New Basic plan limits:")
|
||||
logger.info(f" LLM Calls (all providers): 10")
|
||||
logger.info(f" LLM Tokens (all providers): 20000 (increased from 5000)")
|
||||
logger.info(f" Images: 5")
|
||||
|
||||
# Count and get affected users
|
||||
user_subscriptions = db.query(UserSubscription).filter(
|
||||
UserSubscription.plan_id == basic_plan.id,
|
||||
UserSubscription.is_active == True
|
||||
).all()
|
||||
|
||||
affected_users = len(user_subscriptions)
|
||||
|
||||
logger.info(f"\n👥 Users affected: {affected_users}")
|
||||
|
||||
if affected_users > 0:
|
||||
logger.info("\n📋 Affected user IDs:")
|
||||
for sub in user_subscriptions:
|
||||
logger.info(f" - {sub.user_id}")
|
||||
else:
|
||||
logger.info(" (No active Basic plan subscriptions found)")
|
||||
|
||||
# Commit plan limit changes first
|
||||
db.commit()
|
||||
logger.info("\n✅ Basic plan limits updated successfully!")
|
||||
|
||||
# Cap usage at new limits for all affected users (preserve historical data, but cap enforcement)
|
||||
logger.info("\n🔄 Capping usage counters at new limits for Basic plan users...")
|
||||
logger.info(" (Historical usage preserved, but capped to allow new calls within limits)")
|
||||
from models.subscription_models import UsageSummary
|
||||
from services.subscription import PricingService
|
||||
|
||||
pricing_service = PricingService(db)
|
||||
capped_count = 0
|
||||
|
||||
# New limits - use unified AI text generation limit if available
|
||||
new_call_limit = getattr(basic_plan, 'ai_text_generation_calls_limit', None) or basic_plan.gemini_calls_limit
|
||||
new_token_limit = basic_plan.gemini_tokens_limit # 20000 (increased from 5000)
|
||||
new_image_limit = basic_plan.stability_calls_limit # 5
|
||||
|
||||
for sub in user_subscriptions:
|
||||
try:
|
||||
# Get current billing period for this user
|
||||
current_period = pricing_service.get_current_billing_period(sub.user_id) or datetime.now(timezone.utc).strftime("%Y-%m")
|
||||
|
||||
# Find usage summary for current period
|
||||
usage_summary = db.query(UsageSummary).filter(
|
||||
UsageSummary.user_id == sub.user_id,
|
||||
UsageSummary.billing_period == current_period
|
||||
).first()
|
||||
|
||||
if usage_summary:
|
||||
# Store old values for logging
|
||||
old_gemini = usage_summary.gemini_calls or 0
|
||||
old_mistral = usage_summary.mistral_calls or 0
|
||||
old_openai = usage_summary.openai_calls or 0
|
||||
old_anthropic = usage_summary.anthropic_calls or 0
|
||||
old_tokens = max(
|
||||
usage_summary.gemini_tokens or 0,
|
||||
usage_summary.openai_tokens or 0,
|
||||
usage_summary.anthropic_tokens or 0,
|
||||
usage_summary.mistral_tokens or 0
|
||||
)
|
||||
old_images = usage_summary.stability_calls or 0
|
||||
|
||||
# Cap LLM provider counters at new limits (don't reset, just cap)
|
||||
# This allows historical data to remain but prevents blocking from old usage
|
||||
usage_summary.gemini_calls = min(old_gemini, new_call_limit)
|
||||
usage_summary.mistral_calls = min(old_mistral, new_call_limit)
|
||||
usage_summary.openai_calls = min(old_openai, new_call_limit)
|
||||
usage_summary.anthropic_calls = min(old_anthropic, new_call_limit)
|
||||
|
||||
# Cap token counters at new limits
|
||||
usage_summary.gemini_tokens = min(usage_summary.gemini_tokens or 0, new_token_limit)
|
||||
usage_summary.openai_tokens = min(usage_summary.openai_tokens or 0, new_token_limit)
|
||||
usage_summary.anthropic_tokens = min(usage_summary.anthropic_tokens or 0, new_token_limit)
|
||||
usage_summary.mistral_tokens = min(usage_summary.mistral_tokens or 0, new_token_limit)
|
||||
|
||||
# Cap image counter at new limit
|
||||
usage_summary.stability_calls = min(old_images, new_image_limit)
|
||||
|
||||
# Update totals based on capped values (approximate)
|
||||
# Recalculate total_calls and total_tokens based on capped provider values
|
||||
total_capped_calls = (
|
||||
usage_summary.gemini_calls +
|
||||
usage_summary.mistral_calls +
|
||||
usage_summary.openai_calls +
|
||||
usage_summary.anthropic_calls +
|
||||
usage_summary.stability_calls
|
||||
)
|
||||
total_capped_tokens = (
|
||||
usage_summary.gemini_tokens +
|
||||
usage_summary.mistral_tokens +
|
||||
usage_summary.openai_tokens +
|
||||
usage_summary.anthropic_tokens
|
||||
)
|
||||
|
||||
usage_summary.total_calls = total_capped_calls
|
||||
usage_summary.total_tokens = total_capped_tokens
|
||||
|
||||
# Reset status to active to allow new calls
|
||||
usage_summary.usage_status = UsageStatus.ACTIVE
|
||||
usage_summary.updated_at = datetime.now(timezone.utc)
|
||||
|
||||
db.commit()
|
||||
capped_count += 1
|
||||
|
||||
logger.info(f" ✅ Capped usage for user {sub.user_id}:")
|
||||
logger.info(f" Gemini Calls: {old_gemini} → {usage_summary.gemini_calls} (limit: {new_call_limit})")
|
||||
logger.info(f" Mistral Calls: {old_mistral} → {usage_summary.mistral_calls} (limit: {new_call_limit})")
|
||||
logger.info(f" Tokens: {old_tokens} → {max(usage_summary.gemini_tokens, usage_summary.mistral_tokens)} (limit: {new_token_limit})")
|
||||
logger.info(f" Images: {old_images} → {usage_summary.stability_calls} (limit: {new_image_limit})")
|
||||
else:
|
||||
logger.info(f" ℹ️ No usage summary found for user {sub.user_id} (period {current_period})")
|
||||
|
||||
except Exception as cap_error:
|
||||
logger.error(f" ❌ Error capping usage for user {sub.user_id}: {cap_error}")
|
||||
import traceback
|
||||
logger.error(traceback.format_exc())
|
||||
db.rollback()
|
||||
|
||||
if capped_count > 0:
|
||||
logger.info(f"\n✅ Capped usage counters for {capped_count} user(s)")
|
||||
logger.info(" Historical usage preserved, but capped at new limits to allow new calls")
|
||||
else:
|
||||
logger.info("\nℹ️ No usage counters to cap")
|
||||
|
||||
# Note about cache clearing
|
||||
logger.info("\n🔄 Cache Information:")
|
||||
logger.info(" The subscription limits cache is per-instance and will refresh on next request.")
|
||||
logger.info(" No manual cache clearing needed - limits will be read from database on next check.")
|
||||
|
||||
# Display final summary
|
||||
logger.info("\n" + "="*60)
|
||||
logger.info("BASIC PLAN UPDATE SUMMARY")
|
||||
logger.info("="*60)
|
||||
logger.info(f"\nPlan: {basic_plan.name} ({basic_plan.tier.value})")
|
||||
logger.info(f"Price: ${basic_plan.price_monthly}/mo, ${basic_plan.price_yearly}/yr")
|
||||
logger.info(f"\nUpdated Limits:")
|
||||
logger.info(f" LLM Calls (gemini/openai/anthropic/mistral): {basic_plan.gemini_calls_limit}")
|
||||
logger.info(f" LLM Tokens (gemini/openai/anthropic/mistral): {basic_plan.gemini_tokens_limit}")
|
||||
logger.info(f" Images (stability): {basic_plan.stability_calls_limit}")
|
||||
logger.info(f"\nUsers Affected: {affected_users}")
|
||||
logger.info("\n" + "="*60)
|
||||
logger.info("\n💡 Note: These limits apply immediately to all Basic plan users.")
|
||||
logger.info(" Historical usage has been preserved but capped at new limits.")
|
||||
logger.info(" Users can continue making new calls up to the new limits.")
|
||||
logger.info(" Users will hit rate limits faster for testing purposes.")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
logger.error(f"❌ Error updating Basic plan: {e}")
|
||||
import traceback
|
||||
logger.error(traceback.format_exc())
|
||||
raise
|
||||
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Failed to connect to database: {e}")
|
||||
import traceback
|
||||
logger.error(traceback.format_exc())
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("🚀 Starting Basic plan limits update...")
|
||||
logger.info("="*60)
|
||||
logger.info("This will update Basic plan limits for testing rate limits:")
|
||||
logger.info(" - LLM Calls: 10 (all providers)")
|
||||
logger.info(" - LLM Tokens: 20000 (all providers, increased from 5000)")
|
||||
logger.info(" - Images: 5")
|
||||
logger.info("="*60)
|
||||
|
||||
# No confirmation prompt here so the script can run non-interactively.
# For interactive use, add: input("\nPress Enter to continue or Ctrl+C to cancel...")
|
||||
|
||||
try:
|
||||
success = update_basic_plan_limits()
|
||||
|
||||
if success:
|
||||
logger.info("\n✅ Script completed successfully!")
|
||||
sys.exit(0)
|
||||
else:
|
||||
logger.error("\n❌ Script failed!")
|
||||
sys.exit(1)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
logger.info("\n⚠️ Script cancelled by user")
|
||||
sys.exit(1)
|
||||
except Exception as e:
|
||||
logger.error(f"\n❌ Unexpected error: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
102
backend/scripts/update_image_edit_limits.py
Normal file
@@ -0,0 +1,102 @@
|
||||
"""
|
||||
Script to update existing subscription plans with image_edit_calls_limit values.
|
||||
|
||||
This script updates the SubscriptionPlan table to set image_edit_calls_limit
|
||||
for plans that were created before this column was added.
|
||||
|
||||
Limits:
|
||||
- Free: 10 image editing calls/month
|
||||
- Basic: 30 image editing calls/month
|
||||
- Pro: 100 image editing calls/month
|
||||
- Enterprise: 0 (unlimited)
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timezone
|
||||
|
||||
# Add the backend directory to Python path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from loguru import logger
|
||||
|
||||
from models.subscription_models import SubscriptionPlan, SubscriptionTier
|
||||
from services.database import DATABASE_URL
|
||||
|
||||
def update_image_edit_limits():
|
||||
"""Update existing subscription plans with image_edit_calls_limit values."""
|
||||
|
||||
try:
|
||||
# Create engine
|
||||
engine = create_engine(DATABASE_URL, echo=False)
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
db = SessionLocal()
|
||||
|
||||
try:
|
||||
# Ensure schema columns exist
|
||||
from services.subscription.schema_utils import ensure_subscription_plan_columns
|
||||
ensure_subscription_plan_columns(db)
|
||||
|
||||
# Define limits for each tier
|
||||
limits_by_tier = {
|
||||
SubscriptionTier.FREE: 10,
|
||||
SubscriptionTier.BASIC: 30,
|
||||
SubscriptionTier.PRO: 100,
|
||||
SubscriptionTier.ENTERPRISE: 0, # Unlimited
|
||||
}
|
||||
|
||||
updated_count = 0
|
||||
|
||||
# Update each plan
|
||||
for tier, limit in limits_by_tier.items():
|
||||
plans = db.query(SubscriptionPlan).filter(
|
||||
SubscriptionPlan.tier == tier,
|
||||
SubscriptionPlan.is_active == True
|
||||
).all()
|
||||
|
||||
for plan in plans:
|
||||
current_limit = getattr(plan, 'image_edit_calls_limit', 0) or 0
|
||||
|
||||
# Only update if the stored limit differs from the target value
|
||||
if current_limit != limit:
|
||||
setattr(plan, 'image_edit_calls_limit', limit)
|
||||
plan.updated_at = datetime.now(timezone.utc)
|
||||
updated_count += 1
|
||||
logger.info(f"Updated {plan.name} plan ({tier.value}): image_edit_calls_limit = {current_limit} -> {limit}")
|
||||
else:
|
||||
logger.debug(f"Plan {plan.name} ({tier.value}) already has image_edit_calls_limit = {limit}")
|
||||
|
||||
# Commit changes
|
||||
db.commit()
|
||||
|
||||
if updated_count > 0:
|
||||
logger.info(f"✅ Successfully updated {updated_count} subscription plan(s) with image_edit_calls_limit")
|
||||
else:
|
||||
logger.info("✅ All subscription plans already have correct image_edit_calls_limit values")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error updating image_edit_limits: {e}")
|
||||
db.rollback()
|
||||
raise
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Error creating database connection: {e}")
|
||||
raise
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("🔄 Updating subscription plans with image_edit_calls_limit...")
|
||||
success = update_image_edit_limits()
|
||||
if success:
|
||||
logger.info("🎉 Image edit limits update completed successfully!")
|
||||
else:
|
||||
logger.error("❌ Image edit limits update failed")
|
||||
sys.exit(1)
|
||||
|
||||
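For reference, a minimal sketch (not part of this commit) of how a quota check might interpret the 0 value that the docstring above treats as unlimited; the function name and the usage counter are assumptions.

# Hypothetical helper, for illustration only: interpret 0 as "unlimited",
# matching the Enterprise tier convention described in the docstring above.
def is_within_image_edit_limit(calls_used: int, image_edit_calls_limit: int) -> bool:
    """Return True if another image edit call is allowed under the plan limit."""
    if image_edit_calls_limit == 0:  # 0 means unlimited
        return True
    return calls_used < image_edit_calls_limit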
30
backend/scripts/verify_cumulative_stats.py
Normal file
@@ -0,0 +1,30 @@
#!/usr/bin/env python3
"""Verify cumulative stats table exists and has data"""

import sqlite3
import os

script_dir = os.path.dirname(os.path.abspath(__file__))
backend_dir = os.path.dirname(script_dir)
db_path = os.path.join(backend_dir, 'alwrity.db')

conn = sqlite3.connect(db_path)
cursor = conn.cursor()

# Check if table exists
cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='scheduler_cumulative_stats'")
result = cursor.fetchone()
print(f"Table exists: {result is not None}")

if result:
    cursor.execute("SELECT * FROM scheduler_cumulative_stats WHERE id=1")
    row = cursor.fetchone()
    if row:
        print(f"Row data: {row}")
    else:
        print("Table exists but no row with id=1")
else:
    print("Table does not exist")

conn.close()
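A minimal sketch (not part of this commit) of the same existence check done through SQLAlchemy's inspector rather than raw sqlite3, assuming services.database.engine points at the same alwrity.db used above.

# Sketch only: equivalent table-existence check via SQLAlchemy.
from sqlalchemy import inspect
from services.database import engine

print(f"Table exists: {inspect(engine).has_table('scheduler_cumulative_stats')}")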
73
backend/scripts/verify_current_user_data.py
Normal file
@@ -0,0 +1,73 @@
"""
|
||||
Verify current user data in the database
|
||||
Check if data is being saved with Clerk user IDs
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from loguru import logger
|
||||
from services.database import SessionLocal
|
||||
from models.onboarding import OnboardingSession, APIKey, WebsiteAnalysis, ResearchPreferences
|
||||
|
||||
def verify_user_data():
|
||||
"""Check what user_id format is being used."""
|
||||
try:
|
||||
db = SessionLocal()
|
||||
|
||||
logger.info("Checking onboarding_sessions table...")
|
||||
sessions = db.query(OnboardingSession).all()
|
||||
|
||||
logger.info(f"Found {len(sessions)} sessions:")
|
||||
for session in sessions:
|
||||
logger.info(f" Session ID: {session.id}")
|
||||
logger.info(f" User ID: {session.user_id} (type: {type(session.user_id).__name__})")
|
||||
logger.info(f" Current Step: {session.current_step}")
|
||||
logger.info(f" Progress: {session.progress}%")
|
||||
|
||||
# Check API keys for this session
|
||||
api_keys = db.query(APIKey).filter(APIKey.session_id == session.id).all()
|
||||
logger.info(f" API Keys: {len(api_keys)} found")
|
||||
for key in api_keys:
|
||||
logger.info(f" - {key.provider}")
|
||||
|
||||
# Check website analysis
|
||||
website = db.query(WebsiteAnalysis).filter(WebsiteAnalysis.session_id == session.id).first()
|
||||
if website:
|
||||
logger.info(f" Website Analysis: {website.website_url}")
|
||||
else:
|
||||
logger.info(f" Website Analysis: None")
|
||||
|
||||
# Check research preferences
|
||||
research = db.query(ResearchPreferences).filter(ResearchPreferences.session_id == session.id).first()
|
||||
if research:
|
||||
logger.info(f" Research Preferences: Found")
|
||||
else:
|
||||
logger.info(f" Research Preferences: None")
|
||||
|
||||
logger.info("")
|
||||
|
||||
if len(sessions) == 0:
|
||||
logger.warning("⚠️ No sessions found in database!")
|
||||
logger.info("This means either:")
|
||||
logger.info(" 1. No onboarding data has been saved yet")
|
||||
logger.info(" 2. Data was cleared during migration")
|
||||
logger.info("\nYou need to go through onboarding steps 1-5 again to save data with Clerk user ID")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error verifying data: {e}")
|
||||
return False
|
||||
finally:
|
||||
if db:
|
||||
db.close()
|
||||
|
||||
if __name__ == "__main__":
|
||||
logger.info("="*60)
|
||||
logger.info("VERIFY CURRENT USER DATA IN DATABASE")
|
||||
logger.info("="*60)
|
||||
|
||||
verify_user_data()
|
||||
|
||||
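A small sketch (not part of this commit) of the user_id format check this script's docstring describes; it assumes Clerk IDs keep their standard "user_" prefix, as in the example ID used in the next script.

# Sketch only: classify the stored user_id format. Clerk IDs use a "user_"
# prefix (e.g. user_33Gz1FPI86VDXhRY8QN4ragRFGN); anything else is treated
# here as a legacy/internal ID.
def looks_like_clerk_user_id(user_id) -> bool:
    return isinstance(user_id, str) and user_id.startswith("user_")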
338
backend/scripts/verify_onboarding_data.py
Normal file
@@ -0,0 +1,338 @@
"""
|
||||
Database Verification Script for Onboarding Data
|
||||
Verifies that all onboarding steps data is properly saved to the database.
|
||||
|
||||
Usage:
|
||||
python backend/scripts/verify_onboarding_data.py [user_id]
|
||||
|
||||
Example:
|
||||
python backend/scripts/verify_onboarding_data.py user_33Gz1FPI86VDXhRY8QN4ragRFGN
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Add backend directory to path
|
||||
backend_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(backend_dir))
|
||||
|
||||
from loguru import logger
|
||||
from sqlalchemy import inspect, text
|
||||
from typing import Optional
|
||||
import json
|
||||
|
||||
def get_user_id_from_args() -> Optional[str]:
|
||||
"""Get user_id from command line arguments."""
|
||||
if len(sys.argv) > 1:
|
||||
return sys.argv[1]
|
||||
return None
|
||||
|
||||
def verify_table_exists(table_name: str, inspector) -> bool:
|
||||
"""Check if a table exists in the database."""
|
||||
tables = inspector.get_table_names()
|
||||
exists = table_name in tables
|
||||
|
||||
if exists:
|
||||
logger.info(f"✅ Table '{table_name}' exists")
|
||||
# Show column count
|
||||
columns = inspector.get_columns(table_name)
|
||||
logger.info(f" Columns: {len(columns)}")
|
||||
else:
|
||||
logger.error(f"❌ Table '{table_name}' does NOT exist")
|
||||
|
||||
return exists
|
||||
|
||||
def verify_onboarding_session(user_id: str, db):
|
||||
"""Verify onboarding session data."""
|
||||
try:
|
||||
from models.onboarding import OnboardingSession
|
||||
|
||||
session = db.query(OnboardingSession).filter(
|
||||
OnboardingSession.user_id == user_id
|
||||
).first()
|
||||
|
||||
if session:
|
||||
logger.info(f"✅ Onboarding Session found for user: {user_id}")
|
||||
logger.info(f" Session ID: {session.id}")
|
||||
logger.info(f" Current Step: {session.current_step}")
|
||||
logger.info(f" Progress: {session.progress}%")
|
||||
logger.info(f" Started At: {session.started_at}")
|
||||
logger.info(f" Updated At: {session.updated_at}")
|
||||
return session.id
|
||||
else:
|
||||
logger.error(f"❌ No onboarding session found for user: {user_id}")
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error verifying onboarding session: {e}")
|
||||
return None
|
||||
|
||||
def verify_api_keys(session_id: int, user_id: str, db):
|
||||
"""Verify API keys data (Step 1)."""
|
||||
try:
|
||||
from models.onboarding import APIKey
|
||||
|
||||
api_keys = db.query(APIKey).filter(
|
||||
APIKey.session_id == session_id
|
||||
).all()
|
||||
|
||||
if api_keys:
|
||||
logger.info(f"✅ Step 1 (API Keys): Found {len(api_keys)} API key(s)")
|
||||
for key in api_keys:
|
||||
# Mask the key for security
|
||||
masked_key = f"{key.key[:8]}...{key.key[-4:]}" if len(key.key) > 12 else "***"
|
||||
logger.info(f" - Provider: {key.provider}")
|
||||
logger.info(f" Key: {masked_key}")
|
||||
logger.info(f" Created: {key.created_at}")
|
||||
else:
|
||||
logger.warning(f"⚠️ Step 1 (API Keys): No API keys found")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error verifying API keys: {e}")
|
||||
|
||||
def verify_website_analysis(session_id: int, user_id: str, db):
|
||||
"""Verify website analysis data (Step 2)."""
|
||||
try:
|
||||
from models.onboarding import WebsiteAnalysis
|
||||
|
||||
analysis = db.query(WebsiteAnalysis).filter(
|
||||
WebsiteAnalysis.session_id == session_id
|
||||
).first()
|
||||
|
||||
if analysis:
|
||||
logger.info(f"✅ Step 2 (Website Analysis): Data found")
|
||||
logger.info(f" Website URL: {analysis.website_url}")
|
||||
logger.info(f" Analysis Date: {analysis.analysis_date}")
|
||||
logger.info(f" Status: {analysis.status}")
|
||||
|
||||
if analysis.writing_style:
|
||||
logger.info(f" Writing Style: {len(analysis.writing_style)} attributes")
|
||||
if analysis.content_characteristics:
|
||||
logger.info(f" Content Characteristics: {len(analysis.content_characteristics)} attributes")
|
||||
if analysis.target_audience:
|
||||
logger.info(f" Target Audience: {len(analysis.target_audience)} attributes")
|
||||
else:
|
||||
logger.warning(f"⚠️ Step 2 (Website Analysis): No data found")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error verifying website analysis: {e}")
|
||||
|
||||
def verify_research_preferences(session_id: int, user_id: str, db):
|
||||
"""Verify research preferences data (Step 3)."""
|
||||
try:
|
||||
from models.onboarding import ResearchPreferences
|
||||
|
||||
prefs = db.query(ResearchPreferences).filter(
|
||||
ResearchPreferences.session_id == session_id
|
||||
).first()
|
||||
|
||||
if prefs:
|
||||
logger.info(f"✅ Step 3 (Research Preferences): Data found")
|
||||
logger.info(f" Research Depth: {prefs.research_depth}")
|
||||
logger.info(f" Content Types: {prefs.content_types}")
|
||||
logger.info(f" Auto Research: {prefs.auto_research}")
|
||||
logger.info(f" Factual Content: {prefs.factual_content}")
|
||||
else:
|
||||
logger.warning(f"⚠️ Step 3 (Research Preferences): No data found")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error verifying research preferences: {e}")
|
||||
|
||||
def verify_persona_data(session_id: int, user_id: str, db):
|
||||
"""Verify persona data (Step 4) - THE NEW FIX!"""
|
||||
try:
|
||||
from models.onboarding import PersonaData
|
||||
|
||||
persona = db.query(PersonaData).filter(
|
||||
PersonaData.session_id == session_id
|
||||
).first()
|
||||
|
||||
if persona:
|
||||
logger.info(f"✅ Step 4 (Persona Generation): Data found ⭐")
|
||||
|
||||
if persona.core_persona:
|
||||
logger.info(f" Core Persona: Present")
|
||||
if isinstance(persona.core_persona, dict):
|
||||
logger.info(f" Attributes: {len(persona.core_persona)} fields")
|
||||
|
||||
if persona.platform_personas:
|
||||
logger.info(f" Platform Personas: Present")
|
||||
if isinstance(persona.platform_personas, dict):
|
||||
platforms = list(persona.platform_personas.keys())
|
||||
logger.info(f" Platforms: {', '.join(platforms)}")
|
||||
|
||||
if persona.quality_metrics:
|
||||
logger.info(f" Quality Metrics: Present")
|
||||
if isinstance(persona.quality_metrics, dict):
|
||||
logger.info(f" Metrics: {len(persona.quality_metrics)} fields")
|
||||
|
||||
if persona.selected_platforms:
|
||||
logger.info(f" Selected Platforms: {persona.selected_platforms}")
|
||||
|
||||
logger.info(f" Created At: {persona.created_at}")
|
||||
logger.info(f" Updated At: {persona.updated_at}")
|
||||
else:
|
||||
logger.error(f"❌ Step 4 (Persona Generation): No data found - THIS IS THE BUG WE FIXED!")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error verifying persona data: {e}")
|
||||
import traceback
|
||||
logger.error(traceback.format_exc())
|
||||
|
||||
def show_raw_sql_query_example(user_id: str):
|
||||
"""Show example SQL queries for manual verification."""
|
||||
logger.info("")
|
||||
logger.info("=" * 60)
|
||||
logger.info("📋 Raw SQL Queries for Manual Verification:")
|
||||
logger.info("=" * 60)
|
||||
|
||||
queries = [
|
||||
("Onboarding Session",
|
||||
f"SELECT * FROM onboarding_sessions WHERE user_id = '{user_id}';"),
|
||||
|
||||
("API Keys",
|
||||
f"""SELECT ak.* FROM api_keys ak
|
||||
JOIN onboarding_sessions os ON ak.session_id = os.id
|
||||
WHERE os.user_id = '{user_id}';"""),
|
||||
|
||||
("Website Analysis",
|
||||
f"""SELECT wa.website_url, wa.analysis_date, wa.status
|
||||
FROM website_analyses wa
|
||||
JOIN onboarding_sessions os ON wa.session_id = os.id
|
||||
WHERE os.user_id = '{user_id}';"""),
|
||||
|
||||
("Research Preferences",
|
||||
f"""SELECT rp.research_depth, rp.content_types, rp.auto_research
|
||||
FROM research_preferences rp
|
||||
JOIN onboarding_sessions os ON rp.session_id = os.id
|
||||
WHERE os.user_id = '{user_id}';"""),
|
||||
|
||||
("Persona Data (NEW!)",
|
||||
f"""SELECT pd.* FROM persona_data pd
|
||||
JOIN onboarding_sessions os ON pd.session_id = os.id
|
||||
WHERE os.user_id = '{user_id}';"""),
|
||||
]
|
||||
|
||||
for title, query in queries:
|
||||
logger.info(f"\n{title}:")
|
||||
logger.info(f" {query}")
|
||||
|
||||
def count_all_records(db):
|
||||
"""Count records in all onboarding tables."""
|
||||
logger.info("")
|
||||
logger.info("=" * 60)
|
||||
logger.info("📊 Overall Database Statistics:")
|
||||
logger.info("=" * 60)
|
||||
|
||||
try:
|
||||
from models.onboarding import (
|
||||
OnboardingSession, APIKey, WebsiteAnalysis,
|
||||
ResearchPreferences, PersonaData
|
||||
)
|
||||
|
||||
counts = {
|
||||
"Onboarding Sessions": db.query(OnboardingSession).count(),
|
||||
"API Keys": db.query(APIKey).count(),
|
||||
"Website Analyses": db.query(WebsiteAnalysis).count(),
|
||||
"Research Preferences": db.query(ResearchPreferences).count(),
|
||||
"Persona Data": db.query(PersonaData).count(),
|
||||
}
|
||||
|
||||
for table, count in counts.items():
|
||||
logger.info(f" {table}: {count} record(s)")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error counting records: {e}")
|
||||
|
||||
def main():
|
||||
"""Main verification function."""
|
||||
logger.info("=" * 60)
|
||||
logger.info("🔍 Onboarding Database Verification")
|
||||
logger.info("=" * 60)
|
||||
|
||||
# Get user_id
|
||||
user_id = get_user_id_from_args()
|
||||
|
||||
if not user_id:
|
||||
logger.warning("⚠️ No user_id provided. Will show overall statistics only.")
|
||||
logger.info("Usage: python backend/scripts/verify_onboarding_data.py <user_id>")
|
||||
|
||||
try:
|
||||
from services.database import SessionLocal, engine
|
||||
from sqlalchemy import inspect
|
||||
|
||||
# Check tables exist
|
||||
logger.info("")
|
||||
logger.info("=" * 60)
|
||||
logger.info("1️⃣ Verifying Database Tables:")
|
||||
logger.info("=" * 60)
|
||||
|
||||
inspector = inspect(engine)
|
||||
tables = [
|
||||
'onboarding_sessions',
|
||||
'api_keys',
|
||||
'website_analyses',
|
||||
'research_preferences',
|
||||
'persona_data'
|
||||
]
|
||||
|
||||
all_exist = True
|
||||
for table in tables:
|
||||
if not verify_table_exists(table, inspector):
|
||||
all_exist = False
|
||||
|
||||
if not all_exist:
|
||||
logger.error("")
|
||||
logger.error("❌ Some tables are missing! Run migrations first.")
|
||||
return False
|
||||
|
||||
# Count all records
|
||||
db = SessionLocal()
|
||||
try:
|
||||
count_all_records(db)
|
||||
|
||||
# If user_id provided, show detailed data
|
||||
if user_id:
|
||||
logger.info("")
|
||||
logger.info("=" * 60)
|
||||
logger.info(f"2️⃣ Verifying Data for User: {user_id}")
|
||||
logger.info("=" * 60)
|
||||
|
||||
# Verify session
|
||||
session_id = verify_onboarding_session(user_id, db)
|
||||
|
||||
if session_id:
|
||||
logger.info("")
|
||||
# Verify each step's data
|
||||
verify_api_keys(session_id, user_id, db)
|
||||
logger.info("")
|
||||
verify_website_analysis(session_id, user_id, db)
|
||||
logger.info("")
|
||||
verify_research_preferences(session_id, user_id, db)
|
||||
logger.info("")
|
||||
verify_persona_data(session_id, user_id, db)
|
||||
|
||||
# Show SQL examples
|
||||
show_raw_sql_query_example(user_id)
|
||||
|
||||
logger.info("")
|
||||
logger.info("=" * 60)
|
||||
logger.info("✅ Verification Complete!")
|
||||
logger.info("=" * 60)
|
||||
|
||||
return True
|
||||
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Verification failed: {e}")
|
||||
import traceback
|
||||
logger.error(traceback.format_exc())
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
success = main()
|
||||
sys.exit(0 if success else 1)
|
||||
|
||||
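The queries above are printed for manual use with the user_id interpolated directly into the SQL; for completeness, a sketch (not part of this commit) of running one of them with a bound parameter through SQLAlchemy instead.

# Sketch only: execute one of the printed queries with a bound parameter
# rather than string interpolation.
from sqlalchemy import text
from services.database import SessionLocal

db = SessionLocal()
try:
    rows = db.execute(
        text("SELECT * FROM onboarding_sessions WHERE user_id = :user_id"),
        {"user_id": "user_33Gz1FPI86VDXhRY8QN4ragRFGN"},
    ).fetchall()
    print(f"Found {len(rows)} session(s)")
finally:
    db.close()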
42
backend/scripts/verify_podcast_table.py
Normal file
@@ -0,0 +1,42 @@
#!/usr/bin/env python3
"""
Verify that the podcast_projects table exists and has the correct structure.
"""

import sys
from pathlib import Path

backend_dir = Path(__file__).parent.parent
sys.path.insert(0, str(backend_dir))

from sqlalchemy import inspect
from services.database import engine

def verify_table():
    """Verify the podcast_projects table exists."""
    inspector = inspect(engine)
    tables = inspector.get_table_names()

    if 'podcast_projects' in tables:
        print("✅ Table 'podcast_projects' exists")

        columns = inspector.get_columns('podcast_projects')
        print(f"\n📊 Columns ({len(columns)}):")
        for col in columns:
            print(f"  • {col['name']}: {col['type']}")

        indexes = inspector.get_indexes('podcast_projects')
        print(f"\n📈 Indexes ({len(indexes)}):")
        for idx in indexes:
            print(f"  • {idx['name']}: {idx['column_names']}")

        return True
    else:
        print("❌ Table 'podcast_projects' not found")
        print(f"Available tables: {', '.join(tables)}")
        return False

if __name__ == "__main__":
    success = verify_table()
    sys.exit(0 if success else 1)
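A possible extension (not part of this commit): assert that specific columns are present rather than only listing them. The column names below are placeholders, since the actual podcast_projects schema is not shown here.

# Sketch only: check for specific columns. The names in EXPECTED_COLUMNS are
# placeholders; substitute the real podcast_projects schema.
from sqlalchemy import inspect
from services.database import engine

EXPECTED_COLUMNS = {"id", "user_id", "created_at"}  # placeholder names

actual = {col["name"] for col in inspect(engine).get_columns("podcast_projects")}
missing = EXPECTED_COLUMNS - actual
print(f"Missing columns: {sorted(missing) if missing else 'none'}")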