Base code
backend/services/analytics/__init__.py (new file, 41 lines)
@@ -0,0 +1,41 @@
"""
Analytics Package

Modular analytics system for retrieving and processing data from connected platforms.
"""

from .models import AnalyticsData, PlatformType, AnalyticsStatus, PlatformConnectionStatus
from .handlers import (
    BaseAnalyticsHandler,
    GSCAnalyticsHandler,
    BingAnalyticsHandler,
    WordPressAnalyticsHandler,
    WixAnalyticsHandler
)
from .connection_manager import PlatformConnectionManager
from .summary_generator import AnalyticsSummaryGenerator
from .cache_manager import AnalyticsCacheManager
from .platform_analytics_service import PlatformAnalyticsService

__all__ = [
    # Models
    'AnalyticsData',
    'PlatformType',
    'AnalyticsStatus',
    'PlatformConnectionStatus',

    # Handlers
    'BaseAnalyticsHandler',
    'GSCAnalyticsHandler',
    'BingAnalyticsHandler',
    'WordPressAnalyticsHandler',
    'WixAnalyticsHandler',

    # Managers
    'PlatformConnectionManager',
    'AnalyticsSummaryGenerator',
    'AnalyticsCacheManager',

    # Main Service
    'PlatformAnalyticsService'
]
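Not part of this commit, but a minimal consumer sketch of the package surface defined above (assuming `backend/` is on `sys.path`, so the package imports as `services.analytics`):

from services.analytics import PlatformAnalyticsService

service = PlatformAnalyticsService()
print(service.get_supported_platforms())  # ['gsc', 'bing', 'wordpress', 'wix']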
backend/services/analytics/cache_manager.py (new file, 110 lines)
@@ -0,0 +1,110 @@
"""
Analytics Cache Manager

Provides a unified interface for caching analytics data with platform-specific configurations.
"""

from typing import Dict, Any, Optional
from loguru import logger

from ..analytics_cache_service import analytics_cache
from .models.platform_types import PlatformType


class AnalyticsCacheManager:
    """Manages caching for analytics data with platform-specific TTL configurations"""

    def __init__(self):
        # Platform-specific cache TTL configurations (in seconds)
        self.cache_ttl = {
            PlatformType.GSC: 3600,        # 1 hour
            PlatformType.BING: 3600,       # 1 hour (expensive operation)
            PlatformType.WORDPRESS: 1800,  # 30 minutes
            PlatformType.WIX: 1800,        # 30 minutes
            'platform_status': 1800,       # 30 minutes
            'analytics_summary': 900,      # 15 minutes
        }

    def get_cached_analytics(self, platform: PlatformType, user_id: str) -> Optional[Dict[str, Any]]:
        """Get cached analytics data for a platform"""
        cache_key = f"{platform.value}_analytics"
        cached_data = analytics_cache.get(cache_key, user_id)

        if cached_data:
            logger.info(f"Cache HIT: {platform.value} analytics for user {user_id}")
            return cached_data

        logger.info(f"Cache MISS: {platform.value} analytics for user {user_id}")
        return None

    def set_cached_analytics(self, platform: PlatformType, user_id: str, data: Dict[str, Any], ttl_override: Optional[int] = None):
        """Cache analytics data for a platform"""
        cache_key = f"{platform.value}_analytics"
        ttl = ttl_override or self.cache_ttl.get(platform, 1800)  # Default 30 minutes

        analytics_cache.set(cache_key, user_id, data, ttl_override=ttl)
        logger.info(f"Cached {platform.value} analytics for user {user_id} (TTL: {ttl}s)")

    def get_cached_platform_status(self, user_id: str) -> Optional[Dict[str, Any]]:
        """Get cached platform connection status"""
        cached_data = analytics_cache.get('platform_status', user_id)

        if cached_data:
            logger.info(f"Cache HIT: platform status for user {user_id}")
            return cached_data

        logger.info(f"Cache MISS: platform status for user {user_id}")
        return None

    def set_cached_platform_status(self, user_id: str, status_data: Dict[str, Any]):
        """Cache platform connection status"""
        ttl = self.cache_ttl['platform_status']
        analytics_cache.set('platform_status', user_id, status_data, ttl_override=ttl)
        logger.info(f"Cached platform status for user {user_id} (TTL: {ttl}s)")

    def get_cached_summary(self, user_id: str) -> Optional[Dict[str, Any]]:
        """Get cached analytics summary"""
        cached_data = analytics_cache.get('analytics_summary', user_id)

        if cached_data:
            logger.info(f"Cache HIT: analytics summary for user {user_id}")
            return cached_data

        logger.info(f"Cache MISS: analytics summary for user {user_id}")
        return None

    def set_cached_summary(self, user_id: str, summary_data: Dict[str, Any]):
        """Cache analytics summary"""
        ttl = self.cache_ttl['analytics_summary']
        analytics_cache.set('analytics_summary', user_id, summary_data, ttl_override=ttl)
        logger.info(f"Cached analytics summary for user {user_id} (TTL: {ttl}s)")

    def invalidate_platform_cache(self, platform: PlatformType, user_id: str):
        """Invalidate cache for a specific platform"""
        cache_key = f"{platform.value}_analytics"
        analytics_cache.invalidate(cache_key, user_id)
        logger.info(f"Invalidated {platform.value} analytics cache for user {user_id}")

    def invalidate_user_cache(self, user_id: str):
        """Invalidate all cache entries for a user"""
        analytics_cache.invalidate_user(user_id)
        logger.info(f"Invalidated all analytics cache for user {user_id}")

    def invalidate_platform_status_cache(self, user_id: str):
        """Invalidate platform status cache for a user"""
        analytics_cache.invalidate('platform_status', user_id)
        logger.info(f"Invalidated platform status cache for user {user_id}")

    def invalidate_summary_cache(self, user_id: str):
        """Invalidate analytics summary cache for a user"""
        analytics_cache.invalidate('analytics_summary', user_id)
        logger.info(f"Invalidated analytics summary cache for user {user_id}")

    def get_cache_stats(self) -> Dict[str, Any]:
        """Get cache statistics"""
        return analytics_cache.get_stats()

    def clear_all_cache(self):
        """Clear all analytics cache"""
        analytics_cache.clear_all()
        logger.info("Cleared all analytics cache")
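The handlers use this manager in a read-through pattern; an illustrative sketch (not part of the commit), with `fetch_fresh` standing in for a real platform call:

cache = AnalyticsCacheManager()

def get_gsc_analytics(user_id: str, fetch_fresh) -> dict:
    # Cache hit returns immediately; a miss triggers the expensive fetch
    data = cache.get_cached_analytics(PlatformType.GSC, user_id)
    if data is None:
        data = fetch_fresh(user_id)
        cache.set_cached_analytics(PlatformType.GSC, user_id, data)  # GSC TTL: 3600s
    return data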
backend/services/analytics/connection_manager.py (new file, 152 lines)
@@ -0,0 +1,152 @@
"""
Platform Connection Manager

Manages platform connection status checking and caching across all analytics platforms.
"""

from typing import Dict, Any, List, Optional
from loguru import logger

from ..analytics_cache_service import analytics_cache
from .handlers import (
    GSCAnalyticsHandler,
    BingAnalyticsHandler,
    WordPressAnalyticsHandler,
    WixAnalyticsHandler
)
from .models.platform_types import PlatformType


class PlatformConnectionManager:
    """Manages platform connection status across all analytics platforms"""

    def __init__(self):
        self.handlers = {
            PlatformType.GSC: GSCAnalyticsHandler(),
            PlatformType.BING: BingAnalyticsHandler(),
            PlatformType.WORDPRESS: WordPressAnalyticsHandler(),
            PlatformType.WIX: WixAnalyticsHandler()
        }

    async def get_platform_connection_status(self, user_id: str) -> Dict[str, Dict[str, Any]]:
        """
        Check connection status for all platforms

        Returns:
            Dictionary with connection status for each platform
        """
        # Check cache first - connection status doesn't change frequently
        cached_status = analytics_cache.get('platform_status', user_id)
        if cached_status:
            logger.info(f"Using cached platform connection status for user {user_id}")
            return cached_status

        logger.info(f"Fetching fresh platform connection status for user {user_id}")
        status = {}

        # Check each platform connection
        for platform_type, handler in self.handlers.items():
            platform_name = platform_type.value
            try:
                status[platform_name] = handler.get_connection_status(user_id)
            except Exception as e:
                logger.error(f"Error checking {platform_name} connection status: {e}")
                status[platform_name] = {
                    'connected': False,
                    'sites_count': 0,
                    'sites': [],
                    'error': str(e)
                }

        # Cache the connection status
        analytics_cache.set('platform_status', user_id, status)
        logger.info(f"Cached platform connection status for user {user_id}")

        return status

    def get_connected_platforms(self, user_id: str, status_data: Optional[Dict[str, Dict[str, Any]]] = None) -> List[str]:
        """
        Get list of connected platform names

        Args:
            user_id: User ID
            status_data: Optional pre-fetched status data

        Returns:
            List of connected platform names
        """
        if status_data is None:
            # Fetching status here would need to be async; callers should pass
            # pre-fetched status data from get_platform_connection_status().
            return []

        connected_platforms = []
        for platform_name, status in status_data.items():
            if status.get('connected', False):
                connected_platforms.append(platform_name)

        return connected_platforms

    def get_platform_sites_count(self, user_id: str, platform_name: str, status_data: Optional[Dict[str, Dict[str, Any]]] = None) -> int:
        """
        Get sites count for a specific platform

        Args:
            user_id: User ID
            platform_name: Name of the platform
            status_data: Optional pre-fetched status data

        Returns:
            Number of connected sites for the platform
        """
        if status_data is None:
            return 0

        platform_status = status_data.get(platform_name, {})
        return platform_status.get('sites_count', 0)

    def is_platform_connected(self, user_id: str, platform_name: str, status_data: Optional[Dict[str, Dict[str, Any]]] = None) -> bool:
        """
        Check if a specific platform is connected

        Args:
            user_id: User ID
            platform_name: Name of the platform
            status_data: Optional pre-fetched status data

        Returns:
            True if platform is connected, False otherwise
        """
        if status_data is None:
            return False

        platform_status = status_data.get(platform_name, {})
        return platform_status.get('connected', False)

    def get_platform_error(self, user_id: str, platform_name: str, status_data: Optional[Dict[str, Dict[str, Any]]] = None) -> Optional[str]:
        """
        Get error message for a specific platform

        Args:
            user_id: User ID
            platform_name: Name of the platform
            status_data: Optional pre-fetched status data

        Returns:
            Error message if any, None otherwise
        """
        if status_data is None:
            return None

        platform_status = status_data.get(platform_name, {})
        return platform_status.get('error')

    def invalidate_connection_cache(self, user_id: str):
        """
        Invalidate connection status cache for a user

        Args:
            user_id: User ID to invalidate cache for
        """
        analytics_cache.invalidate('platform_status', user_id)
        logger.info(f"Invalidated platform connection status cache for user {user_id}")
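The intended calling pattern is to fetch the status dictionary once asynchronously and then interrogate it with the synchronous helpers. An illustrative sketch (not part of the commit):

import asyncio

async def show_connections(user_id: str):
    manager = PlatformConnectionManager()
    status = await manager.get_platform_connection_status(user_id)
    for name in manager.get_connected_platforms(user_id, status_data=status):
        count = manager.get_platform_sites_count(user_id, name, status_data=status)
        print(f"{name}: {count} site(s)")

asyncio.run(show_connections("user-123"))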
backend/services/analytics/handlers/__init__.py (new file, 19 lines)
@@ -0,0 +1,19 @@
"""
Analytics Handlers Package

Contains platform-specific analytics handlers.
"""

from .base_handler import BaseAnalyticsHandler
from .gsc_handler import GSCAnalyticsHandler
from .bing_handler import BingAnalyticsHandler
from .wordpress_handler import WordPressAnalyticsHandler
from .wix_handler import WixAnalyticsHandler

__all__ = [
    'BaseAnalyticsHandler',
    'GSCAnalyticsHandler',
    'BingAnalyticsHandler',
    'WordPressAnalyticsHandler',
    'WixAnalyticsHandler'
]
backend/services/analytics/handlers/base_handler.py (new file, 88 lines)
@@ -0,0 +1,88 @@
"""
Base Analytics Handler

Abstract base class for platform-specific analytics handlers.
"""

from abc import ABC, abstractmethod
from typing import Dict, Any, Optional
from datetime import datetime

from loguru import logger

from ..models.analytics_data import AnalyticsData
from ..models.platform_types import PlatformType


class BaseAnalyticsHandler(ABC):
    """Abstract base class for platform analytics handlers"""

    def __init__(self, platform_type: PlatformType):
        self.platform_type = platform_type
        self.platform_name = platform_type.value

    @abstractmethod
    async def get_analytics(self, user_id: str) -> AnalyticsData:
        """
        Get analytics data for the platform

        Args:
            user_id: User ID to get analytics for

        Returns:
            AnalyticsData object with platform metrics
        """
        pass

    @abstractmethod
    def get_connection_status(self, user_id: str) -> Dict[str, Any]:
        """
        Get connection status for the platform

        Args:
            user_id: User ID to check connection for

        Returns:
            Dictionary with connection status information
        """
        pass

    def create_error_response(self, error_message: str) -> AnalyticsData:
        """Create a standardized error response"""
        return AnalyticsData(
            platform=self.platform_name,
            metrics={},
            date_range={'start': '', 'end': ''},
            last_updated=datetime.now().isoformat(),
            status='error',
            error_message=error_message
        )

    def create_partial_response(self, metrics: Dict[str, Any], error_message: Optional[str] = None) -> AnalyticsData:
        """Create a standardized partial response"""
        return AnalyticsData(
            platform=self.platform_name,
            metrics=metrics,
            date_range={'start': '', 'end': ''},
            last_updated=datetime.now().isoformat(),
            status='partial',
            error_message=error_message
        )

    def create_success_response(self, metrics: Dict[str, Any], date_range: Optional[Dict[str, str]] = None) -> AnalyticsData:
        """Create a standardized success response"""
        return AnalyticsData(
            platform=self.platform_name,
            metrics=metrics,
            date_range=date_range or {'start': '', 'end': ''},
            last_updated=datetime.now().isoformat(),
            status='success'
        )

    def log_analytics_request(self, user_id: str, operation: str):
        """Log analytics request for monitoring"""
        logger.info(f"{self.platform_name} analytics: {operation} for user {user_id}")

    def log_analytics_error(self, user_id: str, operation: str, error: Exception):
        """Log analytics error for monitoring"""
        logger.error(f"{self.platform_name} analytics: {operation} failed for user {user_id}: {error}")
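A concrete handler only has to implement the two abstract methods; the standardized responses and logging come from the base class. A hypothetical minimal subclass, for illustration only:

class DummyAnalyticsHandler(BaseAnalyticsHandler):
    # Illustrative handler; not part of the commit
    def __init__(self):
        super().__init__(PlatformType.WIX)

    async def get_analytics(self, user_id: str) -> AnalyticsData:
        self.log_analytics_request(user_id, "get_analytics")
        return self.create_success_response(metrics={'total_clicks': 42})

    def get_connection_status(self, user_id: str) -> Dict[str, Any]:
        return {'connected': True, 'sites_count': 1, 'sites': [], 'error': None}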
backend/services/analytics/handlers/bing_handler.py (new file, 279 lines)
@@ -0,0 +1,279 @@
"""
Bing Webmaster Tools Analytics Handler

Handles Bing Webmaster Tools analytics data retrieval and processing.
"""

import os
from typing import Dict, Any
from datetime import datetime, timedelta
from loguru import logger

from services.integrations.bing_oauth import BingOAuthService
from services.bing_analytics_storage_service import BingAnalyticsStorageService
from ...analytics_cache_service import analytics_cache
from ..models.analytics_data import AnalyticsData
from ..models.platform_types import PlatformType
from ..insights.bing_insights_service import BingInsightsService
from .base_handler import BaseAnalyticsHandler


class BingAnalyticsHandler(BaseAnalyticsHandler):
    """Handler for Bing Webmaster Tools analytics"""

    def __init__(self):
        super().__init__(PlatformType.BING)
        self.bing_service = BingOAuthService()
        # Initialize insights service
        database_url = os.getenv('DATABASE_URL', 'sqlite:///./bing_analytics.db')
        self.insights_service = BingInsightsService(database_url)
        # Storage service used in onboarding step 5
        self.storage_service = BingAnalyticsStorageService(os.getenv('DATABASE_URL', 'sqlite:///alwrity.db'))

    async def get_analytics(self, user_id: str) -> AnalyticsData:
        """
        Get Bing Webmaster analytics data using the Bing Webmaster API

        Note: Bing Webmaster provides SEO insights and search performance data.
        """
        self.log_analytics_request(user_id, "get_analytics")

        # Check cache first - this is an expensive operation
        cached_data = analytics_cache.get('bing_analytics', user_id)
        if cached_data:
            logger.info(f"Using cached Bing analytics for user {user_id}")
            return AnalyticsData(**cached_data)

        logger.info(f"Fetching fresh Bing analytics for user {user_id} (expensive operation)")
        try:
            # Get user's Bing connection status with detailed token info
            token_status = self.bing_service.get_user_token_status(user_id)

            if not token_status.get('has_active_tokens'):
                if token_status.get('has_expired_tokens'):
                    return self.create_error_response('Bing Webmaster tokens expired - please reconnect')
                else:
                    return self.create_error_response('Bing Webmaster not connected')

            # Try once to fetch sites (may return empty if tokens are valid but no verified sites); do not block
            sites = self.bing_service.get_user_sites(user_id)

            # Get active tokens for the access token
            active_tokens = token_status.get('active_tokens', [])
            if not active_tokens:
                return self.create_error_response('No active Bing Webmaster tokens available')

            # Get the first active token's access token
            token_info = active_tokens[0]
            access_token = token_info.get('access_token')

            # Cache the sites for future use (even if empty)
            analytics_cache.set('bing_sites', user_id, sites or [], ttl_override=2*60*60)
            logger.info(f"Cached Bing sites for analytics for user {user_id} (TTL: 2 hours)")

            if not access_token:
                return self.create_error_response('Bing Webmaster access token not available')

            # Do NOT call live Bing APIs here; use stored analytics like step 5
            query_stats = {}
            try:
                # If sites are available, use the first; otherwise ask storage for any stored summary
                site_url_for_storage = sites[0].get('Url', '') if (sites and isinstance(sites[0], dict)) else None
                stored = self.storage_service.get_analytics_summary(user_id, site_url_for_storage, days=30)
                if stored and isinstance(stored, dict):
                    query_stats = {
                        'total_clicks': stored.get('summary', {}).get('total_clicks', 0),
                        'total_impressions': stored.get('summary', {}).get('total_impressions', 0),
                        'total_queries': stored.get('summary', {}).get('total_queries', 0),
                        'avg_ctr': stored.get('summary', {}).get('total_ctr', 0),
                        'avg_position': stored.get('summary', {}).get('avg_position', 0),
                    }
            except Exception as e:
                logger.warning(f"Bing analytics: Failed to read stored analytics summary: {e}")

            # Get enhanced insights from the database
            insights = self._get_enhanced_insights(user_id, sites[0].get('Url', '') if sites else '')

            # Extract comprehensive site information with actual metrics
            metrics = {
                'connection_status': 'connected',
                'connected_sites': len(sites),
                'sites': sites[:5] if sites else [],
                'connected_since': token_info.get('created_at', ''),
                'scope': token_info.get('scope', ''),
                'total_clicks': query_stats.get('total_clicks', 0),
                'total_impressions': query_stats.get('total_impressions', 0),
                'total_queries': query_stats.get('total_queries', 0),
                'avg_ctr': query_stats.get('avg_ctr', 0),
                'avg_position': query_stats.get('avg_position', 0),
                'insights': insights,
                'note': 'Bing Webmaster API provides SEO insights, search performance, and index status data'
            }

            # If there is no stored data or no sites, return partial like step 5; otherwise success
            if (not sites) or (metrics.get('total_impressions', 0) == 0 and metrics.get('total_clicks', 0) == 0):
                result = self.create_partial_response(metrics=metrics, error_message='Connected to Bing; waiting for stored analytics or site verification')
            else:
                result = self.create_success_response(metrics=metrics)

            # Cache the result to avoid expensive API calls
            analytics_cache.set('bing_analytics', user_id, result.__dict__)
            logger.info(f"Cached Bing analytics data for user {user_id}")

            return result

        except Exception as e:
            self.log_analytics_error(user_id, "get_analytics", e)
            error_result = self.create_error_response(str(e))

            # Cache error result for a shorter time so we retry sooner
            analytics_cache.set('bing_analytics', user_id, error_result.__dict__, ttl_override=300)  # 5 minutes
            return error_result

    def get_connection_status(self, user_id: str) -> Dict[str, Any]:
        """Get Bing Webmaster connection status"""
        self.log_analytics_request(user_id, "get_connection_status")

        try:
            bing_connection = self.bing_service.get_connection_status(user_id)
            return {
                'connected': bing_connection.get('connected', False),
                'sites_count': bing_connection.get('total_sites', 0),
                'sites': bing_connection.get('sites', []),
                'error': None
            }
        except Exception as e:
            self.log_analytics_error(user_id, "get_connection_status", e)
            return {
                'connected': False,
                'sites_count': 0,
                'sites': [],
                'error': str(e)
            }

    def _extract_user_sites(self, sites_data: Any) -> list:
        """Extract user sites from a Bing API response"""
        if isinstance(sites_data, dict):
            if 'd' in sites_data:
                d_data = sites_data['d']
                if isinstance(d_data, dict) and 'results' in d_data:
                    return d_data['results']
                elif isinstance(d_data, list):
                    return d_data
                else:
                    return []
            else:
                return []
        elif isinstance(sites_data, list):
            return sites_data
        else:
            return []

    async def _get_query_stats(self, user_id: str, sites: list) -> Dict[str, Any]:
        """Get query statistics for Bing sites"""
        query_stats = {}
        logger.info(f"Bing sites found: {len(sites)} sites")

        if sites:
            first_site = sites[0]
            logger.info(f"First Bing site: {first_site}")
            # Bing API returns the URL in the 'Url' field (capital U)
            site_url = first_site.get('Url', '') if isinstance(first_site, dict) else str(first_site)
            logger.info(f"Extracted site URL: {site_url}")

            if site_url:
                try:
                    # Use the Bing service method to get query stats
                    logger.info(f"Getting Bing query stats for site: {site_url}")
                    query_data = self.bing_service.get_query_stats(
                        user_id=user_id,
                        site_url=site_url,
                        start_date=(datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d'),
                        end_date=datetime.now().strftime('%Y-%m-%d'),
                        page=0
                    )

                    if "error" not in query_data:
                        logger.info(f"Bing query stats response structure: {type(query_data)}, keys: {list(query_data.keys()) if isinstance(query_data, dict) else 'Not a dict'}")
                        logger.info(f"Bing query stats raw response: {query_data}")

                        # Handle different response structures from the Bing API
                        queries = self._extract_queries(query_data)

                        logger.info(f"Bing queries extracted: {len(queries)} queries")
                        if queries:
                            logger.info(f"First query sample: {queries[0]}")

                        # Calculate summary metrics
                        total_clicks = sum(query.get('Clicks', 0) for query in queries if isinstance(query, dict))
                        total_impressions = sum(query.get('Impressions', 0) for query in queries if isinstance(query, dict))
                        total_queries = len(queries)
                        avg_ctr = (total_clicks / total_impressions * 100) if total_impressions > 0 else 0
                        avg_position = sum(query.get('AvgClickPosition', 0) for query in queries if isinstance(query, dict)) / total_queries if total_queries > 0 else 0

                        query_stats = {
                            'total_clicks': total_clicks,
                            'total_impressions': total_impressions,
                            'total_queries': total_queries,
                            'avg_ctr': round(avg_ctr, 2),
                            'avg_position': round(avg_position, 2)
                        }

                        logger.info(f"Bing query stats calculated: {query_stats}")
                    else:
                        logger.warning(f"Bing query stats error: {query_data['error']}")

                except Exception as e:
                    logger.warning(f"Error getting Bing query stats: {e}")

        return query_stats

    def _extract_queries(self, query_data: Any) -> list:
        """Extract queries from a Bing API response"""
        if isinstance(query_data, dict):
            if 'd' in query_data:
                d_data = query_data['d']
                logger.info(f"Bing 'd' data structure: {type(d_data)}, keys: {list(d_data.keys()) if isinstance(d_data, dict) else 'Not a dict'}")
                if isinstance(d_data, dict) and 'results' in d_data:
                    return d_data['results']
                elif isinstance(d_data, list):
                    return d_data
                else:
                    return []
            else:
                return []
        elif isinstance(query_data, list):
            return query_data
        else:
            return []

    def _get_enhanced_insights(self, user_id: str, site_url: str) -> Dict[str, Any]:
        """Get enhanced insights from stored Bing analytics data"""
        try:
            if not site_url:
                return {'status': 'no_site_url', 'message': 'No site URL available for insights'}

            # Get performance insights
            performance_insights = self.insights_service.get_performance_insights(user_id, site_url, days=30)

            # Get SEO insights
            seo_insights = self.insights_service.get_seo_insights(user_id, site_url, days=30)

            # Get actionable recommendations
            recommendations = self.insights_service.get_actionable_recommendations(user_id, site_url, days=30)

            return {
                'performance': performance_insights,
                'seo': seo_insights,
                'recommendations': recommendations,
                'last_analyzed': datetime.now().isoformat()
            }

        except Exception as e:
            logger.warning(f"Error getting enhanced insights: {e}")
            return {
                'status': 'error',
                'message': f'Unable to generate insights: {str(e)}',
                'fallback': True
            }
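The `_extract_queries` and `_extract_user_sites` helpers exist because Bing's endpoints wrap payloads in an OData-style `{'d': {'results': [...]}}` envelope, but can also return `{'d': [...]}` or a bare list. A standalone restatement of that logic, runnable in isolation:

def extract_results(payload):
    # Unwrap {'d': {'results': [...]}} / {'d': [...]} / bare list; anything else -> []
    if isinstance(payload, dict):
        d_data = payload.get('d')
        if isinstance(d_data, dict) and 'results' in d_data:
            return d_data['results']
        if isinstance(d_data, list):
            return d_data
        return []
    return payload if isinstance(payload, list) else []

assert extract_results({'d': {'results': [1, 2]}}) == [1, 2]
assert extract_results({'d': [1, 2]}) == [1, 2]
assert extract_results([1, 2]) == [1, 2]
assert extract_results({'unexpected': True}) == []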
backend/services/analytics/handlers/gsc_handler.py (new file, 255 lines)
@@ -0,0 +1,255 @@
"""
Google Search Console Analytics Handler

Handles GSC analytics data retrieval and processing.
"""

from typing import Dict, Any
from datetime import datetime, timedelta
from loguru import logger

from services.gsc_service import GSCService
from ...analytics_cache_service import analytics_cache
from ..models.analytics_data import AnalyticsData
from ..models.platform_types import PlatformType
from .base_handler import BaseAnalyticsHandler


class GSCAnalyticsHandler(BaseAnalyticsHandler):
    """Handler for Google Search Console analytics"""

    def __init__(self):
        super().__init__(PlatformType.GSC)
        self.gsc_service = GSCService()

    async def get_analytics(self, user_id: str) -> AnalyticsData:
        """
        Get Google Search Console analytics data with caching

        Returns comprehensive SEO metrics including clicks, impressions, CTR, and position data.
        """
        self.log_analytics_request(user_id, "get_analytics")

        # Check cache first - GSC API calls can be expensive
        cached_data = analytics_cache.get('gsc_analytics', user_id)
        if cached_data:
            logger.info(f"Using cached GSC analytics for user {user_id}")
            return AnalyticsData(**cached_data)

        logger.info(f"Fetching fresh GSC analytics for user {user_id}")
        try:
            # Get user's sites
            sites = self.gsc_service.get_site_list(user_id)
            logger.info(f"GSC sites found for user {user_id}: {sites}")
            if not sites:
                logger.warning(f"No GSC sites found for user {user_id}")
                return self.create_error_response('No GSC sites found')

            # Get analytics for the first site (or combine all sites)
            site_url = sites[0]['siteUrl']
            logger.info(f"Using GSC site URL: {site_url}")

            # Get search analytics for the last 30 days
            end_date = datetime.now().strftime('%Y-%m-%d')
            start_date = (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')
            logger.info(f"GSC date range: {start_date} to {end_date}")

            search_analytics = self.gsc_service.get_search_analytics(
                user_id=user_id,
                site_url=site_url,
                start_date=start_date,
                end_date=end_date
            )
            logger.info(f"GSC search analytics retrieved for user {user_id}")

            # Process GSC data into standardized format
            processed_metrics = self._process_gsc_metrics(search_analytics)

            result = self.create_success_response(
                metrics=processed_metrics,
                date_range={'start': start_date, 'end': end_date}
            )

            # Cache the result to avoid expensive API calls
            analytics_cache.set('gsc_analytics', user_id, result.__dict__)
            logger.info(f"Cached GSC analytics data for user {user_id}")

            return result

        except Exception as e:
            self.log_analytics_error(user_id, "get_analytics", e)
            error_result = self.create_error_response(str(e))

            # Cache error result for a shorter time so we retry sooner
            analytics_cache.set('gsc_analytics', user_id, error_result.__dict__, ttl_override=300)  # 5 minutes
            return error_result

    def get_connection_status(self, user_id: str) -> Dict[str, Any]:
        """Get GSC connection status"""
        self.log_analytics_request(user_id, "get_connection_status")

        try:
            sites = self.gsc_service.get_site_list(user_id)
            return {
                'connected': len(sites) > 0,
                'sites_count': len(sites),
                'sites': sites[:3] if sites else [],  # Show first 3 sites
                'error': None
            }
        except Exception as e:
            self.log_analytics_error(user_id, "get_connection_status", e)
            return {
                'connected': False,
                'sites_count': 0,
                'sites': [],
                'error': str(e)
            }

    def _process_gsc_metrics(self, search_analytics: Dict[str, Any]) -> Dict[str, Any]:
        """Process GSC raw data into standardized metrics"""
        try:
            # Debug: log the raw search analytics data structure
            logger.info(f"GSC raw search analytics structure: {search_analytics}")
            logger.info(f"GSC raw search analytics keys: {list(search_analytics.keys())}")

            # Handle new data structure with overall_metrics and query_data
            if 'overall_metrics' in search_analytics:
                # New structure from the updated GSC service
                overall_rows = search_analytics.get('overall_metrics', {}).get('rows', [])
                query_rows = search_analytics.get('query_data', {}).get('rows', [])
                verification_rows = search_analytics.get('verification_data', {}).get('rows', [])

                logger.info(f"GSC overall metrics rows: {len(overall_rows)}")
                logger.info(f"GSC query data rows: {len(query_rows)}")
                logger.info(f"GSC verification rows: {len(verification_rows)}")

                if overall_rows:
                    logger.info(f"GSC overall first row: {overall_rows[0]}")
                if query_rows:
                    logger.info(f"GSC query first row: {query_rows[0]}")

                # Use query_rows for detailed insights, overall_rows for summary
                rows = query_rows if query_rows else overall_rows
            else:
                # Legacy structure
                rows = search_analytics.get('rows', [])
                logger.info(f"GSC legacy rows count: {len(rows)}")
                if rows:
                    logger.info(f"GSC legacy first row structure: {rows[0]}")
                    logger.info(f"GSC legacy first row keys: {list(rows[0].keys()) if rows[0] else 'No rows'}")

            # Calculate summary metrics - handle different response formats
            total_clicks = 0
            total_impressions = 0
            total_position = 0
            valid_rows = 0

            for row in rows:
                # Handle different possible response formats
                clicks = row.get('clicks', 0)
                impressions = row.get('impressions', 0)
                position = row.get('position', 0)

                # If position is 0 or None, skip it in the average calculation
                if position and position > 0:
                    total_position += position
                    valid_rows += 1

                total_clicks += clicks
                total_impressions += impressions

            avg_ctr = (total_clicks / total_impressions * 100) if total_impressions > 0 else 0
            avg_position = total_position / valid_rows if valid_rows > 0 else 0

            logger.info(f"GSC calculated metrics - clicks: {total_clicks}, impressions: {total_impressions}, ctr: {avg_ctr}, position: {avg_position}, valid_rows: {valid_rows}")

            # Get top performing queries - handle different data structures
            if rows and 'keys' in rows[0]:
                # New GSC API format with keys array
                top_queries = sorted(rows, key=lambda x: x.get('clicks', 0), reverse=True)[:10]

                # Get top performing pages (if we have page data)
                page_data = {}
                for row in rows:
                    # Handle different key structures
                    keys = row.get('keys', [])
                    if len(keys) > 1 and keys[1]:  # Page data available
                        page = keys[1].get('keys', ['Unknown'])[0] if isinstance(keys[1], dict) else str(keys[1])
                    else:
                        page = 'Unknown'

                    if page not in page_data:
                        page_data[page] = {'clicks': 0, 'impressions': 0, 'ctr': 0, 'position': 0}
                    page_data[page]['clicks'] += row.get('clicks', 0)
                    page_data[page]['impressions'] += row.get('impressions', 0)
            else:
                # Legacy format or no keys structure
                top_queries = sorted(rows, key=lambda x: x.get('clicks', 0), reverse=True)[:10]
                page_data = {}

            # Calculate page metrics
            for page in page_data:
                if page_data[page]['impressions'] > 0:
                    page_data[page]['ctr'] = page_data[page]['clicks'] / page_data[page]['impressions'] * 100

            top_pages = sorted(page_data.items(), key=lambda x: x[1]['clicks'], reverse=True)[:10]

            return {
                'connection_status': 'connected',
                'connected_sites': 1,  # GSC typically has one site per user
                'total_clicks': total_clicks,
                'total_impressions': total_impressions,
                'avg_ctr': round(avg_ctr, 2),
                'avg_position': round(avg_position, 2),
                'total_queries': len(rows),
                'top_queries': [
                    {
                        'query': self._extract_query_from_row(row),
                        'clicks': row.get('clicks', 0),
                        'impressions': row.get('impressions', 0),
                        'ctr': round(row.get('ctr', 0) * 100, 2),
                        'position': round(row.get('position', 0), 2)
                    }
                    for row in top_queries
                ],
                'top_pages': [
                    {
                        'page': page,
                        'clicks': data['clicks'],
                        'impressions': data['impressions'],
                        'ctr': round(data['ctr'], 2)
                    }
                    for page, data in top_pages
                ],
                'note': 'Google Search Console provides search performance data, keyword rankings, and SEO insights'
            }

        except Exception as e:
            logger.error(f"Error processing GSC metrics: {e}")
            return {
                'connection_status': 'error',
                'connected_sites': 0,
                'total_clicks': 0,
                'total_impressions': 0,
                'avg_ctr': 0,
                'avg_position': 0,
                'total_queries': 0,
                'top_queries': [],
                'top_pages': [],
                'error': str(e)
            }

    def _extract_query_from_row(self, row: Dict[str, Any]) -> str:
        """Extract query text from GSC API row data"""
        try:
            keys = row.get('keys', [])
            if keys:
                first_key = keys[0]
                if isinstance(first_key, dict):
                    return first_key.get('keys', ['Unknown'])[0]
                return str(first_key)
            return 'Unknown'
        except Exception as e:
            logger.error(f"Error extracting query from row: {e}")
            return 'Unknown'
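A worked example of the summary math in `_process_gsc_metrics`, on two hypothetical rows. Note the design choice: CTR is computed from the click/impression totals (so it is effectively impression-weighted), while position is a simple mean over rows with a nonzero position:

rows = [
    {'clicks': 10, 'impressions': 200, 'position': 4.0},
    {'clicks': 5,  'impressions': 300, 'position': 8.0},
]
total_clicks = sum(r['clicks'] for r in rows)                 # 15
total_impressions = sum(r['impressions'] for r in rows)       # 500
avg_ctr = total_clicks / total_impressions * 100              # 3.0 (%)
avg_position = sum(r['position'] for r in rows) / len(rows)   # 6.0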
backend/services/analytics/handlers/wix_handler.py (new file, 71 lines)
@@ -0,0 +1,71 @@
"""
Wix Analytics Handler

Handles Wix analytics data retrieval and processing.
Note: This is currently a placeholder implementation.
"""

from typing import Dict, Any

from services.wix_service import WixService
from ..models.analytics_data import AnalyticsData
from ..models.platform_types import PlatformType
from .base_handler import BaseAnalyticsHandler


class WixAnalyticsHandler(BaseAnalyticsHandler):
    """Handler for Wix analytics"""

    def __init__(self):
        super().__init__(PlatformType.WIX)
        self.wix_service = WixService()

    async def get_analytics(self, user_id: str) -> AnalyticsData:
        """
        Get Wix analytics data using the Business Management API

        Note: This requires the Wix Business Management API, which may need additional permissions.
        """
        self.log_analytics_request(user_id, "get_analytics")

        try:
            # TODO: Implement Wix analytics retrieval. This would require:
            # 1. Storing Wix access tokens in the database
            # 2. Using the Wix Business Management API
            # 3. Requesting analytics permissions during OAuth

            # For now, return a placeholder response
            return self.create_partial_response(
                metrics={
                    'connection_status': 'not_implemented',
                    'connected_sites': 0,
                    'page_views': 0,
                    'visitors': 0,
                    'bounce_rate': 0,
                    'avg_session_duration': 0,
                    'top_pages': [],
                    'traffic_sources': {},
                    'device_breakdown': {},
                    'geo_distribution': {},
                    'note': 'Wix analytics integration coming soon'
                },
                error_message='Wix analytics integration coming soon'
            )

        except Exception as e:
            self.log_analytics_error(user_id, "get_analytics", e)
            return self.create_error_response(str(e))

    def get_connection_status(self, user_id: str) -> Dict[str, Any]:
        """Get Wix connection status"""
        self.log_analytics_request(user_id, "get_connection_status")

        # TODO: Implement the actual Wix connection check
        return {
            'connected': False,
            'sites_count': 0,
            'sites': [],
            'error': 'Wix connection check not implemented'
        }
backend/services/analytics/handlers/wordpress_handler.py (new file, 119 lines)
@@ -0,0 +1,119 @@
"""
WordPress.com Analytics Handler

Handles WordPress.com analytics data retrieval and processing.
"""

import requests
from typing import Dict, Any
from loguru import logger

from services.integrations.wordpress_oauth import WordPressOAuthService
from ..models.analytics_data import AnalyticsData
from ..models.platform_types import PlatformType
from .base_handler import BaseAnalyticsHandler


class WordPressAnalyticsHandler(BaseAnalyticsHandler):
    """Handler for WordPress.com analytics"""

    def __init__(self):
        super().__init__(PlatformType.WORDPRESS)
        self.wordpress_service = WordPressOAuthService()

    async def get_analytics(self, user_id: str) -> AnalyticsData:
        """
        Get WordPress analytics data using the WordPress.com REST API

        Note: WordPress.com has limited analytics API access, so we try to
        get basic site stats and post data.
        """
        self.log_analytics_request(user_id, "get_analytics")

        try:
            # Get user's WordPress tokens
            connection_status = self.wordpress_service.get_connection_status(user_id)

            if not connection_status.get('connected'):
                return self.create_error_response('WordPress not connected')

            # Get the first connected site
            sites = connection_status.get('sites', [])
            if not sites:
                return self.create_error_response('No WordPress sites found')

            site = sites[0]
            access_token = site.get('access_token')
            blog_id = site.get('blog_id')

            if not access_token or not blog_id:
                return self.create_error_response('WordPress access token not available')

            # Try to get basic site stats from the WordPress.com API
            headers = {
                'Authorization': f'Bearer {access_token}',
                'User-Agent': 'ALwrity/1.0'
            }

            # Get site info and basic stats
            site_info_url = f"https://public-api.wordpress.com/rest/v1.1/sites/{blog_id}"
            response = requests.get(site_info_url, headers=headers, timeout=10)

            if response.status_code != 200:
                logger.warning(f"WordPress API call failed: {response.status_code}")
                # Return basic connection info instead of full analytics
                return self.create_partial_response(
                    metrics={
                        'site_name': site.get('blog_url', 'Unknown'),
                        'connection_status': 'connected',
                        'blog_id': blog_id,
                        'connected_since': site.get('created_at', ''),
                        'note': 'WordPress.com API has limited analytics access'
                    },
                    error_message='WordPress.com API has limited analytics access'
                )

            site_data = response.json()

            # Extract basic site information
            metrics = {
                'site_name': site_data.get('name', 'Unknown'),
                'site_url': site_data.get('URL', ''),
                'blog_id': blog_id,
                'language': site_data.get('lang', ''),
                'timezone': site_data.get('timezone', ''),
                'is_private': site_data.get('is_private', False),
                'is_coming_soon': site_data.get('is_coming_soon', False),
                'connected_since': site.get('created_at', ''),
                'connection_status': 'connected',
                'connected_sites': len(sites),
                'note': 'WordPress.com API has limited analytics access. For detailed analytics, consider integrating with Google Analytics or Jetpack Stats.'
            }

            return self.create_success_response(metrics=metrics)

        except Exception as e:
            self.log_analytics_error(user_id, "get_analytics", e)
            return self.create_error_response(str(e))

    def get_connection_status(self, user_id: str) -> Dict[str, Any]:
        """Get WordPress.com connection status"""
        self.log_analytics_request(user_id, "get_connection_status")

        try:
            wp_connection = self.wordpress_service.get_connection_status(user_id)
            return {
                'connected': wp_connection.get('connected', False),
                'sites_count': wp_connection.get('total_sites', 0),
                'sites': wp_connection.get('sites', []),
                'error': None
            }
        except Exception as e:
            self.log_analytics_error(user_id, "get_connection_status", e)
            return {
                'connected': False,
                'sites_count': 0,
                'sites': [],
                'error': str(e)
            }
backend/services/analytics/insights/__init__.py (new file, 11 lines)
@@ -0,0 +1,11 @@
"""
Analytics Insights Package

Advanced insights and recommendations for analytics data.
"""

from .bing_insights_service import BingInsightsService

__all__ = [
    'BingInsightsService'
]
backend/services/analytics/insights/bing_insights_service.py (new file, 1038 lines)
File diff suppressed because it is too large.
backend/services/analytics/models/__init__.py (new file, 15 lines)
@@ -0,0 +1,15 @@
"""
Analytics Models Package

Contains data models and type definitions for the analytics system.
"""

from .analytics_data import AnalyticsData
from .platform_types import PlatformType, AnalyticsStatus, PlatformConnectionStatus

__all__ = [
    'AnalyticsData',
    'PlatformType',
    'AnalyticsStatus',
    'PlatformConnectionStatus'
]
backend/services/analytics/models/analytics_data.py (new file, 51 lines)
@@ -0,0 +1,51 @@
"""
Analytics Data Models

Core data structures for analytics data across all platforms.
"""

from dataclasses import dataclass
from typing import Dict, Any, Optional


@dataclass
class AnalyticsData:
    """Standardized analytics data structure for all platforms"""
    platform: str
    metrics: Dict[str, Any]
    date_range: Dict[str, str]
    last_updated: str
    status: str  # 'success', 'error', 'partial'
    error_message: Optional[str] = None

    def is_successful(self) -> bool:
        """Check if the analytics data was successfully retrieved"""
        return self.status == 'success'

    def is_partial(self) -> bool:
        """Check if the analytics data is partially available"""
        return self.status == 'partial'

    def has_error(self) -> bool:
        """Check if there was an error retrieving analytics data"""
        return self.status == 'error'

    def get_metric(self, key: str, default: Any = None) -> Any:
        """Get a specific metric value with a fallback"""
        return self.metrics.get(key, default)

    def get_total_clicks(self) -> int:
        """Get total clicks for this platform"""
        return self.get_metric('total_clicks', 0)

    def get_total_impressions(self) -> int:
        """Get total impressions for this platform"""
        return self.get_metric('total_impressions', 0)

    def get_avg_ctr(self) -> float:
        """Get average click-through rate"""
        return self.get_metric('avg_ctr', 0.0)

    def get_avg_position(self) -> float:
        """Get average position in search results"""
        return self.get_metric('avg_position', 0.0)
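The dataclass round-trips through the cache via `__dict__` and keyword expansion, which is exactly what the handlers do on a cache hit. A self-contained illustration with hypothetical values:

data = AnalyticsData(
    platform='gsc',
    metrics={'total_clicks': 120, 'avg_ctr': 2.4},
    date_range={'start': '2024-01-01', 'end': '2024-01-31'},
    last_updated='2024-01-31T12:00:00',
    status='success',
)
assert data.is_successful() and data.get_total_clicks() == 120

cached = data.__dict__              # what analytics_cache.set() stores
restored = AnalyticsData(**cached)  # what a cache hit rebuilds
assert restored == data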
backend/services/analytics/models/platform_types.py (new file, 85 lines)
@@ -0,0 +1,85 @@
"""
Platform Types and Enums

Type definitions and constants for platform analytics.
"""

from enum import Enum
from typing import Dict, Any, List, Optional
from dataclasses import dataclass


class PlatformType(Enum):
    """Supported analytics platforms"""
    GSC = "gsc"
    BING = "bing"
    WORDPRESS = "wordpress"
    WIX = "wix"


class AnalyticsStatus(Enum):
    """Analytics data retrieval status"""
    SUCCESS = "success"
    ERROR = "error"
    PARTIAL = "partial"


@dataclass
class PlatformConnectionStatus:
    """Platform connection status information"""
    connected: bool
    sites_count: int
    sites: List[Dict[str, Any]]
    error: Optional[str] = None

    def has_sites(self) -> bool:
        """Check if platform has connected sites"""
        return self.sites_count > 0

    def get_first_site(self) -> Optional[Dict[str, Any]]:
        """Get the first connected site"""
        return self.sites[0] if self.sites else None


# Platform configuration constants
PLATFORM_CONFIG = {
    PlatformType.GSC: {
        "name": "Google Search Console",
        "description": "SEO performance and search analytics",
        "api_endpoint": "https://www.googleapis.com/webmasters/v3/sites",
        "cache_ttl": 3600,  # 1 hour
    },
    PlatformType.BING: {
        "name": "Bing Webmaster Tools",
        "description": "Search performance and SEO insights",
        "api_endpoint": "https://ssl.bing.com/webmaster/api.svc/json",
        "cache_ttl": 3600,  # 1 hour
    },
    PlatformType.WORDPRESS: {
        "name": "WordPress.com",
        "description": "Content management and site analytics",
        "api_endpoint": "https://public-api.wordpress.com/rest/v1.1",
        "cache_ttl": 1800,  # 30 minutes
    },
    PlatformType.WIX: {
        "name": "Wix",
        "description": "Website builder and analytics",
        "api_endpoint": "https://www.wix.com/_api/wix-business-accounts",
        "cache_ttl": 1800,  # 30 minutes
    }
}

# Default platforms to include in comprehensive analytics
DEFAULT_PLATFORMS = [PlatformType.GSC, PlatformType.BING, PlatformType.WORDPRESS, PlatformType.WIX]

# Metrics that are common across platforms
COMMON_METRICS = [
    'total_clicks',
    'total_impressions',
    'avg_ctr',
    'avg_position',
    'total_queries',
    'connection_status',
    'connected_sites',
    'last_updated'
]
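`PlatformType("gsc")` is the conversion the orchestrator uses to map API-supplied platform strings back to enum members, which then key into PLATFORM_CONFIG; a short illustration:

platform = PlatformType("gsc")               # raises ValueError for unknown names
config = PLATFORM_CONFIG[platform]
print(config["name"], config["cache_ttl"])   # Google Search Console 3600
print([p.value for p in DEFAULT_PLATFORMS])  # ['gsc', 'bing', 'wordpress', 'wix']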
backend/services/analytics/platform_analytics_service.py (new file, 166 lines)
@@ -0,0 +1,166 @@
|
||||
"""
|
||||
Platform Analytics Service (Refactored)
|
||||
|
||||
Streamlined orchestrator service for platform analytics with modular architecture.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any, List, Optional
|
||||
from loguru import logger
|
||||
|
||||
from .models.analytics_data import AnalyticsData
|
||||
from .models.platform_types import PlatformType, DEFAULT_PLATFORMS
|
||||
from .handlers import (
|
||||
GSCAnalyticsHandler,
|
||||
BingAnalyticsHandler,
|
||||
WordPressAnalyticsHandler,
|
||||
WixAnalyticsHandler
|
||||
)
|
||||
from .connection_manager import PlatformConnectionManager
|
||||
from .summary_generator import AnalyticsSummaryGenerator
|
||||
from .cache_manager import AnalyticsCacheManager
|
||||
|
||||
|
||||
class PlatformAnalyticsService:
|
||||
"""
|
||||
Streamlined service for retrieving analytics data from connected platforms.
|
||||
|
||||
This service orchestrates platform handlers, manages caching, and provides
|
||||
comprehensive analytics summaries.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
# Initialize platform handlers
|
||||
self.handlers = {
|
||||
PlatformType.GSC: GSCAnalyticsHandler(),
|
||||
PlatformType.BING: BingAnalyticsHandler(),
|
||||
PlatformType.WORDPRESS: WordPressAnalyticsHandler(),
|
||||
PlatformType.WIX: WixAnalyticsHandler()
|
||||
}
|
||||
|
||||
# Initialize managers
|
||||
self.connection_manager = PlatformConnectionManager()
|
||||
self.summary_generator = AnalyticsSummaryGenerator()
|
||||
self.cache_manager = AnalyticsCacheManager()
|
||||
|
||||
async def get_comprehensive_analytics(self, user_id: str, platforms: List[str] = None) -> Dict[str, AnalyticsData]:
|
||||
"""
|
||||
Get analytics data from all connected platforms
|
||||
|
||||
Args:
|
||||
user_id: User ID to get analytics for
|
||||
platforms: List of platforms to get data from (None = all available)
|
||||
|
||||
Returns:
|
||||
Dictionary of platform analytics data
|
||||
"""
|
||||
if platforms is None:
|
||||
platforms = [p.value for p in DEFAULT_PLATFORMS]
|
||||
|
||||
logger.info(f"Getting comprehensive analytics for user {user_id}, platforms: {platforms}")
|
||||
analytics_data = {}
|
||||
|
||||
for platform_name in platforms:
|
||||
try:
|
||||
# Convert string to PlatformType enum
|
||||
platform_type = PlatformType(platform_name)
|
||||
handler = self.handlers.get(platform_type)
|
||||
|
||||
if handler:
|
||||
analytics_data[platform_name] = await handler.get_analytics(user_id)
|
||||
else:
|
||||
logger.warning(f"Unknown platform: {platform_name}")
|
||||
analytics_data[platform_name] = self._create_error_response(platform_name, f"Unknown platform: {platform_name}")
|
||||
|
||||
except ValueError:
|
||||
logger.warning(f"Invalid platform name: {platform_name}")
|
||||
analytics_data[platform_name] = self._create_error_response(platform_name, f"Invalid platform name: {platform_name}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get analytics for {platform_name}: {e}")
|
||||
analytics_data[platform_name] = self._create_error_response(platform_name, str(e))
|
||||
|
||||
return analytics_data
|
||||
|
||||
async def get_platform_connection_status(self, user_id: str) -> Dict[str, Dict[str, Any]]:
|
||||
"""
|
||||
Check connection status for all platforms
|
||||
|
||||
Returns:
|
||||
Dictionary with connection status for each platform
|
||||
"""
|
||||
return await self.connection_manager.get_platform_connection_status(user_id)
|
||||
|
||||
def get_analytics_summary(self, analytics_data: Dict[str, AnalyticsData]) -> Dict[str, Any]:
|
||||
"""
|
||||
Generate a summary of analytics data across all platforms
|
||||
|
||||
Args:
|
||||
analytics_data: Dictionary of platform analytics data
|
||||
|
||||
Returns:
|
||||
Summary statistics and insights
|
||||
"""
|
||||
return self.summary_generator.get_analytics_summary(analytics_data)
|
||||
|
||||
def get_platform_comparison(self, analytics_data: Dict[str, AnalyticsData]) -> Dict[str, Any]:
|
||||
"""Generate platform comparison metrics"""
|
||||
return self.summary_generator.get_platform_comparison(analytics_data)
|
||||
|
||||
def get_trend_analysis(self, analytics_data: Dict[str, AnalyticsData]) -> Dict[str, Any]:
|
||||
"""Generate trend analysis (placeholder for future implementation)"""
|
||||
return self.summary_generator.get_trend_analysis(analytics_data)
|
||||
|
||||
def invalidate_platform_cache(self, user_id: str, platform: str = None):
|
||||
"""
|
||||
Invalidate cache for platform connections and analytics
|
||||
|
||||
Args:
|
||||
user_id: User ID to invalidate cache for
|
||||
platform: Specific platform to invalidate (optional, invalidates all if None)
|
||||
"""
|
||||
if platform:
|
||||
try:
|
||||
platform_type = PlatformType(platform)
|
||||
self.cache_manager.invalidate_platform_cache(platform_type, user_id)
|
||||
logger.info(f"Invalidated {platform} cache for user {user_id}")
|
||||
except ValueError:
|
||||
logger.warning(f"Invalid platform name for cache invalidation: {platform}")
|
||||
else:
|
||||
self.cache_manager.invalidate_user_cache(user_id)
|
||||
logger.info(f"Invalidated all platform caches for user {user_id}")
    def invalidate_connection_cache(self, user_id: str):
        """Invalidate platform connection status cache"""
        self.cache_manager.invalidate_platform_status_cache(user_id)

    def get_cache_stats(self) -> Dict[str, Any]:
        """Get cache statistics"""
        return self.cache_manager.get_cache_stats()

    def clear_all_cache(self):
        """Clear all analytics cache"""
        self.cache_manager.clear_all_cache()

    def get_supported_platforms(self) -> List[str]:
        """Get list of supported platforms"""
        return [p.value for p in PlatformType]

    def get_platform_handler(self, platform: str) -> Optional[Any]:
        """Get handler for a specific platform"""
        try:
            platform_type = PlatformType(platform)
            return self.handlers.get(platform_type)
        except ValueError:
            return None

    def _create_error_response(self, platform_name: str, error_message: str) -> AnalyticsData:
        """Create a standardized error response"""
        from datetime import datetime

        return AnalyticsData(
            platform=platform_name,
            metrics={},
            date_range={'start': '', 'end': ''},
            last_updated=datetime.now().isoformat(),
            status='error',
            error_message=error_message
        )
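For reference, a sketch of what the standardized error record looks like in practice; field access assumes the AnalyticsData model shown in this commit:

# Hypothetical sketch; _create_error_response is private but illustrates the shape.
error = service._create_error_response("bing", "Invalid platform name: bing")
assert error.status == 'error'
assert error.metrics == {}
print(error.error_message)  # -> Invalid platform name: bing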
215
backend/services/analytics/summary_generator.py
Normal file
@@ -0,0 +1,215 @@
"""
Analytics Summary Generator

Generates comprehensive summaries and aggregations of analytics data across platforms.
"""

from typing import Dict, Any, List
from datetime import datetime
from loguru import logger

from .models.analytics_data import AnalyticsData
from .models.platform_types import PlatformType


class AnalyticsSummaryGenerator:
    """Generates analytics summaries and insights"""

    def __init__(self):
        self.supported_metrics = [
            'total_clicks',
            'total_impressions',
            'avg_ctr',
            'avg_position',
            'total_queries',
            'connected_sites'
        ]
    def get_analytics_summary(self, analytics_data: Dict[str, AnalyticsData]) -> Dict[str, Any]:
        """
        Generate a summary of analytics data across all platforms

        Args:
            analytics_data: Dictionary of platform analytics data

        Returns:
            Summary statistics and insights
        """
        summary = {
            'total_platforms': len(analytics_data),
            'connected_platforms': 0,
            'successful_data': 0,
            'partial_data': 0,
            'failed_data': 0,
            'total_clicks': 0,
            'total_impressions': 0,
            'total_queries': 0,
            'total_sites': 0,
            'platforms': {},
            'insights': [],
            'last_updated': datetime.now().isoformat()
        }

        # Process each platform's data
        for platform_name, data in analytics_data.items():
            platform_summary = self._process_platform_data(platform_name, data)
            summary['platforms'][platform_name] = platform_summary

            # Aggregate counts
            if data.status == 'success':
                summary['connected_platforms'] += 1
                summary['successful_data'] += 1
            elif data.status == 'partial':
                summary['partial_data'] += 1
            else:
                summary['failed_data'] += 1

            # Aggregate metrics if successful
            if data.is_successful():
                summary['total_clicks'] += data.get_total_clicks()
                summary['total_impressions'] += data.get_total_impressions()
                summary['total_queries'] += data.get_metric('total_queries', 0)
                summary['total_sites'] += data.get_metric('connected_sites', 0)

        # Calculate derived metrics
        summary['overall_ctr'] = self._calculate_ctr(summary['total_clicks'], summary['total_impressions'])
        summary['avg_position'] = self._calculate_avg_position(analytics_data)
        summary['insights'] = self._generate_insights(summary, analytics_data)

        return summary
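A hedged end-to-end sketch of the summary path. The AnalyticsData fields mirror _create_error_response in the service above; the metric key names and a default error_message are assumptions about the model:

# Hypothetical sketch -- metric keys and a default error_message are assumed.
from datetime import datetime

from backend.services.analytics import AnalyticsData, AnalyticsSummaryGenerator

gsc = AnalyticsData(
    platform='gsc',
    metrics={'total_clicks': 150, 'total_impressions': 3000, 'total_queries': 42},
    date_range={'start': '2024-01-01', 'end': '2024-01-31'},
    last_updated=datetime.now().isoformat(),
    status='success'
)

generator = AnalyticsSummaryGenerator()
summary = generator.get_analytics_summary({'gsc': gsc})
print(summary['overall_ctr'])  # 5.0 -- see _calculate_ctr below
print(summary['total_clicks'], summary['total_impressions'])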
    def _process_platform_data(self, platform_name: str, data: AnalyticsData) -> Dict[str, Any]:
        """Process individual platform data for summary"""
        platform_summary = {
            'status': data.status,
            'last_updated': data.last_updated,
            'metrics_count': len(data.metrics),
            'has_data': data.is_successful() or data.is_partial()
        }

        if data.has_error():
            platform_summary['error'] = data.error_message

        if data.is_successful():
            # Add key metrics for successful platforms
            platform_summary.update({
                'clicks': data.get_total_clicks(),
                'impressions': data.get_total_impressions(),
                'ctr': data.get_avg_ctr(),
                'position': data.get_avg_position(),
                'queries': data.get_metric('total_queries', 0),
                'sites': data.get_metric('connected_sites', 0)
            })

        return platform_summary
    def _calculate_ctr(self, total_clicks: int, total_impressions: int) -> float:
        """Calculate overall click-through rate"""
        if total_impressions > 0:
            return round(total_clicks / total_impressions * 100, 2)
        return 0.0
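A quick worked check of the arithmetic above: 150 clicks on 3,000 impressions is 150 / 3000 × 100 = 5.0%, and zero impressions short-circuits to 0.0 instead of raising a division error:

assert AnalyticsSummaryGenerator()._calculate_ctr(150, 3000) == 5.0
assert AnalyticsSummaryGenerator()._calculate_ctr(10, 0) == 0.0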
    def _calculate_avg_position(self, analytics_data: Dict[str, AnalyticsData]) -> float:
        """Calculate average position across all platforms"""
        total_position = 0
        platform_count = 0

        for data in analytics_data.values():
            if data.is_successful():
                position = data.get_avg_position()
                if position > 0:
                    total_position += position
                    platform_count += 1

        if platform_count > 0:
            return round(total_position / platform_count, 2)
        return 0.0
    def _generate_insights(self, summary: Dict[str, Any], analytics_data: Dict[str, AnalyticsData]) -> List[str]:
        """Generate actionable insights from analytics data"""
        insights = []

        # Connection insights
        if summary['connected_platforms'] == 0:
            insights.append("No platforms are currently connected. Connect platforms to start collecting analytics data.")
        elif summary['connected_platforms'] < summary['total_platforms']:
            insights.append(f"Only {summary['connected_platforms']} of {summary['total_platforms']} platforms are connected.")

        # Performance insights
        if summary['total_clicks'] > 0:
            insights.append(f"Total traffic across all platforms: {summary['total_clicks']:,} clicks from {summary['total_impressions']:,} impressions.")

        # Only evaluate CTR when there were impressions; otherwise the 0.0
        # default would always trigger the "below 2%" insight on empty data.
        if summary['total_impressions'] > 0:
            if summary['overall_ctr'] < 2.0:
                insights.append("Overall CTR is below 2%. Consider optimizing titles and descriptions for better click-through rates.")
            elif summary['overall_ctr'] > 5.0:
                insights.append("Excellent CTR performance! Your content is highly engaging.")

        # Platform-specific insights
        for platform_name, data in analytics_data.items():
            if data.is_successful():
                position = data.get_avg_position()
                if position > 10:
                    insights.append(f"{platform_name.title()} average position is {position}. Consider SEO optimization.")
                elif 0 < position < 5:
                    # A position of 0 means no ranking data, not a top ranking.
                    insights.append(f"Great {platform_name.title()} performance! Average position is {position}.")

        # Data freshness insights
        for platform_name, data in analytics_data.items():
            if data.is_successful():
                try:
                    last_updated = datetime.fromisoformat(data.last_updated.replace('Z', '+00:00'))
                    hours_old = (datetime.now(last_updated.tzinfo) - last_updated).total_seconds() / 3600

                    if hours_old > 24:
                        insights.append(f"{platform_name.title()} data is {hours_old:.1f} hours old. Consider refreshing for latest insights.")
                except (ValueError, AttributeError):
                    # Skip the freshness check for timestamps that fail to parse.
                    pass

        return insights
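Continuing the summary sketch from earlier: with a single platform, 150 clicks, and a 5.0% CTR sitting between both thresholds, only the traffic insight should fire — assuming, as before, that get_avg_position() returns 0 when no position metric is present:

# Hypothetical continuation of the earlier sketch.
for line in generator._generate_insights(summary, {'gsc': gsc}):
    print(line)
# -> Total traffic across all platforms: 150 clicks from 3,000 impressions.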
    def get_platform_comparison(self, analytics_data: Dict[str, AnalyticsData]) -> Dict[str, Any]:
        """Generate platform comparison metrics"""
        comparison = {
            'platforms': {},
            'top_performer': None,
            'needs_attention': []
        }

        max_clicks = 0
        top_platform = None

        for platform_name, data in analytics_data.items():
            if data.is_successful():
                platform_metrics = {
                    'clicks': data.get_total_clicks(),
                    'impressions': data.get_total_impressions(),
                    'ctr': data.get_avg_ctr(),
                    'position': data.get_avg_position(),
                    'queries': data.get_metric('total_queries', 0)
                }

                comparison['platforms'][platform_name] = platform_metrics

                # Track top performer
                if platform_metrics['clicks'] > max_clicks:
                    max_clicks = platform_metrics['clicks']
                    top_platform = platform_name

                # Identify platforms needing attention
                if platform_metrics['ctr'] < 1.0 or platform_metrics['position'] > 20:
                    comparison['needs_attention'].append(platform_name)

        comparison['top_performer'] = top_platform
        return comparison
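And a short sketch of the comparison output for the same hypothetical record; with a single successful platform it is the top performer by definition, and it lands in needs_attention only if its CTR is below 1% or its position above 20:

comparison = generator.get_platform_comparison({'gsc': gsc})
print(comparison['top_performer'])    # 'gsc'
print(comparison['needs_attention'])  # [] or ['gsc'], depending on ctr/position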
    def get_trend_analysis(self, analytics_data: Dict[str, AnalyticsData]) -> Dict[str, Any]:
        """Generate trend analysis (placeholder for future implementation)"""
        # TODO: Implement trend analysis when historical data is available
        return {
            'status': 'not_implemented',
            'message': 'Trend analysis requires historical data collection',
            'suggestions': [
                'Enable data storage to track trends over time',
                'Implement daily metrics collection',
                'Add time-series analysis capabilities'
            ]
        }