- new-project.sh: use the nextjs-payload-starter template and Next.js AI rules
- convert-astro.sh: complete rewrite to migrate Astro MDX to Payload CMS Lexical JSON
- deploy.sh: check for next.config instead of astro.config.mjs; use MONGODB_URL
- preview.sh: check for Next.js; default port 3002
- audit-seo.sh: check .tsx pages in src/app and the Next.js config

All scripts now properly support the Next.js + Payload CMS workflow.
337 lines · 8.9 KiB · Bash · Executable File
#!/usr/bin/env bash
|
|
#===============================================================================
|
|
# migrate-to-payload.sh - Migrate Astro content to Payload CMS with Lexical
|
|
#
|
|
# Usage: ./migrate-to-payload.sh [source-path] [target-path]
|
|
#
|
|
# This script migrates content from Astro MDX/Markdown to Payload CMS Lexical.
|
|
# - Converts .md/.mdx files to Payload CMS Lexical JSON format
|
|
# - Creates Payload collection entries
|
|
# - Preserves frontmatter as collection fields
|
|
#
|
|
# Requirements:
|
|
# - Node.js 20+
|
|
# - npm
|
|
#
|
|
#===============================================================================
|
|
|
|
set -e
# NOTE(review): intentionally plain `set -e` — enabling -u would break the
# bare "$1" tests in main() when the script is run without arguments, and
# pipefail would trip on the `find | head` pipeline in analyze_content().
# Confirm those call sites before tightening.

# ANSI color codes for leveled log output.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m'

# Positional arguments: source Astro project (required, validated in main)
# and target Next.js + Payload project (defaults to the current directory).
SOURCE_PATH="${1:-}"
TARGET_PATH="${2:-.}"

# Leveled loggers. %b expands the embedded ANSI escapes in the color
# constants; the message itself goes through %s so user-supplied text is
# never escape-expanded (echo -e would expand it).
log_info()    { printf '%b[INFO]%b %s\n'    "$BLUE"   "$NC" "$1"; }
log_success() { printf '%b[SUCCESS]%b %s\n' "$GREEN"  "$NC" "$1"; }
log_warning() { printf '%b[WARNING]%b %s\n' "$YELLOW" "$NC" "$1"; }
log_error()   { printf '%b[ERROR]%b %s\n'   "$RED"    "$NC" "$1"; }
|
|
|
|
# Print the command-line help text to stdout.
print_usage() {
  # ${0##*/} is the script's basename, same result as basename "$0".
  local script_name="${0##*/}"
  cat << USAGE
Usage: ${script_name} [source-path] [target-path]

Migrate Astro content to Payload CMS with Lexical

Arguments:
  source-path    Path to Astro project with content
  target-path    Path to Next.js + Payload CMS project

Examples:
  ${script_name} /path/to/astro-site /path/to/payload-site

USAGE
}
|
|
|
|
# Locate the Astro content directory inside $SOURCE_PATH and export it as
# the global CONTENT_DIR. Candidates are tried in priority order; the
# script exits with an error if none exists. Leaves cwd at $SOURCE_PATH.
detect_content_type() {
  log_info "Detecting content structure..."

  cd "$SOURCE_PATH"

  local candidate
  for candidate in src/content content src/pages; do
    if [ -d "$candidate" ]; then
      CONTENT_DIR="$candidate"
      log_success "Content directory: $CONTENT_DIR"
      return 0
    fi
  done

  log_error "No content directory found"
  exit 1
}
|
|
|
|
# Copy the detected content directory to a timestamped folder under /tmp
# before any conversion runs. Sets the global BACKUP_DIR (also used later
# by create_migration_report).
backup_content() {
  log_info "Backing up content..."

  BACKUP_DIR="/tmp/migration-backup-$(date +%s)"
  mkdir -p "$BACKUP_DIR"

  local source_dir="$SOURCE_PATH/$CONTENT_DIR"
  if [ -d "$source_dir" ]; then
    cp -r "$source_dir" "$BACKUP_DIR/"
  fi

  log_success "Backup at: $BACKUP_DIR"
}
|
|
|
|
# Print a quick inventory of the source project: count of markdown/MDX
# content files and of .astro components (node_modules excluded), then a
# preview of the first 20 content files. Reads SOURCE_PATH and CONTENT_DIR.
analyze_content() {
  log_info "Analyzing content..."

  cd "$SOURCE_PATH"

  # Declarations split from assignments so a command failure would not be
  # masked by `local` always returning 0.
  local md_count astro_count
  md_count=$(find "$CONTENT_DIR" -type f \( -name "*.md" -o -name "*.mdx" \) 2>/dev/null | wc -l)
  # Filter node_modules natively in find instead of `| grep -v | wc -l`
  # (avoids grep's non-zero exit when there are no matches).
  astro_count=$(find . -type f -name "*.astro" ! -path "*node_modules*" 2>/dev/null | wc -l)

  echo ""
  echo "  Content files: $md_count"
  echo "  Astro components: $astro_count"
  echo ""

  # Preview of the first 20 content files.
  find "$CONTENT_DIR" -type f \( -name "*.md" -o -name "*.mdx" \) 2>/dev/null | head -20
}
|
|
|
|
#---------------------------------------------------------------------------
# json_escape VALUE
# Escape a string for safe embedding inside a JSON double-quoted literal:
# backslashes, double quotes, and the common control characters. The
# original code interpolated raw content into the JSON, which broke on any
# quote or newline in a post.
#---------------------------------------------------------------------------
json_escape() {
  local s=$1
  s=${s//\\/\\\\}     # backslashes first so later escapes are not doubled
  s=${s//\"/\\\"}
  s=${s//$'\r'/\\r}
  s=${s//$'\t'/\\t}
  s=${s//$'\n'/\\n}
  printf '%s' "$s"
}

#---------------------------------------------------------------------------
# frontmatter_field FRONTMATTER KEY
# Print KEY's value from the YAML frontmatter block, with surrounding
# whitespace and quotes trimmed (interior spaces are preserved — the old
# `tr -d ' "'` destroyed spaces inside titles). Prints nothing when the
# key is absent; always returns 0.
#---------------------------------------------------------------------------
frontmatter_field() {
  printf '%s\n' "$1" \
    | grep -i "^$2:" | head -1 | cut -d':' -f2- \
    | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//' \
          -e 's/^"//' -e 's/"$//' -e "s/^'//" -e "s/'$//"
}

# Convert each .md/.mdx file under $CONTENT_DIR into a Payload CMS Lexical
# JSON document in $TARGET_PATH/src/content-migration/. Frontmatter fields
# become collection fields; the body becomes a single Lexical text node.
# NOTE(review): one paragraph node per document is a deliberate
# simplification — rich markdown structure still needs manual attention.
create_lexical_content() {
  log_info "Converting MDX to Payload CMS Lexical format..."

  cd "$SOURCE_PATH"

  local output_dir="$TARGET_PATH/src/content-migration"
  mkdir -p "$output_dir"

  find "$CONTENT_DIR" -type f \( -name "*.md" -o -name "*.mdx" \) 2>/dev/null | while IFS= read -r file; do
    # Strip the extension ourselves: `basename FILE .mdx .md` passes two
    # suffix operands, which GNU basename rejects ("extra operand").
    local filename
    filename=$(basename "$file")
    filename=${filename%.mdx}
    filename=${filename%.md}
    local slug
    slug=$(printf '%s' "$filename" | tr '[:upper:]' '[:lower:]' | tr ' ' '-')

    # Split frontmatter (between the first pair of --- fences) from the
    # body. Count fences so frontmatter keys never leak into the content,
    # and avoid GNU-only `head -n -1`.
    local frontmatter="" content=""
    if grep -q "^---" "$file" 2>/dev/null; then
      frontmatter=$(awk '/^---[[:space:]]*$/ { n++; next } n == 1' "$file")
      content=$(awk '/^---[[:space:]]*$/ { n++; next } n >= 2' "$file")
    else
      content=$(cat "$file")
    fi

    local title date description author image tags
    title=$(frontmatter_field "$frontmatter" "title")
    date=$(frontmatter_field "$frontmatter" "date")
    description=$(frontmatter_field "$frontmatter" "description")
    author=$(frontmatter_field "$frontmatter" "author")
    image=$(frontmatter_field "$frontmatter" "image")
    tags=$(frontmatter_field "$frontmatter" "tags" | tr -d '[]"')

    # Fallbacks: filename as title, today's date when none is declared.
    title=${title:-$filename}
    date=${date:-$(date +%Y-%m-%d)}

    # Render the tags CSV as a comma-separated list of JSON strings
    # (empty when there are no tags, so "tags" becomes []).
    local tags_json
    tags_json=$(printf '%s' "$tags" | awk -F',' '{
      out = ""
      for (i = 1; i <= NF; i++) {
        t = $i
        gsub(/^[ \t]+|[ \t]+$/, "", t)
        if (t != "") out = out (out == "" ? "" : ", ") "\"" t "\""
      }
      printf "%s", out
    }')

    # Lexical text-node mode must be "normal" ("tokenized" is not a valid
    # Lexical mode; valid values are normal/token/segmented).
    cat > "$output_dir/${slug}.json" << JSONEOF
{
  "title": "$(json_escape "$title")",
  "slug": "$slug",
  "createdAt": "$date",
  "updatedAt": "$(date +%Y-%m-%d)",
  "meta": {
    "title": "$(json_escape "$title")",
    "description": "$(json_escape "$description")"
  },
  "author": "$(json_escape "$author")",
  "heroImage": "$(json_escape "$image")",
  "tags": [$tags_json],
  "content": {
    "root": {
      "type": "root",
      "format": "",
      "indent": 0,
      "version": 1,
      "children": [
        {
          "type": "paragraph",
          "version": 1,
          "children": [
            {
              "type": "text",
              "version": 1,
              "text": "$(json_escape "$content")",
              "mode": "normal",
              "style": ""
            }
          ]
        }
      ]
    }
  }
}
JSONEOF

    echo "  Converted: $filename → $slug.json"
  done

  log_success "Conversion complete: $output_dir/"
}
|
|
|
|
# Emit a standalone TypeScript importer into <target>/scripts that loads
# each converted JSON document and creates a 'posts' entry via the Payload
# local API. The heredoc delimiter is quoted, so the TS source below is
# written verbatim with no shell expansion.
create_payload_import_script() {
  log_info "Creating Payload import script..."

  local scripts_dir="$TARGET_PATH/scripts"
  mkdir -p "$scripts_dir"

  cat > "$scripts_dir/import-content.ts" << 'TSEOF'
import { payload } from '../src/lib/payload'
import { promises as fs } from 'fs'
import path from 'path'

async function importContent() {
  const contentDir = path.join(process.cwd(), 'src/content-migration')

  try {
    const files = await fs.readdir(contentDir)
    const jsonFiles = files.filter(f => f.endsWith('.json'))

    for (const file of jsonFiles) {
      const filePath = path.join(contentDir, file)
      const content = JSON.parse(await fs.readFile(filePath, 'utf-8'))

      await payload.create({
        collection: 'posts',
        data: {
          title: content.title,
          slug: content.slug,
          createdAt: content.createdAt,
          updatedAt: content.updatedAt,
          meta: content.meta,
          author: content.author,
          heroImage: content.heroImage,
          tags: content.tags,
          content: content.content,
          _status: 'published',
        },
      })

      console.log(`Imported: ${content.title}`)
    }

    console.log(`\nSuccessfully imported ${jsonFiles.length} posts`)
  } catch (error) {
    console.error('Import failed:', error)
    process.exit(1)
  }
}

importContent()
TSEOF

  log_success "Created: $scripts_dir/import-content.ts"
}
|
|
|
|
# Write MIGRATION_REPORT.md into the target project summarizing the run:
# source path, backup location, post count, and manual follow-up steps.
# Reads globals: SOURCE_PATH, TARGET_PATH, CONTENT_DIR, BACKUP_DIR (the
# latter is set by backup_content earlier in main's sequence).
create_migration_report() {
  log_info "Creating migration report..."

  cd "$SOURCE_PATH"

  # Declaration split from assignment so the find pipeline's status is not
  # masked by `local` always returning 0.
  local page_count
  page_count=$(find "$CONTENT_DIR" -type f \( -name "*.md" -o -name "*.mdx" \) 2>/dev/null | wc -l)

  # Unquoted delimiter: $vars and $(date) expand; code fences are \`-escaped.
  cat > "$TARGET_PATH/MIGRATION_REPORT.md" << EOF
# Migration Report: Astro → Payload CMS

## Source
- **Type:** Astro
- **Path:** $SOURCE_PATH
- **Backup:** $BACKUP_DIR
- **Date:** $(date)

## Statistics
- **Total Posts:** $page_count

## Content Migration

Content has been converted to Payload CMS Lexical JSON format in:
\`\`\`
src/content-migration/
\`\`\`

## Next Steps

1. **Review converted content:**
   \`\`\`bash
   ls src/content-migration/
   \`\`\`

2. **Configure Payload collection:**
   Make sure you have a 'posts' collection in \`src/collections/Posts.ts\`

3. **Import content to Payload:**
   \`\`\`bash
   npx tsx scripts/import-content.ts
   \`\`\`

4. **Verify in admin:**
   - Go to http://localhost:3002/admin
   - Navigate to Posts collection
   - Verify content and rich text editor (Lexical)

## Notes

- MDX/Markdown content is converted to Lexical JSON format
- Frontmatter fields (title, date, description) are mapped to collection fields
- Complex MDX components need manual conversion in Payload admin
- Images need to be re-uploaded to Payload Media
EOF

  log_success "Migration report: $TARGET_PATH/MIGRATION_REPORT.md"
}
|
|
|
|
# Entry point: validate arguments and run the migration pipeline in order
# (detect → backup → analyze → convert → importer → report).
main() {
  echo "=============================================="
  echo " Astro → Payload CMS Migration Tool"
  echo " Convert MDX/MD to Payload CMS with Lexical"
  echo "=============================================="
  echo ""

  # Help flag. ${1:-} keeps this safe when no arguments were given (the
  # original bare "$1" would be unset) and handles -h/--help in one place.
  case "${1:-}" in
    -h|--help)
      print_usage
      exit 0
      ;;
  esac

  if [ -z "$SOURCE_PATH" ]; then
    print_usage
    echo ""
    log_error "Please specify source path"
    exit 1
  fi

  if [ ! -d "$SOURCE_PATH" ]; then
    log_error "Source path not found: $SOURCE_PATH"
    exit 1
  fi

  if [ ! -d "$TARGET_PATH" ]; then
    log_error "Target path not found: $TARGET_PATH"
    exit 1
  fi

  detect_content_type
  backup_content
  analyze_content
  create_lexical_content
  create_payload_import_script
  create_migration_report

  echo ""
  echo "=============================================="
  log_success "Migration preparation complete!"
  echo "=============================================="
  echo ""
  echo "Next steps:"
  echo "  1. cd $TARGET_PATH"
  echo "  2. Review converted content in src/content-migration/"
  echo "  3. Run: npm run dev"
  echo "  4. Import: npx tsx scripts/import-content.ts"
  echo "  5. Verify in Payload admin (http://localhost:3002/admin)"
  echo ""
}
|
|
|
|
# Run the pipeline, forwarding all CLI arguments.
# (Removed a dangling trailing `|` that would have piped main's output
# into nothing and produced a syntax error at end of file.)
main "$@"