Backup Dyad on new versions (#595)
e2e-tests/backup.spec.ts (new file, 230 lines)
@@ -0,0 +1,230 @@
import * as path from "path";
import * as fs from "fs";
import * as crypto from "crypto";
import { testWithConfig, test, PageObject } from "./helpers/test_helper";
import { expect } from "@playwright/test";

const testWithLastVersion = testWithConfig({
  preLaunchHook: async ({ userDataDir }) => {
    fs.mkdirSync(path.join(userDataDir), { recursive: true });
    fs.writeFileSync(path.join(userDataDir, ".last_version"), "0.1.0");
    fs.copyFileSync(
      path.join(__dirname, "fixtures", "backups", "empty-v0.12.0-beta.1.db"),
      path.join(userDataDir, "sqlite.db"),
    );
  },
});

const testWithMultipleBackups = testWithConfig({
  preLaunchHook: async ({ userDataDir }) => {
    fs.mkdirSync(path.join(userDataDir), { recursive: true });
    // Make sure there's a last version file so the version upgrade is detected.
    fs.writeFileSync(path.join(userDataDir, ".last_version"), "0.1.0");

    // Create backups directory
    const backupsDir = path.join(userDataDir, "backups");
    fs.mkdirSync(backupsDir, { recursive: true });

    // Create 5 mock backup directories with different timestamps
    // These timestamps are in ascending order (oldest to newest)
    const mockBackups = [
      {
        name: "v1.0.0_2023-01-01T10-00-00-000Z_upgrade_from_0.9.0",
        timestamp: "2023-01-01T10:00:00.000Z",
        version: "1.0.0",
        reason: "upgrade_from_0.9.0",
      },
      {
        name: "v1.0.1_2023-01-02T10-00-00-000Z_upgrade_from_1.0.0",
        timestamp: "2023-01-02T10:00:00.000Z",
        version: "1.0.1",
        reason: "upgrade_from_1.0.0",
      },
      {
        name: "v1.0.2_2023-01-03T10-00-00-000Z_upgrade_from_1.0.1",
        timestamp: "2023-01-03T10:00:00.000Z",
        version: "1.0.2",
        reason: "upgrade_from_1.0.1",
      },
      {
        name: "v1.0.3_2023-01-04T10-00-00-000Z_upgrade_from_1.0.2",
        timestamp: "2023-01-04T10:00:00.000Z",
        version: "1.0.3",
        reason: "upgrade_from_1.0.2",
      },
      {
        name: "v1.0.4_2023-01-05T10-00-00-000Z_upgrade_from_1.0.3",
        timestamp: "2023-01-05T10:00:00.000Z",
        version: "1.0.4",
        reason: "upgrade_from_1.0.3",
      },
    ];

    // Create each backup directory with realistic structure
    for (const backup of mockBackups) {
      const backupPath = path.join(backupsDir, backup.name);
      fs.mkdirSync(backupPath, { recursive: true });

      // Create backup metadata
      const metadata = {
        version: backup.version,
        timestamp: backup.timestamp,
        reason: backup.reason,
        files: {
          settings: true,
          database: true,
        },
        checksums: {
          settings: "mock_settings_checksum_" + backup.version,
          database: "mock_database_checksum_" + backup.version,
        },
      };

      fs.writeFileSync(
        path.join(backupPath, "backup.json"),
        JSON.stringify(metadata, null, 2),
      );

      // Create mock backup files
      fs.writeFileSync(
        path.join(backupPath, "user-settings.json"),
        JSON.stringify({ version: backup.version, mockData: true }, null, 2),
      );

      fs.writeFileSync(
        path.join(backupPath, "sqlite.db"),
        `mock_database_content_${backup.version}`,
      );
    }
  },
});

const ensureAppIsRunning = async (po: PageObject) => {
  await po.page.waitForSelector("h1");
  const text = await po.page.$eval("h1", (el) => el.textContent);
  expect(text).toBe("Build your dream app");
};

test("backup is not created for first run", async ({ po }) => {
  await ensureAppIsRunning(po);

  expect(fs.existsSync(path.join(po.userDataDir, "backups"))).toEqual(false);
});

testWithLastVersion(
  "backup is created if version is upgraded",
  async ({ po }) => {
    await ensureAppIsRunning(po);

    const backups = fs.readdirSync(path.join(po.userDataDir, "backups"));
    expect(backups).toHaveLength(1);
    const backupDir = path.join(po.userDataDir, "backups", backups[0]);
    const backupMetadata = JSON.parse(
      fs.readFileSync(path.join(backupDir, "backup.json"), "utf8"),
    );

    expect(backupMetadata.version).toBeDefined();
    expect(backupMetadata.timestamp).toBeDefined();
    expect(backupMetadata.reason).toBe("upgrade_from_0.1.0");
    expect(backupMetadata.files.settings).toBe(true);
    expect(backupMetadata.files.database).toBe(true);
    expect(backupMetadata.checksums.settings).toBeDefined();
    expect(backupMetadata.checksums.database).toBeDefined();

    // Compare the backup files to the original files
    const originalSettings = fs.readFileSync(
      path.join(po.userDataDir, "user-settings.json"),
      "utf8",
    );
    const backupSettings = fs.readFileSync(
      path.join(backupDir, "user-settings.json"),
      "utf8",
    );
    expect(cleanSettings(backupSettings)).toEqual(
      cleanSettings(originalSettings),
    );

    // For database, verify the backup file exists and has correct checksum
    const backupDbPath = path.join(backupDir, "sqlite.db");
    const originalDbPath = path.join(po.userDataDir, "sqlite.db");

    expect(fs.existsSync(backupDbPath)).toBe(true);
    expect(fs.existsSync(originalDbPath)).toBe(true);

    const backupChecksum = calculateChecksum(backupDbPath);
    // Verify backup metadata contains the correct checksum
    expect(backupMetadata.checksums.database).toBe(backupChecksum);
  },
);

testWithMultipleBackups(
  "backup cleanup deletes oldest backups when exceeding MAX_BACKUPS",
  async ({ po }) => {
    await ensureAppIsRunning(po);

    const backupsDir = path.join(po.userDataDir, "backups");
    const backups = fs.readdirSync(backupsDir);

    // Should have only 3 backups remaining (MAX_BACKUPS = 3)
    expect(backups).toHaveLength(3);

    const expectedRemainingBackups = [
      "*",
      // These are the two newest of the pre-existing mock backups
      "v1.0.4_2023-01-05T10-00-00-000Z_upgrade_from_1.0.3",
      "v1.0.3_2023-01-04T10-00-00-000Z_upgrade_from_1.0.2",
    ];

    // Check that the expected backups exist
    for (let backup of expectedRemainingBackups) {
      let expectedBackup = backup;
      if (backup === "*") {
        expectedBackup = backups[0];
        expect(expectedBackup.endsWith("_upgrade_from_0.1.0")).toEqual(true);
      } else {
        expect(backups).toContain(expectedBackup);
      }

      // Verify the backup directory and metadata still exist
      const backupPath = path.join(backupsDir, expectedBackup);
      expect(fs.existsSync(backupPath)).toBe(true);
      expect(fs.existsSync(path.join(backupPath, "backup.json"))).toBe(true);
      expect(fs.existsSync(path.join(backupPath, "user-settings.json"))).toBe(
        true,
      );

      // The first backup does NOT have a SQLite database because the backup
      // manager is run before the DB is initialized.
      expect(fs.existsSync(path.join(backupPath, "sqlite.db"))).toBe(
        backup !== "*",
      );
    }

    // The 3 oldest backups should have been deleted
    const deletedBackups = [
      "v1.0.0_2023-01-01T10-00-00-000Z_upgrade_from_0.9.0", // oldest
      "v1.0.1_2023-01-02T10-00-00-000Z_upgrade_from_1.0.0", // second oldest
      "v1.0.2_2023-01-03T10-00-00-000Z_upgrade_from_1.0.1", // third oldest
    ];

    for (const deletedBackup of deletedBackups) {
      expect(backups).not.toContain(deletedBackup);
      expect(fs.existsSync(path.join(backupsDir, deletedBackup))).toBe(false);
    }
  },
);

function cleanSettings(settings: string) {
  const parsed = JSON.parse(settings);
  delete parsed.hasRunBefore;
  delete parsed.isTestMode;
  delete parsed.lastShownReleaseNotesVersion;
  return parsed;
}

function calculateChecksum(filePath: string): string {
  const fileBuffer = fs.readFileSync(filePath);
  const hash = crypto.createHash("sha256");
  hash.update(fileBuffer);
  return hash.digest("hex");
}
BIN e2e-tests/fixtures/backups/empty-v0.12.0-beta.1.db (new file)
Binary file not shown.
e2e-tests/helpers/test_helper.ts
@@ -187,7 +187,7 @@ class GitHubConnector {
 }
 
 export class PageObject {
-  private userDataDir: string;
+  public userDataDir: string;
   public githubConnector: GitHubConnector;
   constructor(
     public electronApp: ElectronApplication,
@@ -935,15 +935,27 @@ export class PageObject {
   }
 }
 
+interface ElectronConfig {
+  preLaunchHook?: ({ userDataDir }: { userDataDir: string }) => Promise<void>;
+}
+
 // From https://github.com/microsoft/playwright/issues/8208#issuecomment-1435475930
 //
 // Note how we mark the fixture as { auto: true }.
 // This way it is always instantiated, even if the test does not use it explicitly.
 export const test = base.extend<{
+  electronConfig: ElectronConfig;
   attachScreenshotsToReport: void;
   electronApp: ElectronApplication;
   po: PageObject;
 }>({
+  electronConfig: [
+    async ({}, use) => {
+      // Default configuration - tests can override this fixture
+      await use({});
+    },
+    { auto: true },
+  ],
   po: [
     async ({ electronApp }, use) => {
       const page = await electronApp.firstWindow();
@@ -976,7 +988,7 @@ export const test = base.extend<{
     { auto: true },
   ],
   electronApp: [
-    async ({}, use) => {
+    async ({ electronConfig }, use) => {
       // find the latest build in the out directory
       const latestBuild = eph.findLatestBuild();
       // parse the directory and find paths and other info
@@ -990,15 +1002,15 @@ export const test = base.extend<{
       // This is just a hack to avoid the AI setup screen.
       process.env.OPENAI_API_KEY = "sk-test";
       const baseTmpDir = os.tmpdir();
-      const USER_DATA_DIR = path.join(
-        baseTmpDir,
-        `dyad-e2e-tests-${Date.now()}`,
-      );
+      const userDataDir = path.join(baseTmpDir, `dyad-e2e-tests-${Date.now()}`);
+      if (electronConfig.preLaunchHook) {
+        await electronConfig.preLaunchHook({ userDataDir });
+      }
       const electronApp = await electron.launch({
         args: [
           appInfo.main,
           "--enable-logging",
-          `--user-data-dir=${USER_DATA_DIR}`,
+          `--user-data-dir=${userDataDir}`,
         ],
         executablePath: appInfo.executable,
         // Strong suspicion this is causing issues on Windows with tests hanging due to error:
@@ -1007,7 +1019,7 @@ export const test = base.extend<{
       //   dir: "test-results",
       // },
       });
-      (electronApp as any).$dyadUserDataDir = USER_DATA_DIR;
+      (electronApp as any).$dyadUserDataDir = userDataDir;
 
       console.log("electronApp launched!");
       if (showDebugLogs) {
@@ -1064,6 +1076,14 @@ export const test = base.extend<{
   ],
 });
 
+export function testWithConfig(config: ElectronConfig) {
+  return test.extend({
+    electronConfig: async ({}, use) => {
+      await use(config);
+    },
+  });
+}
+
 // Wrapper that skips tests on Windows platform
 export const testSkipIfWindows = os.platform() === "win32" ? test.skip : test;

@@ -1,6 +1,6 @@
- img
- text: 1 error
- button "Recheck":
- button "Run checks":
- img
- button "Fix All":
- img

@@ -1,3 +1,4 @@
- paragraph: No problems found
- button "Recheck":
- img
- button "Run checks":
- img

@@ -1,6 +1,6 @@
- img
- text: 1 error
- button "Recheck":
- button "Run checks":
- img
- button "Fix All":
- img

@@ -1,3 +1,4 @@
- paragraph: No problems found
- button "Recheck":
- img
- button "Run checks":
- img

src/backup_manager.ts (new file, 390 lines)
@@ -0,0 +1,390 @@
import * as path from "path";
import * as fs from "fs/promises";
import { app } from "electron";
import * as crypto from "crypto";
import log from "electron-log";
import Database from "better-sqlite3";

const logger = log.scope("backup_manager");

const MAX_BACKUPS = 3;

interface BackupManagerOptions {
  settingsFile: string;
  dbFile: string;
}

interface BackupMetadata {
  version: string;
  timestamp: string;
  reason: string;
  files: {
    settings: boolean;
    database: boolean;
  };
  checksums: {
    settings: string | null;
    database: string | null;
  };
}

interface BackupInfo extends BackupMetadata {
  name: string;
}

export class BackupManager {
  private readonly maxBackups: number;
  private readonly settingsFilePath: string;
  private readonly dbFilePath: string;
  private userDataPath!: string;
  private backupBasePath!: string;

  constructor(options: BackupManagerOptions) {
    this.maxBackups = MAX_BACKUPS;
    this.settingsFilePath = options.settingsFile;
    this.dbFilePath = options.dbFile;
  }

  /**
   * Initialize backup system - call this on app ready
   */
  async initialize(): Promise<void> {
    logger.info("Initializing backup system...");

    // Set paths after app is ready
    this.userDataPath = app.getPath("userData");
    this.backupBasePath = path.join(this.userDataPath, "backups");

    logger.info(
      `Backup system paths - UserData: ${this.userDataPath}, Backups: ${this.backupBasePath}`,
    );

    // Check if this is a version upgrade
    const currentVersion = app.getVersion();
    const lastVersion = await this.getLastRunVersion();

    if (lastVersion === null) {
      logger.info("No previous version found, skipping backup");
      return;
    }

    if (lastVersion === currentVersion) {
      logger.info(
        `No version upgrade detected. Current version: ${currentVersion}`,
      );
      return;
    }

    // Ensure backup directory exists
    await fs.mkdir(this.backupBasePath, { recursive: true });
    logger.debug("Backup directory created/verified");

    logger.info(`Version upgrade detected: ${lastVersion} → ${currentVersion}`);
    await this.createBackup(`upgrade_from_${lastVersion}`);

    // Save current version
    await this.saveCurrentVersion(currentVersion);

    // Clean up old backups
    await this.cleanupOldBackups();
    logger.info("Backup system initialized successfully");
  }

  /**
   * Create a backup of settings and database
   */
  async createBackup(reason: string = "manual"): Promise<string> {
    const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
    const version = app.getVersion();
    const backupName = `v${version}_${timestamp}_${reason}`;
    const backupPath = path.join(this.backupBasePath, backupName);

    logger.info(`Creating backup: ${backupName} (reason: ${reason})`);

    try {
      // Create backup directory
      await fs.mkdir(backupPath, { recursive: true });
      logger.debug(`Backup directory created: ${backupPath}`);

      // Backup settings file
      const settingsBackupPath = path.join(
        backupPath,
        path.basename(this.settingsFilePath),
      );
      const settingsExists = await this.fileExists(this.settingsFilePath);

      if (settingsExists) {
        await fs.copyFile(this.settingsFilePath, settingsBackupPath);
        logger.info("Settings backed up successfully");
      } else {
        logger.debug("Settings file not found, skipping settings backup");
      }

      // Backup SQLite database
      const dbBackupPath = path.join(
        backupPath,
        path.basename(this.dbFilePath),
      );
      const dbExists = await this.fileExists(this.dbFilePath);

      if (dbExists) {
        await this.backupSQLiteDatabase(this.dbFilePath, dbBackupPath);
        logger.info("Database backed up successfully");
      } else {
        logger.debug("Database file not found, skipping database backup");
      }

      // Create backup metadata
      const metadata: BackupMetadata = {
        version,
        timestamp: new Date().toISOString(),
        reason,
        files: {
          settings: settingsExists,
          database: dbExists,
        },
        checksums: {
          settings: settingsExists
            ? await this.getFileChecksum(settingsBackupPath)
            : null,
          database: dbExists ? await this.getFileChecksum(dbBackupPath) : null,
        },
      };

      await fs.writeFile(
        path.join(backupPath, "backup.json"),
        JSON.stringify(metadata, null, 2),
      );

      logger.info(`Backup created successfully: ${backupName}`);
      return backupPath;
    } catch (error) {
      logger.error("Backup failed:", error);
      // Clean up failed backup
      try {
        await fs.rm(backupPath, { recursive: true, force: true });
        logger.debug("Failed backup directory cleaned up");
      } catch (cleanupError) {
        logger.error("Failed to clean up backup directory:", cleanupError);
      }
      throw new Error(`Backup creation failed: ${error}`);
    }
  }

  /**
   * List all available backups
   */
  async listBackups(): Promise<BackupInfo[]> {
    try {
      const entries = await fs.readdir(this.backupBasePath, {
        withFileTypes: true,
      });
      const backups: BackupInfo[] = [];

      logger.debug(`Found ${entries.length} entries in backup directory`);

      for (const entry of entries) {
        if (entry.isDirectory()) {
          const metadataPath = path.join(
            this.backupBasePath,
            entry.name,
            "backup.json",
          );

          try {
            const metadataContent = await fs.readFile(metadataPath, "utf8");
            const metadata: BackupMetadata = JSON.parse(metadataContent);
            backups.push({
              name: entry.name,
              ...metadata,
            });
          } catch (error) {
            logger.warn(`Invalid backup found: ${entry.name}`, error);
          }
        }
      }

      logger.info(`Found ${backups.length} valid backups`);

      // Sort by timestamp, newest first
      return backups.sort(
        (a, b) =>
          new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime(),
      );
    } catch (error) {
      logger.error("Failed to list backups:", error);
      return [];
    }
  }

  /**
   * Clean up old backups, keeping only the most recent ones
   */
  async cleanupOldBackups(): Promise<void> {
    try {
      const backups = await this.listBackups();

      if (backups.length <= this.maxBackups) {
        logger.debug(
          `No cleanup needed - ${backups.length} backups (max: ${this.maxBackups})`,
        );
        return;
      }

      // Keep the newest backups
      const backupsToDelete = backups.slice(this.maxBackups);

      logger.info(
        `Cleaning up ${backupsToDelete.length} old backups (keeping ${this.maxBackups} most recent)`,
      );

      for (const backup of backupsToDelete) {
        const backupPath = path.join(this.backupBasePath, backup.name);
        await fs.rm(backupPath, { recursive: true, force: true });
        logger.debug(`Deleted old backup: ${backup.name}`);
      }

      logger.info("Old backup cleanup completed");
    } catch (error) {
      logger.error("Failed to clean up old backups:", error);
    }
  }

  /**
   * Delete a specific backup
   */
  async deleteBackup(backupName: string): Promise<void> {
    const backupPath = path.join(this.backupBasePath, backupName);

    logger.info(`Deleting backup: ${backupName}`);

    try {
      await fs.rm(backupPath, { recursive: true, force: true });
      logger.info(`Deleted backup: ${backupName}`);
    } catch (error) {
      logger.error(`Failed to delete backup ${backupName}:`, error);
      throw new Error(`Failed to delete backup: ${error}`);
    }
  }

  /**
   * Get backup size in bytes
   */
  async getBackupSize(backupName: string): Promise<number> {
    const backupPath = path.join(this.backupBasePath, backupName);
    logger.debug(`Calculating size for backup: ${backupName}`);

    const size = await this.getDirectorySize(backupPath);
    logger.debug(`Backup ${backupName} size: ${size} bytes`);

    return size;
  }

  /**
   * Backup SQLite database safely
   */
  private async backupSQLiteDatabase(
    sourcePath: string,
    destPath: string,
  ): Promise<void> {
    logger.debug(`Backing up SQLite database: ${sourcePath} → ${destPath}`);
    const sourceDb = new Database(sourcePath, {
      readonly: true,
      timeout: 10000,
    });

    try {
      // This is safe even if other connections are active
      await sourceDb.backup(destPath);
      logger.info("Database backup completed successfully");
    } catch (error) {
      logger.error("Database backup failed:", error);
      throw error;
    } finally {
      // Always close the temporary connection
      sourceDb.close();
    }
  }

  /**
   * Helper: Check if file exists
   */
  private async fileExists(filePath: string): Promise<boolean> {
    try {
      await fs.access(filePath);
      return true;
    } catch {
      return false;
    }
  }

  /**
   * Helper: Calculate file checksum
   */
  private async getFileChecksum(filePath: string): Promise<string | null> {
    try {
      const fileBuffer = await fs.readFile(filePath);
      const hash = crypto.createHash("sha256");
      hash.update(fileBuffer);
      const checksum = hash.digest("hex");
      logger.debug(
        `Checksum calculated for ${filePath}: ${checksum.substring(0, 8)}...`,
      );
      return checksum;
    } catch (error) {
      logger.error(`Failed to calculate checksum for ${filePath}:`, error);
      return null;
    }
  }

  /**
   * Helper: Get directory size recursively
   */
  private async getDirectorySize(dirPath: string): Promise<number> {
    let size = 0;

    try {
      const entries = await fs.readdir(dirPath, { withFileTypes: true });

      for (const entry of entries) {
        const fullPath = path.join(dirPath, entry.name);

        if (entry.isDirectory()) {
          size += await this.getDirectorySize(fullPath);
        } else {
          const stats = await fs.stat(fullPath);
          size += stats.size;
        }
      }
    } catch (error) {
      logger.error(`Failed to calculate directory size for ${dirPath}:`, error);
    }

    return size;
  }

  /**
   * Helper: Get last run version
   */
  private async getLastRunVersion(): Promise<string | null> {
    try {
      const versionFile = path.join(this.userDataPath, ".last_version");
      const version = await fs.readFile(versionFile, "utf8");
      const trimmedVersion = version.trim();
      logger.debug(`Last run version retrieved: ${trimmedVersion}`);
      return trimmedVersion;
    } catch {
      logger.debug("No previous version file found");
      return null;
    }
  }

  /**
   * Helper: Save current version
   */
  private async saveCurrentVersion(version: string): Promise<void> {
    const versionFile = path.join(this.userDataPath, ".last_version");
    await fs.writeFile(versionFile, version, "utf8");
    logger.debug(`Current version saved: ${version}`);
  }
}
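For orientation, here is a minimal sketch of how this manager is meant to be driven at startup. It mirrors the src/main.ts wiring later in this diff (error handling omitted; the two path helpers are the ones main.ts imports):

import { app } from "electron";
import { BackupManager } from "./backup_manager";
import { getSettingsFilePath } from "./main/settings";
import { getDatabasePath, initializeDatabase } from "./db";

app.whenReady().then(async () => {
  // Snapshot settings + DB before any migration can touch them.
  const backupManager = new BackupManager({
    settingsFile: getSettingsFilePath(),
    dbFile: getDatabasePath(),
  });
  await backupManager.initialize();

  // Only open (and migrate) the database after the backup pass has run.
  initializeDatabase();
});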
@@ -1,3 +1,4 @@
// db.ts
import {
  type BetterSQLite3Database,
  drizzle,
@@ -8,7 +9,6 @@ import { migrate } from "drizzle-orm/better-sqlite3/migrator";
import path from "node:path";
import fs from "node:fs";
import { getDyadAppPath, getUserDataPath } from "../paths/paths";

import log from "electron-log";

const logger = log.scope("db");
@@ -36,10 +36,8 @@ export function initializeDatabase(): BetterSQLite3Database<typeof schema> & {

  // Check if the database file exists and remove it if it has issues
  try {
    // If the file exists but is empty or corrupted, it might cause issues
    if (fs.existsSync(dbPath)) {
      const stats = fs.statSync(dbPath);
      // If the file is very small, it might be corrupted
      if (stats.size < 100) {
        logger.log("Database file exists but may be corrupted. Removing it...");
        fs.unlinkSync(dbPath);
@@ -50,16 +48,11 @@ export function initializeDatabase(): BetterSQLite3Database<typeof schema> & {
  }

  fs.mkdirSync(getUserDataPath(), { recursive: true });
  // Just a convenient time to create it.
  fs.mkdirSync(getDyadAppPath("."), { recursive: true });

  // Open the database with a higher timeout
  const sqlite = new Database(dbPath, { timeout: 10000 });

  // Enable foreign key constraints
  sqlite.pragma("foreign_keys = ON");

  // Create DB instance with schema
  _db = drizzle(sqlite, { schema });

  try {
@@ -77,13 +70,25 @@ export function initializeDatabase(): BetterSQLite3Database<typeof schema> & {
   return _db as any;
 }
 
-// Initialize database on import
-try {
-  initializeDatabase();
-} catch (error) {
-  logger.error("Failed to initialize database:", error);
-}
+/**
+ * Get the database instance (throws if not initialized)
+ */
+export function getDb(): BetterSQLite3Database<typeof schema> & {
+  $client: Database.Database;
+} {
+  if (!_db) {
+    throw new Error(
+      "Database not initialized. Call initializeDatabase() first.",
+    );
+  }
+  return _db as any;
+}
 
-export const db = _db as any as BetterSQLite3Database<typeof schema> & {
+export const db = new Proxy({} as any, {
+  get(target, prop) {
+    const database = getDb();
+    return database[prop as keyof typeof database];
+  },
+}) as BetterSQLite3Database<typeof schema> & {
   $client: Database.Database;
 };

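A quick sketch of what the Proxy-based export changes for callers: the module no longer opens the database at import time, and touching db before initializeDatabase() has run now fails loudly with the error shown above (illustrative only; the import path is whatever resolves to this module):

import { db, initializeDatabase } from "./db";

// Any property access before initialization throws:
//   "Database not initialized. Call initializeDatabase() first."
initializeDatabase();

// After initialization, the Proxy forwards every property access to the
// real Drizzle instance, so existing call sites keep working unchanged.
const sqlite = db.$client; // the underlying better-sqlite3 Database
sqlite.pragma("foreign_keys");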
src/main.ts (29 changed lines)
@@ -6,10 +6,16 @@ import dotenv from "dotenv";
 import started from "electron-squirrel-startup";
 import { updateElectronApp, UpdateSourceType } from "update-electron-app";
 import log from "electron-log";
-import { readSettings, writeSettings } from "./main/settings";
+import {
+  getSettingsFilePath,
+  readSettings,
+  writeSettings,
+} from "./main/settings";
 import { handleSupabaseOAuthReturn } from "./supabase_admin/supabase_return_handler";
 import { handleDyadProReturn } from "./main/pro";
 import { IS_TEST_BUILD } from "./ipc/utils/test_utils";
+import { BackupManager } from "./backup_manager";
+import { getDatabasePath, initializeDatabase } from "./db";
 
 log.errorHandler.startCatching();
 log.eventLogger.startLogging();
@@ -58,10 +64,19 @@ if (process.defaultApp) {
 }
 
 export async function onReady() {
-  await onFirstRunMaybe();
+  try {
+    const backupManager = new BackupManager({
+      settingsFile: getSettingsFilePath(),
+      dbFile: getDatabasePath(),
+    });
+    await backupManager.initialize();
+  } catch (e) {
+    logger.error("Error initializing backup manager", e);
+  }
+  initializeDatabase();
+  await onFirstRunMaybe();
   createWindow();
 }
 
 app.whenReady().then(onReady);
 
 /**
  * Is this the first run of Fiddle? If so, perform
@@ -164,11 +179,7 @@ if (!gotTheLock) {
     // the commandLine is array of strings in which last element is deep link url
     handleDeepLinkReturn(commandLine.pop()!);
   });
 
-  // Create mainWindow, load the rest of the app, etc...
-  app.whenReady().then(() => {
-    createWindow();
-  });
+  app.whenReady().then(onReady);
 }
 
 // Handle the protocol. In this case, we choose to show an Error Box.

src/main/settings.ts
@@ -27,7 +27,7 @@ const DEFAULT_SETTINGS: UserSettings = {
 
 const SETTINGS_FILE = "user-settings.json";
 
-function getSettingsFilePath(): string {
+export function getSettingsFilePath(): string {
   return path.join(getUserDataPath(), SETTINGS_FILE);
 }
 