Backup Dyad on new versions (#595)
e2e-tests/backup.spec.ts (new file, 230 lines added)
@@ -0,0 +1,230 @@
import * as path from "path";
import * as fs from "fs";
import * as crypto from "crypto";
import { testWithConfig, test, PageObject } from "./helpers/test_helper";
import { expect } from "@playwright/test";

const testWithLastVersion = testWithConfig({
  preLaunchHook: async ({ userDataDir }) => {
    fs.mkdirSync(path.join(userDataDir), { recursive: true });
    fs.writeFileSync(path.join(userDataDir, ".last_version"), "0.1.0");
    fs.copyFileSync(
      path.join(__dirname, "fixtures", "backups", "empty-v0.12.0-beta.1.db"),
      path.join(userDataDir, "sqlite.db"),
    );
  },
});

const testWithMultipleBackups = testWithConfig({
  preLaunchHook: async ({ userDataDir }) => {
    fs.mkdirSync(path.join(userDataDir), { recursive: true });
    // Make sure there's a last version file so the version upgrade is detected.
    fs.writeFileSync(path.join(userDataDir, ".last_version"), "0.1.0");

    // Create backups directory
    const backupsDir = path.join(userDataDir, "backups");
    fs.mkdirSync(backupsDir, { recursive: true });

    // Create 5 mock backup directories with different timestamps
    // These timestamps are in ascending order (oldest to newest)
    const mockBackups = [
      {
        name: "v1.0.0_2023-01-01T10-00-00-000Z_upgrade_from_0.9.0",
        timestamp: "2023-01-01T10:00:00.000Z",
        version: "1.0.0",
        reason: "upgrade_from_0.9.0",
      },
      {
        name: "v1.0.1_2023-01-02T10-00-00-000Z_upgrade_from_1.0.0",
        timestamp: "2023-01-02T10:00:00.000Z",
        version: "1.0.1",
        reason: "upgrade_from_1.0.0",
      },
      {
        name: "v1.0.2_2023-01-03T10-00-00-000Z_upgrade_from_1.0.1",
        timestamp: "2023-01-03T10:00:00.000Z",
        version: "1.0.2",
        reason: "upgrade_from_1.0.1",
      },
      {
        name: "v1.0.3_2023-01-04T10-00-00-000Z_upgrade_from_1.0.2",
        timestamp: "2023-01-04T10:00:00.000Z",
        version: "1.0.3",
        reason: "upgrade_from_1.0.2",
      },
      {
        name: "v1.0.4_2023-01-05T10-00-00-000Z_upgrade_from_1.0.3",
        timestamp: "2023-01-05T10:00:00.000Z",
        version: "1.0.4",
        reason: "upgrade_from_1.0.3",
      },
    ];

    // Create each backup directory with realistic structure
    for (const backup of mockBackups) {
      const backupPath = path.join(backupsDir, backup.name);
      fs.mkdirSync(backupPath, { recursive: true });

      // Create backup metadata
      const metadata = {
        version: backup.version,
        timestamp: backup.timestamp,
        reason: backup.reason,
        files: {
          settings: true,
          database: true,
        },
        checksums: {
          settings: "mock_settings_checksum_" + backup.version,
          database: "mock_database_checksum_" + backup.version,
        },
      };

      fs.writeFileSync(
        path.join(backupPath, "backup.json"),
        JSON.stringify(metadata, null, 2),
      );

      // Create mock backup files
      fs.writeFileSync(
        path.join(backupPath, "user-settings.json"),
        JSON.stringify({ version: backup.version, mockData: true }, null, 2),
      );

      fs.writeFileSync(
        path.join(backupPath, "sqlite.db"),
        `mock_database_content_${backup.version}`,
      );
    }
  },
});

const ensureAppIsRunning = async (po: PageObject) => {
  await po.page.waitForSelector("h1");
  const text = await po.page.$eval("h1", (el) => el.textContent);
  expect(text).toBe("Build your dream app");
};

test("backup is not created for first run", async ({ po }) => {
  await ensureAppIsRunning(po);

  expect(fs.existsSync(path.join(po.userDataDir, "backups"))).toEqual(false);
});

testWithLastVersion(
  "backup is created if version is upgraded",
  async ({ po }) => {
    await ensureAppIsRunning(po);

    const backups = fs.readdirSync(path.join(po.userDataDir, "backups"));
    expect(backups).toHaveLength(1);
    const backupDir = path.join(po.userDataDir, "backups", backups[0]);
    const backupMetadata = JSON.parse(
      fs.readFileSync(path.join(backupDir, "backup.json"), "utf8"),
    );

    expect(backupMetadata.version).toBeDefined();
    expect(backupMetadata.timestamp).toBeDefined();
    expect(backupMetadata.reason).toBe("upgrade_from_0.1.0");
    expect(backupMetadata.files.settings).toBe(true);
    expect(backupMetadata.files.database).toBe(true);
    expect(backupMetadata.checksums.settings).toBeDefined();
    expect(backupMetadata.checksums.database).toBeDefined();

    // Compare the backup files to the original files
    const originalSettings = fs.readFileSync(
      path.join(po.userDataDir, "user-settings.json"),
      "utf8",
    );
    const backupSettings = fs.readFileSync(
      path.join(backupDir, "user-settings.json"),
      "utf8",
    );
    expect(cleanSettings(backupSettings)).toEqual(
      cleanSettings(originalSettings),
    );

    // For database, verify the backup file exists and has correct checksum
    const backupDbPath = path.join(backupDir, "sqlite.db");
    const originalDbPath = path.join(po.userDataDir, "sqlite.db");

    expect(fs.existsSync(backupDbPath)).toBe(true);
    expect(fs.existsSync(originalDbPath)).toBe(true);

    const backupChecksum = calculateChecksum(backupDbPath);
    // Verify backup metadata contains the correct checksum
    expect(backupMetadata.checksums.database).toBe(backupChecksum);
  },
);

testWithMultipleBackups(
  "backup cleanup deletes oldest backups when exceeding MAX_BACKUPS",
  async ({ po }) => {
    await ensureAppIsRunning(po);

    const backupsDir = path.join(po.userDataDir, "backups");
    const backups = fs.readdirSync(backupsDir);

    // Should have only 3 backups remaining (MAX_BACKUPS = 3)
    expect(backups).toHaveLength(3);

    const expectedRemainingBackups = [
      // "*" stands for the backup created during this launch (its name is not deterministic).
      "*",
      // The two newest of the pre-existing mock backups
      "v1.0.4_2023-01-05T10-00-00-000Z_upgrade_from_1.0.3",
      "v1.0.3_2023-01-04T10-00-00-000Z_upgrade_from_1.0.2",
    ];

    // Check that the expected backups exist
    for (const backup of expectedRemainingBackups) {
      let expectedBackup = backup;
      if (backup === "*") {
        expectedBackup = backups[0];
        expect(expectedBackup.endsWith("_upgrade_from_0.1.0")).toEqual(true);
      } else {
        expect(backups).toContain(expectedBackup);
      }

      // Verify the backup directory and metadata still exist
      const backupPath = path.join(backupsDir, expectedBackup);
      expect(fs.existsSync(backupPath)).toBe(true);
      expect(fs.existsSync(path.join(backupPath, "backup.json"))).toBe(true);
      expect(fs.existsSync(path.join(backupPath, "user-settings.json"))).toBe(
        true,
      );

      // The first backup does NOT have a SQLite database because the backup
      // manager is run before the DB is initialized.
      expect(fs.existsSync(path.join(backupPath, "sqlite.db"))).toBe(
        backup !== "*",
      );
    }

    // The 3 oldest backups should have been deleted
    const deletedBackups = [
      "v1.0.0_2023-01-01T10-00-00-000Z_upgrade_from_0.9.0", // oldest
      "v1.0.1_2023-01-02T10-00-00-000Z_upgrade_from_1.0.0", // second oldest
      "v1.0.2_2023-01-03T10-00-00-000Z_upgrade_from_1.0.1", // third oldest
    ];

    for (const deletedBackup of deletedBackups) {
      expect(backups).not.toContain(deletedBackup);
      expect(fs.existsSync(path.join(backupsDir, deletedBackup))).toBe(false);
    }
  },
);

function cleanSettings(settings: string) {
  const parsed = JSON.parse(settings);
  delete parsed.hasRunBefore;
  delete parsed.isTestMode;
  delete parsed.lastShownReleaseNotesVersion;
  return parsed;
}

function calculateChecksum(filePath: string): string {
  const fileBuffer = fs.readFileSync(filePath);
  const hash = crypto.createHash("sha256");
  hash.update(fileBuffer);
  return hash.digest("hex");
}
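The backup manager itself is not part of this diff, so the following is only a rough sketch of the pruning behavior the cleanup test above asserts: keep the newest MAX_BACKUPS backup directories (sorted by the timestamp recorded in each backup.json, falling back to directory mtime) and delete the rest. The function name cleanupOldBackups and the fallback logic are assumptions for illustration, not the actual Dyad code.

import * as fs from "fs";
import * as path from "path";

const MAX_BACKUPS = 3; // implied by the test expecting exactly 3 remaining backups

function cleanupOldBackups(backupsDir: string, maxBackups = MAX_BACKUPS): void {
  if (!fs.existsSync(backupsDir)) return;

  // Collect backup directories together with their recorded timestamps.
  const backups = fs
    .readdirSync(backupsDir)
    .map((name) => {
      const dir = path.join(backupsDir, name);
      if (!fs.statSync(dir).isDirectory()) return null;
      let timestamp = fs.statSync(dir).mtimeMs;
      try {
        const meta = JSON.parse(
          fs.readFileSync(path.join(dir, "backup.json"), "utf8"),
        );
        timestamp = Date.parse(meta.timestamp); // e.g. "2023-01-05T10:00:00.000Z"
      } catch {
        // Fall back to directory mtime when metadata is missing or unreadable.
      }
      return { dir, timestamp };
    })
    .filter((b): b is { dir: string; timestamp: number } => b !== null)
    .sort((a, b) => a.timestamp - b.timestamp); // oldest first

  // Delete the oldest entries until only maxBackups remain.
  while (backups.length > maxBackups) {
    const oldest = backups.shift()!;
    fs.rmSync(oldest.dir, { recursive: true, force: true });
  }
}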
e2e-tests/fixtures/backups/empty-v0.12.0-beta.1.db (new binary file, not shown)
e2e-tests/helpers/test_helper.ts (modified)
@@ -187,7 +187,7 @@ class GitHubConnector {
}

export class PageObject {
-  private userDataDir: string;
+  public userDataDir: string;
  public githubConnector: GitHubConnector;
  constructor(
    public electronApp: ElectronApplication,
@@ -935,15 +935,27 @@ export class PageObject {
  }
}

interface ElectronConfig {
  preLaunchHook?: ({ userDataDir }: { userDataDir: string }) => Promise<void>;
}

// From https://github.com/microsoft/playwright/issues/8208#issuecomment-1435475930
//
// Note how we mark the fixture as { auto: true }.
// This way it is always instantiated, even if the test does not use it explicitly.
export const test = base.extend<{
  electronConfig: ElectronConfig;
  attachScreenshotsToReport: void;
  electronApp: ElectronApplication;
  po: PageObject;
}>({
  electronConfig: [
    async ({}, use) => {
      // Default configuration - tests can override this fixture
      await use({});
    },
    { auto: true },
  ],
  po: [
    async ({ electronApp }, use) => {
      const page = await electronApp.firstWindow();
@@ -976,7 +988,7 @@ export const test = base.extend<{
    { auto: true },
  ],
  electronApp: [
-    async ({}, use) => {
+    async ({ electronConfig }, use) => {
      // find the latest build in the out directory
      const latestBuild = eph.findLatestBuild();
      // parse the directory and find paths and other info
@@ -990,15 +1002,15 @@ export const test = base.extend<{
      // This is just a hack to avoid the AI setup screen.
      process.env.OPENAI_API_KEY = "sk-test";
      const baseTmpDir = os.tmpdir();
-      const USER_DATA_DIR = path.join(
-        baseTmpDir,
-        `dyad-e2e-tests-${Date.now()}`,
-      );
+      const userDataDir = path.join(baseTmpDir, `dyad-e2e-tests-${Date.now()}`);
      if (electronConfig.preLaunchHook) {
        await electronConfig.preLaunchHook({ userDataDir });
      }
      const electronApp = await electron.launch({
        args: [
          appInfo.main,
          "--enable-logging",
-          `--user-data-dir=${USER_DATA_DIR}`,
+          `--user-data-dir=${userDataDir}`,
        ],
        executablePath: appInfo.executable,
        // Strong suspicion this is causing issues on Windows with tests hanging due to error:
@@ -1007,7 +1019,7 @@ export const test = base.extend<{
        // dir: "test-results",
        // },
      });
-      (electronApp as any).$dyadUserDataDir = USER_DATA_DIR;
+      (electronApp as any).$dyadUserDataDir = userDataDir;

      console.log("electronApp launched!");
      if (showDebugLogs) {
@@ -1064,6 +1076,14 @@ export const test = base.extend<{
  ],
});

export function testWithConfig(config: ElectronConfig) {
  return test.extend({
    electronConfig: async ({}, use) => {
      await use(config);
    },
  });
}

// Wrapper that skips tests on Windows platform
export const testSkipIfWindows = os.platform() === "win32" ? test.skip : test;
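A quick usage note on the fixture plumbing above: testWithConfig overrides the { auto: true } electronConfig fixture, and the electronApp fixture runs preLaunchHook with the freshly created userDataDir before Electron is launched. A minimal consuming spec might look like the sketch below; the spec title and seeded file are illustrative only, and backup.spec.ts above is the real consumer in this commit.

import * as fs from "fs";
import * as path from "path";
import { expect } from "@playwright/test";
import { testWithConfig } from "./helpers/test_helper";

// Seed the user data dir before the app launches.
const testWithSeededVersion = testWithConfig({
  preLaunchHook: async ({ userDataDir }) => {
    fs.mkdirSync(userDataDir, { recursive: true });
    fs.writeFileSync(path.join(userDataDir, ".last_version"), "0.1.0");
  },
});

testWithSeededVersion("upgrade path sees the seeded version", async ({ po }) => {
  // po.userDataDir is the same directory the hook populated above.
  expect(fs.existsSync(path.join(po.userDataDir, ".last_version"))).toBe(true);
});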
@@ -1,6 +1,6 @@
- img
- text: 1 error
- button "Recheck":
- button "Run checks":
- img
- button "Fix All":
- img

@@ -1,3 +1,4 @@
- paragraph: No problems found
- button "Recheck":
- img
- button "Run checks":
- img

@@ -1,6 +1,6 @@
- img
- text: 1 error
- button "Recheck":
- button "Run checks":
- img
- button "Fix All":
- img

@@ -1,3 +1,4 @@
- paragraph: No problems found
- button "Recheck":
- img
- button "Run checks":
- img