import assert from "assert";
import fs from "fs/promises";
+import { type D1Database } from "@cloudflare/workers-types/experimental";
+import { ExecutionContext } from "ava";
import { Miniflare, MiniflareOptions } from "miniflare";
import { useTmp, utf8Encode } from "../../test-shared";
import { binding, getDatabase, opts, test } from "./test";
+import type { Context } from "./test";

export const SCHEMA = (
	tableColours: string,
@@ -580,3 +583,149 @@ test("it properly handles ROWS_AND_COLUMNS results format", async (t) => {
	}
	t.deepEqual(results, expectedResults);
});
+
+/**
+ * Test that the `dumpSql` method returns a valid SQL dump of the database.
+ * This test creates a new D1 database, fills it with dummy data, and then
+ * exports the SQL dump using the `PRAGMA miniflare_d1_export` command.
+ * It then executes the dump against a second D1 database and checks that
+ * both databases are equal in terms of schema and data.
+ */
+test("dumpSql exports and imports complete database structure and content correctly", async (t) => {
+	// Create two Miniflare instances with a D1 database each: one to export
+	// from and one to import the dump into
+	const originalMF = new Miniflare({
+		...opts,
+		d1Databases: { test: "test" },
+	});
+	const mirrorMF = new Miniflare({
+		...opts,
+		d1Databases: { test: "test" },
+	});
+
+	t.teardown(() => originalMF.dispose());
+	t.teardown(() => mirrorMF.dispose());
+
+	const originalDb = await originalMF.getD1Database("test");
+	const mirrorDb = await mirrorMF.getD1Database("test");
+
+	// Fill the original database with dummy data
+	await fillDummyData(originalDb);
+
+	// Export the database schema and data
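+	// The first two bindings are assumed to be the PRAGMA's noSchema/noData
+	// flags (0, 0 keeps both); any further bindings would name specific tables
+	// to export, so none are passed here in order to dump everything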
+	const result = await originalDb
+		.prepare("PRAGMA miniflare_d1_export(?,?,?);")
+		.bind(0, 0)
+		.raw();
+
+	const [dumpStatements] = result as [string[]];
+	const dump = dumpStatements.join("\n");
+
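+	// Replay the dump in the mirror database; D1's exec() runs the
+	// newline-separated statements one after another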
+	await mirrorDb.exec(dump);
+
+	// Verify that the schema and data in both databases are equal
+	await isDatabaseEqual(t, originalDb, mirrorDb);
+});
+
+/**
+ * Populates a D1 database with test data for schema export testing.
+ * Creates tables with various schema features (foreign keys, special characters, etc.)
+ * and inserts sample data including edge cases like NULL values and type mismatches.
+ */
+async function fillDummyData(db: D1Database) {
+	// Create a schema with various SQL features to test export compatibility.
+	// Each table must have an ID column as primary key so that we can use it
+	// for ordering in the equality tests.
+	const schemas = [
+		// Basic table with a text primary key and a BLOB column
+		`CREATE TABLE "classrooms"(id TEXT PRIMARY KEY, capacity INTEGER, test_blob BLOB)`,
+
+		// Table with a foreign key constraint
+		`CREATE TABLE "students" (id INTEGER PRIMARY KEY, name TEXT NOT NULL, classroom TEXT NOT NULL, FOREIGN KEY (classroom) REFERENCES "classrooms" (id) ON DELETE CASCADE)`,
+
+		// Table with spaces in its name to test identifier quoting
+		`CREATE TABLE "test space table" (id INTEGER PRIMARY KEY, name TEXT NOT NULL)`,
+
+		// Table with escaped quotes and an SQL reserved keyword as a column name
+		`CREATE TABLE "test""name" (id INTEGER PRIMARY KEY, "escaped""column" TEXT, "order" INTEGER)`,
+	];
+
+	await db.exec(schemas.join(";"));
+
+	// Prepare sample data
+	const classroomData = [
+		// Standard numeric data
+		...Array.from({ length: 10 }, (_, i) => ({
+			id: `classroom_${i + 1}`,
+			capacity: (i + 1) * 10,
+			test_blob: utf8Encode(`Blob data for classroom ${i + 1}`),
+		})),
+
+		// Edge case: type mismatch (string where a number is expected)
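+		// (SQLite's flexible typing stores the string as-is despite the column's
+		// INTEGER affinity, so the original value should survive the dump)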
+		{ id: "different_type_classroom", capacity: "not_a_number" },
+
+		// Edge case: NULL value
+		{ id: "null_classroom", capacity: null },
+	];
+
+	// Insert classroom data (test_blob is only present on the standard rows)
+	const classroomStmt = db.prepare(
+		`INSERT INTO classrooms (id, capacity, test_blob) VALUES (?, ?, ?)`
+	);
+
+	for (const classroom of classroomData) {
+		await classroomStmt
+			.bind(
+				classroom.id,
+				classroom.capacity,
+				"test_blob" in classroom ? classroom.test_blob : null
+			)
+			.run();
+	}
+
+	// Generate and insert student data with classroom references
+	const studentStmt = db.prepare(
+		`INSERT INTO students (id, name, classroom) VALUES (?, ?, ?)`
+	);
+
+	// Create two students for each classroom (student IDs 1 to 20)
+	for (let i = 0; i < 10; i++) {
+		for (let j = 1; j <= 2; j++) {
+			const studentId = i * 2 + j;
+			await studentStmt
+				.bind(studentId, `student_${studentId}`, `classroom_${i + 1}`)
+				.run();
+		}
+	}
+}
+
+/**
+ * Compares two D1 databases to check that they are equal in terms of schema
+ * and data. It retrieves the schema of both databases, compares the tables,
+ * and then checks that the data in each table is identical.
+ */
+async function isDatabaseEqual(
+	t: ExecutionContext<Context>,
+	originalDb: D1Database,
+	mirrorDb: D1Database
+) {
+	// SQL to select the schema, excluding internal tables
+	const selectSchemaSQL =
+		"SELECT * FROM sqlite_master WHERE type = 'table' AND (name NOT LIKE 'sqlite_%' AND name NOT LIKE '_cf_%')";
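+	// ('sqlite_%' matches SQLite's own internal tables; '_cf_%' is assumed to
+	// match D1's internal bookkeeping tables created by the local simulator)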
+
+	// Check that the schema (tables) in both databases is equal
+	const tablesFromMirror = (await mirrorDb.prepare(selectSchemaSQL).all()).results;
+	const tablesFromOriginal = (await originalDb.prepare(selectSchemaSQL).all()).results;
+	t.deepEqual(tablesFromMirror, tablesFromOriginal);
+
+	// Check that the data in each table is equal, using a simple
+	// SELECT * FROM <table> ORDER BY id to ensure consistent ordering
+	for (const table of tablesFromMirror) {
+		const tableName = table.name as string;
+
+		// Escape the table name and ORDER BY id to ensure consistent ordering
+		const selectTableSQL = `SELECT * FROM "${tableName.replace(/"/g, '""')}" ORDER BY id ASC`;
+
+		const originalData = (await originalDb.prepare(selectTableSQL).all()).results;
+		const mirrorData = (await mirrorDb.prepare(selectTableSQL).all()).results;
+
+		t.deepEqual(
+			originalData,
+			mirrorData,
+			`Data mismatch in table: ${tableName}`
+		);
+	}
+}