diff --git a/web-local/mydb.sqlite b/web-local/mydb.sqlite new file mode 100644 index 00000000000..d00a6ed2775 Binary files /dev/null and b/web-local/mydb.sqlite differ diff --git a/web-local/tests/UI/check-inspector-source-model.spec.ts b/web-local/tests/rd_UI/check-inspector-source-model.spec.ts similarity index 100% rename from web-local/tests/UI/check-inspector-source-model.spec.ts rename to web-local/tests/rd_UI/check-inspector-source-model.spec.ts diff --git a/web-local/tests/UI/check-model-ui-buttons.spec.ts b/web-local/tests/rd_UI/check-model-ui-buttons.spec.ts similarity index 100% rename from web-local/tests/UI/check-model-ui-buttons.spec.ts rename to web-local/tests/rd_UI/check-model-ui-buttons.spec.ts diff --git a/web-local/tests/UI/check-source-ui-buttons.spec.ts b/web-local/tests/rd_UI/check-source-ui-buttons.spec.ts similarity index 100% rename from web-local/tests/UI/check-source-ui-buttons.spec.ts rename to web-local/tests/rd_UI/check-source-ui-buttons.spec.ts diff --git a/web-local/tests/sources/source-blankFile.spec.ts b/web-local/tests/sources/source-blankFile.spec.ts index 9d096357e8a..2dc0569b0c0 100644 --- a/web-local/tests/sources/source-blankFile.spec.ts +++ b/web-local/tests/sources/source-blankFile.spec.ts @@ -1,38 +1,45 @@ -import { test } from '@playwright/test'; -import { test as RillTest } from '../utils/test'; -import { addFileWithCheck, waitForTable } from '../utils/sourceHelpers'; -import { renameFileUsingMenu, actionUsingMenu, checkExistInConnector } from '../utils/commonHelpers'; +import { test } from "@playwright/test"; +import { test as RillTest } from "../utils/test"; +import { addFileWithCheck, waitForTable } from "../utils/sourceHelpers"; +import { + renameFileUsingMenu, + actionUsingMenu, + checkExistInConnector, +} from "../utils/commonHelpers"; /// Blank File test /// In this test we create an `untitled_file`, create a second one to ensure `_1` is appended /// Following that we rename the file and modify its contents to turn it into a source /// Re-create a file to ensure it makes an `untitled_file`, then duplicate it. -/// Need to add checks for the Sources - -test.describe('Creating a blank file... and making a source.', () => { - RillTest('Creating Blank file', async ({ page }) => { +test.describe("Creating a blank file... 
and making a source.", () => { + RillTest("Creating Blank file", async ({ page }) => { // create blank file - await addFileWithCheck(page, 'untitled_file'); + await addFileWithCheck(page, "untitled_file"); // Wait for the file `untitled_file` to be present on the page - await page.waitForSelector('li[aria-label="/untitled_file Nav Entry"]', { state: 'visible' }); - - //create another blank file and expected untitled_file_1 - await addFileWithCheck(page, 'untitled_file_1'); - await page.waitForSelector('li[aria-label="/untitled_file_1 Nav Entry"]', { state: 'visible' }); + await page.waitForSelector('li[aria-label="/untitled_file Nav Entry"]', { + state: "visible", + }); + //create another blank file and expected untitled_file_1 + await addFileWithCheck(page, "untitled_file_1"); + await page.waitForSelector('li[aria-label="/untitled_file_1 Nav Entry"]', { + state: "visible", + }); - await renameFileUsingMenu(page, '/untitled_file', 'source.yaml') + await renameFileUsingMenu(page, "/untitled_file", "source.yaml"); - await page.waitForSelector('li[aria-label="/source.yaml Nav Entry"]', { state: 'visible' }); - console.log('File renamed successfully to source.yaml!'); + await page.waitForSelector('li[aria-label="/source.yaml Nav Entry"]', { + state: "visible", + }); + console.log("File renamed successfully to source.yaml!"); const textBox = page - .getByLabel('Code editor') // Locate the labeled parent - .getByRole('textbox'); // Find the inner textbox + .getByLabel("Code editor") // Locate the labeled parent + .getByRole("textbox"); // Find the inner textbox // Wait for the textbox to be visible - await textBox.waitFor({ state: 'visible' }); + await textBox.waitFor({ state: "visible" }); // Rewrite the contents of the textbox await textBox.fill(`# Testing manual file creation @@ -42,27 +49,36 @@ test.describe('Creating a blank file... and making a source.', () => { connector: "duckdb" sql: "select * from read_csv('gs://playwright-gcs-qa/AdBids_csv.csv', auto_detect=true, ignore_errors=1, header=true)"`); - console.log('Successfully Modified Contents. Checking for data.'); + console.log("Successfully Modified Contents. Checking for data."); + await waitForTable(page, "/source.yaml", [ + "timestamp", + "id", + "bid_price", + "domain", + "publisher", + ]); - await waitForTable(page, '/source.yaml', ['timestamp', 'id', 'bid_price', 'domain', 'publisher']); + // CREATING A NEW BLANK FILE, EXPECT IT TO BE `untitled_file` as we modified the original - //CREATING A NEW BLANK FILE, EXPECT IT TO BE `untitled_file` as we modified the original - - console.log("Creating a new file, expecting `untitled_file`") + console.log("Creating a new file, expecting `untitled_file`"); // create new blank file - await addFileWithCheck(page, 'untitled_file'); - await page.waitForSelector('li[aria-label="/untitled_file Nav Entry"]', { state: 'visible' }); - + await addFileWithCheck(page, "untitled_file"); + await page.waitForSelector('li[aria-label="/untitled_file Nav Entry"]', { + state: "visible", + }); // TEST FOR DUPLICATES and refresh // Locate and click the ellipsis menu button for `untitled_file` - await actionUsingMenu(page, "/source.yaml", "Duplicate") + await actionUsingMenu(page, "/source.yaml", "Duplicate"); await page.getByText("View this source").click(); - // checks? 
- await page.waitForSelector('li[aria-label="/source (copy).yaml Nav Entry"]', { state: 'visible' }); + // checks that the file exists in the duckdb connector + await page.waitForSelector( + 'li[aria-label="/source (copy).yaml Nav Entry"]', + { state: "visible" }, + ); await checkExistInConnector(page, "duckdb", "main_db", "source (copy)"); }); }); diff --git a/web-local/tests/sources/source-duckdb.spec.ts b/web-local/tests/sources/source-duckdb.spec.ts new file mode 100644 index 00000000000..5d95e03c50b --- /dev/null +++ b/web-local/tests/sources/source-duckdb.spec.ts @@ -0,0 +1,126 @@ +import { test, expect } from "@playwright/test"; +import { execSync, spawn } from "child_process"; +import { test as RillTest } from "../utils/test"; + +import path from "node:path"; +import { DuckDB, waitForTable } from "../utils/sourceHelpers"; + +// DuckDB requires 1 worker as it doesn't allow concurrency; the tests will fail otherwise. + +test.describe("Read DuckDB Table, then read into Rill", () => { + async function ensureDuckDBInstalled() { + try { + // Check if DuckDB is installed + execSync("duckdb --version", { stdio: "ignore" }); + console.log("DuckDB is already installed."); + } catch (err) { + console.log("DuckDB not found. Installing..."); + // Install DuckDB via wget (macOS universal CLI build; a Linux build is linked below) + // https://github.com/duckdb/duckdb/releases/latest/download/duckdb_cli-linux-amd64.zip + try { + execSync( + ` + wget https://github.com/duckdb/duckdb/releases/download/v1.1.3/duckdb_cli-osx-universal.zip && + unzip duckdb_cli-osx-universal.zip && + chmod +x duckdb && + sudo mv duckdb /usr/local/bin/ + `, + { stdio: "inherit" }, + ); + + console.log("DuckDB installed successfully."); + } catch (error) { + console.error("DuckDB installation failed:", error); + } + } + } + + test.beforeAll(async () => { + await ensureDuckDBInstalled(); + const currentDir = process.cwd(); + const dbPath = path.resolve(currentDir, "test/data/playwright.db"); + const commands = [ + `.open ${dbPath}`, + "select count(*) from sales;", + "select count(*) from customer_data;", + ".exit", + ]; + console.log(`Running DuckDB commands against: ${dbPath}`); + + await new Promise((resolve, reject) => { + const cli = spawn("duckdb", [], { shell: true }); + + let output = ""; + cli.stdout.on("data", (data) => { + output += data.toString(); + }); + + cli.stderr.on("data", (data) => { + console.error(`Error: ${data}`); + }); + + cli.on("close", (code) => { + if (code === 0) { + console.log("DuckDB CLI execution completed successfully."); + const salesCountMatch = output.match(/100000/); // Expected count for sales + const customerCountMatch = output.match(/10000/); // Expected count for customer_data + + expect(salesCountMatch).not.toBeNull(); + expect(customerCountMatch).not.toBeNull(); + resolve(); + } else { + reject(new Error(`DuckDB CLI exited with code ${code}`)); + } + }); + + // Write commands to DuckDB CLI + commands.forEach((cmd) => cli.stdin.write(`${cmd}\n`)); + cli.stdin.end(); + }); + }); + + test("Validate data in DuckDB", async () => { + console.log("Validation complete in beforeAll."); + }); + + console.log("Checked DuckDB with sales and customer_data tables."); + console.log("Starting Rill Developer..."); + + RillTest("Reading Source into Rill", async ({ page }) => { + // Test loading the 'sales' table + await Promise.all([ + waitForTable(page, "sources/sales.yaml", [ + "sale_date", + "sale_id", + "duration_ms", + "customer_id", + "sales_amount_usd", + "products", + "discounts", 
+ "region", + "is_online", + ]), + DuckDB(page, "sales"), // Ensure the `sales` dataset is loaded + ]); + + console.log("Sales table validated."); + + // Test loading the 'customer_data' table + await Promise.all([ + waitForTable(page, "sources/customer_data.yaml", [ + "customer_id", + "name", + "email", + "signup_date", + "preferences", + "total_spent_usd", + "loyalty_tier", + "is_active", + ]), + DuckDB(page, "customer_data"), // Ensure the `customer_data` dataset is loaded + ]); + + console.log("Customer data table validated."); + }); +}); diff --git a/web-local/tests/sources/source-folders.spec.ts b/web-local/tests/sources/source-folders.spec.ts new file mode 100644 index 00000000000..e5c30ea65f5 --- /dev/null +++ b/web-local/tests/sources/source-folders.spec.ts @@ -0,0 +1,40 @@ +import { test } from "@playwright/test"; +import { test as RillTest } from "../utils/test"; +import { addFolderWithCheck } from "../utils/sourceHelpers"; + +/// Blank Folder test +/// In this test we create a `untilted_file`, create a second one to ensure `_1` is appended + +test.describe("Creating a Folder... and making a source.", () => { + RillTest("Creating Folder", async ({ page }) => { + // create folder file + await Promise.all([addFolderWithCheck(page, "untitled_folder_3")]); + await Promise.all([addFolderWithCheck(page, "untitled_folder_1")]); + await Promise.all([addFolderWithCheck(page, "untitled_folder_")]); + // create folder in subfolder + await page.locator('span:has-text("untitled_folder_2")').last().hover(); + await page.getByLabel("untitled_folder_2 actions menu trigger").click(); + await page.getByRole("menuitem", { name: "New Folder" }).first().click(); + + // check that the folder exists, + await page.waitForSelector("#nav-\\/untitled_folder_2\\/untitled_folder", { + timeout: 5000, + }); + await page + .locator('[aria-label="/untitled_folder_2/untitled_folder"]') + .isVisible(); + + // create another for proper "_1" append + await page.locator('span:has-text("untitled_folder_2")').last().hover(); + await page.getByLabel("untitled_folder_2 actions menu trigger").click(); + await page.getByRole("menuitem", { name: "New Folder" }).first().click(); + + await page.waitForSelector( + "#nav-\\/untitled_folder_2\\/untitled_folder_1", + { timeout: 5000 }, + ); + await page + .locator('[aria-label="/untitled_folder_2/untitled_folder_1"]') + .isVisible(); + }); +}); diff --git a/web-local/tests/sources/source-gcs.spec.ts b/web-local/tests/sources/source-gcs.spec.ts new file mode 100644 index 00000000000..131c7f1d9b8 --- /dev/null +++ b/web-local/tests/sources/source-gcs.spec.ts @@ -0,0 +1,125 @@ +import { test, expect } from "@playwright/test"; +import { test as RillTest } from "../utils/test"; +import { cloud, waitForTable } from "../utils/sourceHelpers"; + +// GCS source ingestion test +// based on public bucket gs://playwright-gcs-qa/* +// Can add more files as required, currently parquet.gz files are erroring so removed. 
+ +test.describe("LOAD DATA FROM cloud", () => { + RillTest("Reading Source into Rill from GCS", async ({ page }) => { + console.log("Testing cloud sales data ingestion..."); + await Promise.all([ + waitForTable(page, "/sources/sales.yaml", [ + "sale_date", + "sale_id", + "duration_ms", + "customer_id", + "sales_amount_usd", + "products", + "discounts", + "region", + "is_online", + ]), + cloud(page, "sales.csv", "gcs"), + ]); + console.log("Sales table validated."); + + console.log("Testing cloud customer data ingestion..."); + await Promise.all([ + waitForTable(page, "/sources/customer_data.yaml", [ + "customer_id", + "name", + "email", + "signup_date", + "preferences", + "total_spent_usd", + "loyalty_tier", + "is_active", + ]), + cloud(page, "customer_data.csv", "gcs"), + ]); + console.log("Customer data table validated."); + + console.log("TESTING VARIOUS TYPES OF FILES ON cloud"); + const AdBidsColumns = ["id", "timestamp", "publisher", "domain"]; + + await Promise.all([ + waitForTable(page, "/sources/AdBids_csv.yaml", AdBidsColumns), + cloud(page, "AdBids_csv.csv", "gcs"), + ]); + await Promise.all([ + waitForTable(page, "/sources/AdBids_csv_gz.yaml", AdBidsColumns), + cloud(page, "AdBids_csv_gz.csv.gz", "gcs"), + ]); + await Promise.all([ + waitForTable(page, "/sources/AdBids_parquet.yaml", AdBidsColumns), + cloud(page, "AdBids_parquet.parquet", "gcs"), + ]); + /* broken parquet.gz + await Promise.all([ + waitForTable(page, '/sources/AdBids_parquet_gz.yaml', AdBidsColumns), + cloud(page, 'AdBids_parquet_gz.parquet.gz', 'gcs'), + ]); + */ + + await Promise.all([ + waitForTable(page, "/sources/AdBids_txt.yaml", AdBidsColumns), + cloud(page, "AdBids_txt.txt", "gcs"), + ]); + + const UsersColumns = ["id", "name", "city", "country"]; + const UsersJsonColumns = [ + "id", + "name", + "isActive", + "createdDate", + "address", + "tags", + "projects", + "scores", + "flag", + ]; + + await Promise.all([ + waitForTable(page, "/sources/Users_csv.yaml", UsersColumns), + cloud(page, "Users_csv.csv", "gcs"), + ]); + + await Promise.all([ + waitForTable(page, "/sources/Users_json.yaml", UsersJsonColumns), + cloud(page, "Users_json.json", "gcs"), + ]); + + await Promise.all([ + waitForTable(page, "/sources/Users_parquet.yaml", UsersColumns), + cloud(page, "Users_parquet.parquet", "gcs"), + ]); + + const AdImpressionsColumns = ["id", "city", "country", "user_id"]; + + await Promise.all([ + waitForTable( + page, + "/sources/AdImpressions_parquet.yaml", + AdImpressionsColumns, + ), + cloud(page, "AdImpressions_parquet.parquet", "gcs"), + ]); + /* broken parquet.gz + await Promise.all([ + waitForTable(page, '/sources/AdImpressions_parquet_gz.yaml', AdImpressionsColumns), + cloud(page, 'AdImpressions_parquet_gz.parquet.gz', 'gcs'), + ]); + */ + + await Promise.all([ + waitForTable( + page, + "/sources/AdImpressions_tsv.yaml", + AdImpressionsColumns, + ), + cloud(page, "AdImpressions_tsv.tsv", "gcs"), + ]); + }); +}); diff --git a/web-local/tests/sources/source-s3.spec.ts b/web-local/tests/sources/source-s3.spec.ts new file mode 100644 index 00000000000..37f54b13301 --- /dev/null +++ b/web-local/tests/sources/source-s3.spec.ts @@ -0,0 +1,125 @@ +import { test } from "@playwright/test"; +import { test as RillTest } from "../utils/test"; +import { cloud, waitForTable } from "../utils/sourceHelpers"; + +// S3 source ingestion test +// based on public bucket s3://playwright-s3-qa/* +// Can add more files as required, currently parquet.gz files are erroring so removed. 
+ +test.describe("LOAD DATA FROM cloud", () => { + RillTest("Reading Source into Rill from S3", async ({ page }) => { + console.log("Testing cloud sales data ingestion..."); + await Promise.all([ + waitForTable(page, "/sources/sales.yaml", [ + "sale_date", + "sale_id", + "duration_ms", + "customer_id", + "sales_amount_usd", + "products", + "discounts", + "region", + "is_online", + ]), + cloud(page, "sales.csv", "s3"), + ]); + console.log("Sales table validated."); + + console.log("Testing cloud customer data ingestion..."); + await Promise.all([ + waitForTable(page, "/sources/customer_data.yaml", [ + "customer_id", + "name", + "email", + "signup_date", + "preferences", + "total_spent_usd", + "loyalty_tier", + "is_active", + ]), + cloud(page, "customer_data.csv", "s3"), + ]); + console.log("Customer data table validated."); + + console.log("TESTING VARIOUS TYPES OF FILES ON cloud"); + const AdBidsColumns = ["id", "timestamp", "publisher", "domain"]; + + await Promise.all([ + waitForTable(page, "/sources/AdBids_csv.yaml", AdBidsColumns), + cloud(page, "AdBids_csv.csv", "s3"), + ]); + await Promise.all([ + waitForTable(page, "/sources/AdBids_csv_gz.yaml", AdBidsColumns), + cloud(page, "AdBids_csv_gz.csv.gz", "s3"), + ]); + await Promise.all([ + waitForTable(page, "/sources/AdBids_parquet.yaml", AdBidsColumns), + cloud(page, "AdBids_parquet.parquet", "s3"), + ]); + /* broken + await Promise.all([ + waitForTable(page, '/sources/AdBids_parquet_gz.yaml', AdBidsColumns), + cloud(page, 'AdBids_parquet_gz.parquet.gz', 's3'), + ]); + */ + + await Promise.all([ + waitForTable(page, "/sources/AdBids_txt.yaml", AdBidsColumns), + cloud(page, "AdBids_txt.txt", "s3"), + ]); + + const UsersColumns = ["id", "name", "city", "country"]; + const UsersJsonColumns = [ + "id", + "name", + "isActive", + "createdDate", + "address", + "tags", + "projects", + "scores", + "flag", + ]; + + await Promise.all([ + waitForTable(page, "/sources/Users_csv.yaml", UsersColumns), + cloud(page, "Users_csv.csv", "s3"), + ]); + + await Promise.all([ + waitForTable(page, "/sources/Users_json.yaml", UsersJsonColumns), + cloud(page, "Users_json.json", "s3"), + ]); + + await Promise.all([ + waitForTable(page, "/sources/Users_parquet.yaml", UsersColumns), + cloud(page, "Users_parquet.parquet", "s3"), + ]); + + const AdImpressionsColumns = ["id", "city", "country", "user_id"]; + + await Promise.all([ + waitForTable( + page, + "/sources/AdImpressions_parquet.yaml", + AdImpressionsColumns, + ), + cloud(page, "AdImpressions_parquet.parquet", "s3"), + ]); + /* broken + await Promise.all([ + waitForTable(page, '/sources/AdImpressions_parquet_gz.yaml', AdImpressionsColumns), + cloud(page, 'AdImpressions_parquet_gz.parquet.gz', 's3'), + ]); + */ + + await Promise.all([ + waitForTable( + page, + "/sources/AdImpressions_tsv.yaml", + AdImpressionsColumns, + ), + cloud(page, "AdImpressions_tsv.tsv", "s3"), + ]); + }); +}); diff --git a/web-local/tests/sources/source-sqlite.spec.ts b/web-local/tests/sources/source-sqlite.spec.ts new file mode 100644 index 00000000000..d65462a7dce --- /dev/null +++ b/web-local/tests/sources/source-sqlite.spec.ts @@ -0,0 +1,173 @@ +import { test, expect } from "@playwright/test"; +import sqlite3 from "sqlite3"; +import { open } from "sqlite"; +import { test as RillTest } from "../utils/test"; +import { sqlLiteDataset, waitForTable } from "../utils/sourceHelpers"; +import fs from "fs"; +import csv from "csv-parser"; +import { fileURLToPath } from "url"; +import path from "path"; + +const __filename = 
fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); +export const DataPath = path.join(__dirname, "../data"); + +let db; + +async function readCSV(filePath) { + const rows = []; + return new Promise((resolve, reject) => { + fs.createReadStream(filePath) + .pipe(csv()) + .on("data", (row) => { + rows.push(row); + }) + .on("end", () => { + resolve(rows); + }) + .on("error", (err) => { + reject(err); + }); + }); +} + +test.describe("SQLite Test, then read into Rill", () => { + test.beforeAll(async () => { + // Create and initialize SQLite database + db = await open({ + filename: "mydb.sqlite", // File-based SQLite database in the working directory + driver: sqlite3.Database, + }); + + if (!db) { + throw new Error("Failed to open SQLite database"); + } + + // Create tables + await db.exec(` + CREATE TABLE IF NOT EXISTS sales ( + sale_date DATE NOT NULL, -- Date of the sale event + sale_id INTEGER PRIMARY KEY, -- Unique identifier for each sale + customer_id INTEGER NOT NULL, -- Identifier for the customer + products TEXT, -- List of products purchased (stored as JSON string) + sales_amount_usd REAL NOT NULL, -- Total sales amount in USD + discounts TEXT, -- Discounts applied (stored as JSON string) + duration_ms INTEGER, -- Time spent on the transaction (in milliseconds) + is_online BOOLEAN, -- Whether the sale was made online + region TEXT -- Region where the sale occurred + ); + `); + + await db.exec(` + CREATE TABLE IF NOT EXISTS customer_data ( + customer_id INTEGER PRIMARY KEY, -- Unique identifier for each customer + name TEXT NOT NULL, -- Customer name + email TEXT NOT NULL, -- Customer email address + signup_date DATE NOT NULL, -- Date the customer signed up + preferences TEXT, -- Customer preferences (stored as JSON string) + total_spent_usd REAL NOT NULL, -- Total money spent by the customer in USD + loyalty_tier TEXT, -- Loyalty tier (e.g., Gold, Silver, Bronze) + is_active BOOLEAN -- Whether the customer account is active + ); + `); + + // Load data from CSVs + const salesData = await readCSV(`${DataPath}/sales_data_sqlite.csv`); + const customerData = await readCSV(`${DataPath}/customer_data_sqlite.csv`); + + for (const row of salesData) { + await db.run( + ` + INSERT INTO sales (sale_date, sale_id, customer_id, products, sales_amount_usd, discounts, duration_ms, is_online, region) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`, + [ + row.sale_date, + row.sale_id, + row.customer_id, + row.products, + row.sales_amount_usd, + row.discounts, + row.duration_ms, + row.is_online, + row.region, + ], + ); + } + + for (const row of customerData) { + await db.run( + ` + INSERT INTO customer_data (customer_id, name, email, signup_date, preferences, total_spent_usd, loyalty_tier, is_active) + VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, + [ + row.customer_id, + row.name, + row.email, + row.signup_date, + row.preferences, + row.total_spent_usd, + row.loyalty_tier, + row.is_active, + ], + ); + } + }); + + test.afterAll(async () => { + // Close the SQLite database + await db.close(); + // fs.unlinkSync("mydb.sqlite"); // Deletes the database file + // console.log("Database file deleted"); + }); + + test("Load and validate data in SQLite", async () => { + // Validate data in the database + const sales = await db.all("SELECT * FROM sales"); + expect(sales).toHaveLength(100000); + + const customerData = await db.all("SELECT * FROM customer_data"); + expect(customerData).toHaveLength(10000); + }); + + console.log( + "Correctly Initialized SQLite Database with sales and customer_data tables.", + ); + 
console.log("Starting Rill Developer..."); + + RillTest("Reading Source into Rill", async ({ page }) => { + // Test loading the 'sales' table + await Promise.all([ + waitForTable(page, "sources/sales.yaml", [ + "sale_date", + "sale_id", + "duration_ms", + "customer_id", + "sales_amount_usd", + "products", + "discounts", + "region", + "is_online", + ]), + sqlLiteDataset(page, "sales"), // Ensure the `sales` dataset is loaded + ]); + + console.log("Sales table validated."); + + // Test loading the 'customer_data' table + await Promise.all([ + waitForTable(page, "sources/customer_data.yaml", [ + "customer_id", + "name", + "email", + "signup_date", + "preferences", + "total_spent_usd", + "loyalty_tier", + "is_active", + ]), + sqlLiteDataset(page, "customer_data"), // Ensure the `customer_data` dataset is loaded + ]); + + console.log("Customer data table validated."); + }); +}); diff --git a/web-local/tests/sources.spec.ts b/web-local/tests/sources/source-uploadFile.spec.ts similarity index 93% rename from web-local/tests/sources.spec.ts rename to web-local/tests/sources/source-uploadFile.spec.ts index 778bc6c57b4..1f1bda04a8c 100644 --- a/web-local/tests/sources.spec.ts +++ b/web-local/tests/sources/source-uploadFile.spec.ts @@ -3,15 +3,15 @@ import { deleteFile, renameFileUsingMenu, updateCodeEditor, -} from "./utils/commonHelpers"; +} from "../utils/commonHelpers"; import { TestDataPath, createSource, uploadFile, waitForSource, -} from "./utils/sourceHelpers"; -import { test } from "./utils/test"; -import { fileNotPresent, waitForFileNavEntry } from "./utils/waitHelpers"; +} from "../utils/sourceHelpers"; +import { test } from "../utils/test"; +import { fileNotPresent, waitForFileNavEntry } from "../utils/waitHelpers"; test.describe("sources", () => { test("Import sources", async ({ page }) => { diff --git a/web-local/tests/utils/sourceHelpers.ts b/web-local/tests/utils/sourceHelpers.ts index 0c0b7c80411..7d872444862 100644 --- a/web-local/tests/utils/sourceHelpers.ts +++ b/web-local/tests/utils/sourceHelpers.ts @@ -155,7 +155,7 @@ export async function cloud( // input the needed details for mysql // Locate the SQL text box and modify - console.log("opened the UI"); + // console.log("opened the UI"); const inputField = page.locator("input#path"); // Modify the text @@ -245,7 +245,7 @@ export async function sqlLiteDataset( // input the needed details for mysql // Locate the SQL text box and modify - console.log("opened the UI"); + // console.log("opened the UI"); const currentDir = process.cwd(); // Returns the directory where the script is executed const dbPath = path.join(currentDir, "mydb.sqlite"); @@ -296,7 +296,7 @@ export async function pgDataset( // input the needed details for mysql // Locate the SQL text box and modify - console.log("opened the UI"); + // console.log("opened the UI"); const inputField = page.locator("input#sql"); // Modify the text