From 627750a5cf73e8c62e287dfc6cab537e88c8ec54 Mon Sep 17 00:00:00 2001 From: JordanBoltonMN Date: Tue, 1 Mar 2022 14:20:28 -0600 Subject: [PATCH] Change to ES6, made parser async, add async helpers (#294) * initial commit * naive touched up * more async rework * more async work * missed one * either done or next is lexing * more fixes * another package/tsconfig pass * initial commit * changing package version * remove duplicate fn * another async pass * no-floating-promises --- .eslintrc.js | 1 + package-lock.json | 217 ++------ package.json | 2 +- src/example.ts | 5 +- src/powerquery-parser/common/arrayUtils.ts | 9 + .../common/result/resultUtils.ts | 13 + .../disambiguation/disambiguationUtils.ts | 82 +-- .../nodeIdMap/nodeIdMapUtils/leafSelectors.ts | 6 +- .../nodeIdMapUtils/nodeIdMapUtils.ts | 10 +- .../parser/parseState/parseStateUtils.ts | 8 +- src/powerquery-parser/parser/parser/parser.ts | 141 ++--- .../parser/parser/parserUtils.ts | 26 +- .../parser/parsers/combinatorialParser.ts | 40 +- src/powerquery-parser/parser/parsers/naive.ts | 493 +++++++++-------- .../parser/parsers/recursiveDescentParser.ts | 3 + src/powerquery-parser/settings/settings.ts | 2 +- src/powerquery-parser/task/taskUtils.ts | 8 +- src/test/libraryTest/parser/children.ts | 8 +- src/test/libraryTest/parser/columnNumber.ts | 32 +- src/test/libraryTest/parser/error.ts | 136 ++--- src/test/libraryTest/parser/idUtils.ts | 16 +- src/test/libraryTest/parser/nodeIdMapUtils.ts | 33 +- src/test/libraryTest/parser/simple.ts | 494 +++++++++--------- .../benchmark/createBenchmarks.ts | 4 +- src/test/resourceTest/lexParse.ts | 4 +- src/test/testUtils/assertUtils.ts | 22 +- src/test/testUtils/fileUtils.ts | 7 +- tsconfig.json | 2 +- 28 files changed, 935 insertions(+), 889 deletions(-) diff --git a/.eslintrc.js b/.eslintrc.js index 27f8ef74..b4aeafa9 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -14,6 +14,7 @@ module.exports = { "@typescript-eslint/consistent-type-assertions": ["warn", { assertionStyle: "as" }], "@typescript-eslint/explicit-function-return-type": "error", "@typescript-eslint/no-inferrable-types": "off", + "@typescript-eslint/no-floating-promises": "error", "@typescript-eslint/no-namespace": "error", "@typescript-eslint/no-unused-vars": ["error", { argsIgnorePattern: "^_" }], "@typescript-eslint/prefer-namespace-keyword": "error", diff --git a/package-lock.json b/package-lock.json index aa61e2b0..689986ba 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@microsoft/powerquery-parser", - "version": "0.5.23", + "version": "0.6.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@microsoft/powerquery-parser", - "version": "0.5.23", + "version": "0.6.1", "license": "MIT", "dependencies": { "grapheme-splitter": "^1.0.4", @@ -633,10 +633,16 @@ } }, "node_modules/chokidar": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.2.tgz", - "integrity": "sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ==", + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", @@ -1276,9 +1282,9 @@ } }, "node_modules/glob": { - "version": "7.1.3", - "resolved": 
"https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", - "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", "dev": true, "dependencies": { "fs.realpath": "^1.0.0", @@ -1290,6 +1296,9 @@ }, "engines": { "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/glob-parent": { @@ -1709,18 +1718,6 @@ "node": ">=8.6" } }, - "node_modules/micromatch/node_modules/picomatch": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", - "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", - "dev": true, - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, "node_modules/minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", @@ -1753,32 +1750,32 @@ } }, "node_modules/mocha": { - "version": "9.1.3", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-9.1.3.tgz", - "integrity": "sha512-Xcpl9FqXOAYqI3j79pEtHBBnQgVXIhpULjGQa7DVb0Po+VzmSIK9kanAiWLHoRR/dbZ2qpdPshuXr8l1VaHCzw==", + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-9.2.1.tgz", + "integrity": "sha512-T7uscqjJVS46Pq1XDXyo9Uvey9gd3huT/DD9cYBb4K2Xc/vbKRPUWK067bxDQRK0yIz6Jxk73IrnimvASzBNAQ==", "dev": true, "dependencies": { "@ungap/promise-all-settled": "1.1.2", "ansi-colors": "4.1.1", "browser-stdout": "1.3.1", - "chokidar": "3.5.2", - "debug": "4.3.2", + "chokidar": "3.5.3", + "debug": "4.3.3", "diff": "5.0.0", "escape-string-regexp": "4.0.0", "find-up": "5.0.0", - "glob": "7.1.7", + "glob": "7.2.0", "growl": "1.10.5", "he": "1.2.0", "js-yaml": "4.1.0", "log-symbols": "4.1.0", "minimatch": "3.0.4", "ms": "2.1.3", - "nanoid": "3.1.25", + "nanoid": "3.2.0", "serialize-javascript": "6.0.0", "strip-json-comments": "3.1.1", "supports-color": "8.1.1", "which": "2.0.2", - "workerpool": "6.1.5", + "workerpool": "6.2.0", "yargs": "16.2.0", "yargs-parser": "20.2.4", "yargs-unparser": "2.0.0" @@ -1842,61 +1839,6 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true }, - "node_modules/mocha/node_modules/debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", - "dev": true, - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/mocha/node_modules/debug/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "node_modules/mocha/node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": 
"https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/mocha/node_modules/glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", - "dev": true, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/mocha/node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", @@ -1922,9 +1864,9 @@ "dev": true }, "node_modules/nanoid": { - "version": "3.1.25", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.25.tgz", - "integrity": "sha512-rdwtIXaXCLFAQbnfqDRnI6jaRHp9fTcYBjtFKE8eezcZ7LuLjhUaQGNeMXf1HmRoCH32CLz6XwX0TtxEOS/A3Q==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.2.0.tgz", + "integrity": "sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==", "dev": true, "bin": { "nanoid": "bin/nanoid.cjs" @@ -2067,9 +2009,9 @@ "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, "node_modules/picomatch": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", - "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true, "engines": { "node": ">=8.6" @@ -2606,9 +2548,9 @@ } }, "node_modules/workerpool": { - "version": "6.1.5", - "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.1.5.tgz", - "integrity": "sha512-XdKkCK0Zqc6w3iTxLckiuJ81tiD/o5rBE/m+nXpRCB+/Sq4DqkfXZ/x0jW02DG1tGsfUGXbTJyZDP+eu67haSw==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.0.tgz", + "integrity": "sha512-Rsk5qQHJ9eowMH28Jwhe8HEbmdYDX4lwoMWshiCXugjtHqMD9ZbiqSDLxcsfdqsETPzVUtX5s1Z5kStiIM6l4A==", "dev": true }, "node_modules/wrap-ansi": { @@ -3185,9 +3127,9 @@ "dev": true }, "chokidar": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.2.tgz", - "integrity": "sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ==", + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", "dev": true, "requires": { "anymatch": "~3.1.2", @@ -3669,9 +3611,9 @@ "dev": true }, "glob": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", - "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -3993,14 +3935,6 @@ "requires": { "braces": "^3.0.1", "picomatch": "^2.2.3" - }, - "dependencies": { - "picomatch": { - "version": "2.3.0", - "resolved": 
"https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", - "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", - "dev": true - } } }, "minimatch": { @@ -4028,32 +3962,32 @@ } }, "mocha": { - "version": "9.1.3", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-9.1.3.tgz", - "integrity": "sha512-Xcpl9FqXOAYqI3j79pEtHBBnQgVXIhpULjGQa7DVb0Po+VzmSIK9kanAiWLHoRR/dbZ2qpdPshuXr8l1VaHCzw==", + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-9.2.1.tgz", + "integrity": "sha512-T7uscqjJVS46Pq1XDXyo9Uvey9gd3huT/DD9cYBb4K2Xc/vbKRPUWK067bxDQRK0yIz6Jxk73IrnimvASzBNAQ==", "dev": true, "requires": { "@ungap/promise-all-settled": "1.1.2", "ansi-colors": "4.1.1", "browser-stdout": "1.3.1", - "chokidar": "3.5.2", - "debug": "4.3.2", + "chokidar": "3.5.3", + "debug": "4.3.3", "diff": "5.0.0", "escape-string-regexp": "4.0.0", "find-up": "5.0.0", - "glob": "7.1.7", + "glob": "7.2.0", "growl": "1.10.5", "he": "1.2.0", "js-yaml": "4.1.0", "log-symbols": "4.1.0", "minimatch": "3.0.4", "ms": "2.1.3", - "nanoid": "3.1.25", + "nanoid": "3.2.0", "serialize-javascript": "6.0.0", "strip-json-comments": "3.1.1", "supports-color": "8.1.1", "which": "2.0.2", - "workerpool": "6.1.5", + "workerpool": "6.2.0", "yargs": "16.2.0", "yargs-parser": "20.2.4", "yargs-unparser": "2.0.0" @@ -4065,43 +3999,6 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true }, - "debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", - "dev": true, - "requires": { - "ms": "2.1.2" - }, - "dependencies": { - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - } - } - }, - "escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true - }, - "glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, "js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", @@ -4160,9 +4057,9 @@ "dev": true }, "nanoid": { - "version": "3.1.25", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.25.tgz", - "integrity": "sha512-rdwtIXaXCLFAQbnfqDRnI6jaRHp9fTcYBjtFKE8eezcZ7LuLjhUaQGNeMXf1HmRoCH32CLz6XwX0TtxEOS/A3Q==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.2.0.tgz", + "integrity": "sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==", "dev": true }, "natural-compare": { @@ -4263,9 +4160,9 @@ "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, "picomatch": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", - "integrity": 
"sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true }, "prelude-ls": { @@ -4615,9 +4512,9 @@ "dev": true }, "workerpool": { - "version": "6.1.5", - "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.1.5.tgz", - "integrity": "sha512-XdKkCK0Zqc6w3iTxLckiuJ81tiD/o5rBE/m+nXpRCB+/Sq4DqkfXZ/x0jW02DG1tGsfUGXbTJyZDP+eu67haSw==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.0.tgz", + "integrity": "sha512-Rsk5qQHJ9eowMH28Jwhe8HEbmdYDX4lwoMWshiCXugjtHqMD9ZbiqSDLxcsfdqsETPzVUtX5s1Z5kStiIM6l4A==", "dev": true }, "wrap-ansi": { diff --git a/package.json b/package.json index 19b5c667..a37484c8 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@microsoft/powerquery-parser", - "version": "0.5.23", + "version": "0.6.1", "description": "A parser for the Power Query/M formula language.", "author": "Microsoft", "license": "MIT", diff --git a/src/example.ts b/src/example.ts index 1552e362..6bafd3c3 100644 --- a/src/example.ts +++ b/src/example.ts @@ -2,6 +2,7 @@ // Licensed under the MIT license. /* eslint-disable @typescript-eslint/ban-ts-comment */ +/* eslint-disable @typescript-eslint/no-floating-promises */ /* eslint-disable @typescript-eslint/no-unused-vars */ /* eslint-disable no-unused-vars */ @@ -9,10 +10,10 @@ import { Assert, DefaultSettings, Lexer, ResultUtils, Task, TaskUtils } from "." parseText(`let x = 1 in try x otherwise 2`); -function parseText(text: string): void { +async function parseText(text: string): Promise { // Try lexing and parsing the argument which returns a Result object. // A Result is the union (Ok | Error). - const task: Task.TriedLexParseTask = TaskUtils.tryLexParse(DefaultSettings, text); + const task: Task.TriedLexParseTask = await TaskUtils.tryLexParse(DefaultSettings, text); // If it was a success then dump the abstract syntax tree (AST) as verbose JSON to console. 
if (TaskUtils.isParseStageOk(task)) { diff --git a/src/powerquery-parser/common/arrayUtils.ts b/src/powerquery-parser/common/arrayUtils.ts index 9ae4078f..bb171938 100644 --- a/src/powerquery-parser/common/arrayUtils.ts +++ b/src/powerquery-parser/common/arrayUtils.ts @@ -63,6 +63,15 @@ export function assertNonZeroLength( ); } +export async function asyncMap( + collection: ReadonlyArray, + mapFn: (value: T) => Promise, +): Promise> { + const tasks: ReadonlyArray> = collection.map(mapFn); + + return await Promise.all(tasks); +} + export function concatUnique(left: ReadonlyArray, right: ReadonlyArray): ReadonlyArray { const partial: T[] = [...left]; diff --git a/src/powerquery-parser/common/result/resultUtils.ts b/src/powerquery-parser/common/result/resultUtils.ts index 74558fcc..90752383 100644 --- a/src/powerquery-parser/common/result/resultUtils.ts +++ b/src/powerquery-parser/common/result/resultUtils.ts @@ -35,3 +35,16 @@ export function ensureResult(locale: string, callbackFn: () => T): Result( + locale: string, + callbackFn: () => Promise, +): Promise> { + try { + return boxOk(await callbackFn()); + } catch (error) { + Assert.isInstanceofError(error); + + return boxError(CommonError.ensureCommonError(locale, error)); + } +} diff --git a/src/powerquery-parser/parser/disambiguation/disambiguationUtils.ts b/src/powerquery-parser/parser/disambiguation/disambiguationUtils.ts index d99b1a87..1ad633f9 100644 --- a/src/powerquery-parser/parser/disambiguation/disambiguationUtils.ts +++ b/src/powerquery-parser/parser/disambiguation/disambiguationUtils.ts @@ -20,11 +20,11 @@ import { ParseError } from ".."; // For each given parse function it'll create a deep copy of the state then parse with the function. // Mutates the given state to whatever parse state which matched the most amount of tokens. // Ties are resolved in the order of the given parse functions. -export function readAmbiguous( +export async function readAmbiguous( state: ParseState, parser: Parser, - parseFns: ReadonlyArray<(state: ParseState, parser: Parser) => T>, -): AmbiguousParse { + parseFns: ReadonlyArray<(state: ParseState, parser: Parser) => Promise>, +): Promise> { const trace: Trace = state.traceManager.entry(DisambiguationTraceConstant.Disambiguation, readAmbiguous.name, { [TraceConstant.Length]: parseFns.length, }); @@ -34,13 +34,15 @@ export function readAmbiguous( let maybeBestMatch: AmbiguousParse | undefined = undefined; for (const parseFn of parseFns) { - const variantState: ParseState = parser.copyState(state); + // eslint-disable-next-line no-await-in-loop + const variantState: ParseState = await parser.copyState(state); let maybeNode: T | undefined; let variantResult: Result; try { - maybeNode = parseFn(variantState, parser); + // eslint-disable-next-line no-await-in-loop + maybeNode = await parseFn(variantState, parser); variantResult = ResultUtils.boxOk(maybeNode); } catch (error) { if (!ParseError.isTInnerParseError(error)) { @@ -65,11 +67,11 @@ export function readAmbiguous( } // Peeks at the token stream and either performs an explicit read or an ambiguous read. 
-export function readAmbiguousBracket( +export async function readAmbiguousBracket( state: ParseState, parser: Parser, allowedVariants: ReadonlyArray, -): TAmbiguousBracketNode { +): Promise { const trace: Trace = state.traceManager.entry( DisambiguationTraceConstant.Disambiguation, readAmbiguousBracket.name, @@ -86,15 +88,15 @@ export function readAmbiguousBracket( switch (disambiguation) { case BracketDisambiguation.FieldProjection: - ambiguousBracket = parser.readFieldProjection(state, parser); + ambiguousBracket = await parser.readFieldProjection(state, parser); break; case BracketDisambiguation.FieldSelection: - ambiguousBracket = parser.readFieldSelection(state, parser); + ambiguousBracket = await parser.readFieldSelection(state, parser); break; case BracketDisambiguation.RecordExpression: - ambiguousBracket = parser.readRecordExpression(state, parser); + ambiguousBracket = await parser.readRecordExpression(state, parser); break; default: @@ -110,7 +112,7 @@ export function readAmbiguousBracket( throw ParseStateUtils.unterminatedBracketError(state); case DismabiguationBehavior.Thorough: - ambiguousBracket = thoroughReadAmbiguousBracket(state, parser, allowedVariants); + ambiguousBracket = await thoroughReadAmbiguousBracket(state, parser, allowedVariants); break; default: @@ -124,14 +126,17 @@ export function readAmbiguousBracket( } // Peeks at the token stream and either performs an explicit read or an ambiguous read. -export function readAmbiguousParenthesis(state: ParseState, parser: Parser): TAmbiguousParenthesisNode { +export async function readAmbiguousParenthesis(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry( DisambiguationTraceConstant.Disambiguation, readAmbiguousParenthesis.name, ); // We might be able to peek at tokens to disambiguate what parenthesized expression is next. - const maybeDisambiguation: ParenthesisDisambiguation | undefined = maybeDisambiguateParenthesis(state, parser); + const maybeDisambiguation: ParenthesisDisambiguation | undefined = await maybeDisambiguateParenthesis( + state, + parser, + ); // Peeking gave us a concrete answer as to what's next. let ambiguousParenthesis: TAmbiguousParenthesisNode; @@ -141,11 +146,11 @@ export function readAmbiguousParenthesis(state: ParseState, parser: Parser): TAm switch (disambiguation) { case ParenthesisDisambiguation.FunctionExpression: - ambiguousParenthesis = parser.readFunctionExpression(state, parser); + ambiguousParenthesis = await parser.readFunctionExpression(state, parser); break; case ParenthesisDisambiguation.ParenthesizedExpression: - ambiguousParenthesis = readParenthesizedExpressionOrBinOpExpression(state, parser); + ambiguousParenthesis = await readParenthesizedExpressionOrBinOpExpression(state, parser); break; default: @@ -161,7 +166,7 @@ export function readAmbiguousParenthesis(state: ParseState, parser: Parser): TAm throw ParseStateUtils.unterminatedParenthesesError(state); case DismabiguationBehavior.Thorough: - ambiguousParenthesis = thoroughReadAmbiguousParenthesis(state, parser); + ambiguousParenthesis = await thoroughReadAmbiguousParenthesis(state, parser); break; default: @@ -177,7 +182,10 @@ export function readAmbiguousParenthesis(state: ParseState, parser: Parser): TAm } // Peeks at tokens which might give a concrete disambiguation. 
-export function maybeDisambiguateParenthesis(state: ParseState, parser: Parser): ParenthesisDisambiguation | undefined { +export async function maybeDisambiguateParenthesis( + state: ParseState, + parser: Parser, +): Promise { const trace: Trace = state.traceManager.entry( DisambiguationTraceConstant.Disambiguation, maybeDisambiguateParenthesis.name, @@ -204,13 +212,16 @@ export function maybeDisambiguateParenthesis(state: ParseState, parser: Parser): // '(x as number) as number' could either be either case, // so we need to consume test if the trailing 'as number' is followed by a FatArrow. if (ParseStateUtils.isTokenKind(state, Token.TokenKind.KeywordAs, offsetTokenIndex + 1)) { - const checkpoint: ParseStateCheckpoint = parser.createCheckpoint(state); + // eslint-disable-next-line no-await-in-loop + const checkpoint: ParseStateCheckpoint = await parser.createCheckpoint(state); unsafeMoveTo(state, offsetTokenIndex + 2); try { - parser.readNullablePrimitiveType(state, parser); + // eslint-disable-next-line no-await-in-loop + await parser.readNullablePrimitiveType(state, parser); } catch { - parser.restoreCheckpoint(state, checkpoint); + // eslint-disable-next-line no-await-in-loop + await parser.restoreCheckpoint(state, checkpoint); if (ParseStateUtils.isOnTokenKind(state, Token.TokenKind.FatArrow)) { return ParenthesisDisambiguation.FunctionExpression; @@ -225,7 +236,8 @@ export function maybeDisambiguateParenthesis(state: ParseState, parser: Parser): maybeDisambiguation = ParenthesisDisambiguation.ParenthesizedExpression; } - parser.restoreCheckpoint(state, checkpoint); + // eslint-disable-next-line no-await-in-loop + await parser.restoreCheckpoint(state, checkpoint); } else if (ParseStateUtils.isTokenKind(state, Token.TokenKind.FatArrow, offsetTokenIndex + 1)) { maybeDisambiguation = ParenthesisDisambiguation.FunctionExpression; } else { @@ -304,17 +316,17 @@ const enum DisambiguationTraceConstant { // Copy the current state and attempt to read for each of the following: // FieldProjection, FieldSelection, and RecordExpression. // Mutates the given state with the read attempt which matched the most tokens. -function thoroughReadAmbiguousBracket( +async function thoroughReadAmbiguousBracket( state: ParseState, parser: Parser, allowedVariants: ReadonlyArray, -): TAmbiguousBracketNode { +): Promise { const trace: Trace = state.traceManager.entry( DisambiguationTraceConstant.Disambiguation, readAmbiguousBracket.name, ); - const ambiguousBracket: TAmbiguousBracketNode = thoroughReadAmbiguous( + const ambiguousBracket: TAmbiguousBracketNode = await thoroughReadAmbiguous( state, parser, bracketDisambiguationParseFunctions(parser, allowedVariants), @@ -328,18 +340,18 @@ function thoroughReadAmbiguousBracket( // Copy the current state and attempt to read for each of the following: // FunctionExpression, ParenthesisExpression. // Mutates the given state with the read attempt which matched the most tokens. 
-function thoroughReadAmbiguousParenthesis(state: ParseState, parser: Parser): TAmbiguousParenthesisNode { - return thoroughReadAmbiguous(state, parser, [ +async function thoroughReadAmbiguousParenthesis(state: ParseState, parser: Parser): Promise { + return await thoroughReadAmbiguous(state, parser, [ parser.readFunctionExpression, readParenthesizedExpressionOrBinOpExpression, ]); } -function thoroughReadAmbiguous( +async function thoroughReadAmbiguous( state: ParseState, parser: Parser, - parseFns: ReadonlyArray<(state: ParseState, parser: Parser) => T>, -): T { + parseFns: ReadonlyArray<(state: ParseState, parser: Parser) => Promise>, +): Promise { const trace: Trace = state.traceManager.entry( DisambiguationTraceConstant.Disambiguation, thoroughReadAmbiguous.name, @@ -348,9 +360,9 @@ function thoroughReadAmbiguous = readAmbiguous(state, parser, parseFns); + const ambiguousParse: AmbiguousParse = await readAmbiguous(state, parser, parseFns); - parser.applyState(state, ambiguousParse.parseState); + await parser.applyState(state, ambiguousParse.parseState); if (ResultUtils.isOk(ambiguousParse.result)) { trace.exit({ @@ -376,7 +388,7 @@ function thoroughReadAmbiguous, -): ReadonlyArray<(state: ParseState, parser: Parser) => TAmbiguousBracketNode> { +): ReadonlyArray<(state: ParseState, parser: Parser) => Promise> { return allowedVariants.map((bracketDisambiguation: BracketDisambiguation) => { switch (bracketDisambiguation) { case BracketDisambiguation.FieldProjection: @@ -397,11 +409,11 @@ function bracketDisambiguationParseFunctions( // When the next token is an open parenthesis we can't directly read // a ParenthesisExpression as it may leave trailing tokens behind. // `(1) + 2` -function readParenthesizedExpressionOrBinOpExpression( +async function readParenthesizedExpressionOrBinOpExpression( state: ParseState, parser: Parser, -): Ast.ParenthesizedExpression | Ast.TLogicalExpression { - const node: Ast.TNode = parser.readLogicalExpression(state, parser); +): Promise { + const node: Ast.TNode = await parser.readLogicalExpression(state, parser); const leftMostNode: Ast.TNode = NodeIdMapUtils.assertUnboxLeftMostLeaf( state.contextState.nodeIdMapCollection, diff --git a/src/powerquery-parser/parser/nodeIdMap/nodeIdMapUtils/leafSelectors.ts b/src/powerquery-parser/parser/nodeIdMap/nodeIdMapUtils/leafSelectors.ts index 0042d8e3..b77cbe19 100644 --- a/src/powerquery-parser/parser/nodeIdMap/nodeIdMapUtils/leafSelectors.ts +++ b/src/powerquery-parser/parser/nodeIdMap/nodeIdMapUtils/leafSelectors.ts @@ -58,7 +58,7 @@ export function maybeRightMostLeaf( nodeIdMapCollection: Collection, rootId: number, maybeCondition: ((node: Ast.TNode) => boolean) | undefined = undefined, -): Ast.TNode | undefined { +): Promise { const astNodeById: AstNodeById = nodeIdMapCollection.astNodeById; let nodeIdsToExplore: number[] = [rootId]; let maybeRightMost: Ast.TNode | undefined; @@ -119,13 +119,13 @@ export function maybeRightMostLeaf( } } - return maybeRightMost; + return Promise.resolve(maybeRightMost); } export function maybeRightMostLeafWhere( nodeIdMapCollection: Collection, rootId: number, maybeCondition: ((node: Ast.TNode) => boolean) | undefined, -): Ast.TNode | undefined { +): Promise { return maybeRightMostLeaf(nodeIdMapCollection, rootId, maybeCondition); } diff --git a/src/powerquery-parser/parser/nodeIdMap/nodeIdMapUtils/nodeIdMapUtils.ts b/src/powerquery-parser/parser/nodeIdMap/nodeIdMapUtils/nodeIdMapUtils.ts index e69d84db..e3d3be6c 100644 --- 
a/src/powerquery-parser/parser/nodeIdMap/nodeIdMapUtils/nodeIdMapUtils.ts +++ b/src/powerquery-parser/parser/nodeIdMap/nodeIdMapUtils/nodeIdMapUtils.ts @@ -69,7 +69,10 @@ export function hasParsedToken(nodeIdMapCollection: Collection, nodeId: number): return false; } -export function xorNodeTokenRange(nodeIdMapCollection: Collection, xorNode: TXorNode): XorNodeTokenRange { +export async function xorNodeTokenRange( + nodeIdMapCollection: Collection, + xorNode: TXorNode, +): Promise { switch (xorNode.kind) { case XorNodeKind.Ast: { const tokenRange: Token.TokenRange = xorNode.node.tokenRange; @@ -84,7 +87,10 @@ export function xorNodeTokenRange(nodeIdMapCollection: Collection, xorNode: TXor const contextNode: ParseContext.TNode = xorNode.node; let tokenIndexEnd: number; - const maybeRightMostChild: Ast.TNode | undefined = maybeRightMostLeaf(nodeIdMapCollection, xorNode.node.id); + const maybeRightMostChild: Ast.TNode | undefined = await maybeRightMostLeaf( + nodeIdMapCollection, + xorNode.node.id, + ); if (maybeRightMostChild === undefined) { tokenIndexEnd = contextNode.tokenIndexStart; diff --git a/src/powerquery-parser/parser/parseState/parseStateUtils.ts b/src/powerquery-parser/parser/parseState/parseStateUtils.ts index 800db8ec..3675c7dd 100644 --- a/src/powerquery-parser/parser/parseState/parseStateUtils.ts +++ b/src/powerquery-parser/parser/parseState/parseStateUtils.ts @@ -45,8 +45,8 @@ export function createState(lexerSnapshot: LexerSnapshot, maybeOverrides: Partia } // If you have a custom parser + parser state, then you'll have to create your own copyState/applyState functions. -// See `benchmark.ts` for an example. -export function applyState(state: ParseState, update: ParseState): void { +// eslint-disable-next-line require-await +export async function applyState(state: ParseState, update: ParseState): Promise { state.tokenIndex = update.tokenIndex; state.maybeCurrentToken = update.maybeCurrentToken; state.maybeCurrentTokenKind = update.maybeCurrentTokenKind; @@ -55,8 +55,8 @@ export function applyState(state: ParseState, update: ParseState): void { } // If you have a custom parser + parser state, then you'll have to create your own copyState/applyState functions. -// See `benchmark.ts` for an example. -export function copyState(state: ParseState): ParseState { +// eslint-disable-next-line require-await +export async function copyState(state: ParseState): Promise { return { ...state, contextState: ParseContextUtils.copyState(state.contextState), diff --git a/src/powerquery-parser/parser/parser/parser.ts b/src/powerquery-parser/parser/parser/parser.ts index 5df2d9cf..feed5fa5 100644 --- a/src/powerquery-parser/parser/parser/parser.ts +++ b/src/powerquery-parser/parser/parser/parser.ts @@ -20,68 +20,67 @@ export interface ParseOk { export interface Parser { // Update `state` to match the the `update`. - readonly applyState: (state: ParseState, update: ParseState) => void; - // Create a deep copy of S. - readonly copyState: (state: ParseState) => ParseState; - - // Checkpoints are a snapshot for a particular state, - // and should enable reverting the state to its earlier version. They do not work on later states. - // Eg. given the history below: - // You can restore checkpoint 2 and then checkpoint 1, - // but restoring checkpoint 1 and then checkpoint 2 will result in undefined behavior. - // Initial state ------- checkpoint 1 -- checkpoint 2 --- current. 
- readonly createCheckpoint: (state: ParseState) => ParseStateCheckpoint; - readonly restoreCheckpoint: (state: ParseState, checkpoint: ParseStateCheckpoint) => void; + readonly applyState: (state: ParseState, update: ParseState) => Promise; + // Create a deep copy of ParseState. + readonly copyState: (state: ParseState) => Promise; + + // Checkpoints are a snapshot at a particular time. + // You can use a checkpoint to restore the parser's state back to when the checkpoint was created. + // If the checkpoint is used on a parser that didn't create the checkpoint it results in undefiend behavior. + // If the checkpoint is used on a parser whose state is earlier than what the checkpoint recorded + // it results in undefined behavior. + readonly createCheckpoint: (state: ParseState) => Promise; + readonly restoreCheckpoint: (state: ParseState, checkpoint: ParseStateCheckpoint) => Promise; // 12.1.6 Identifiers readonly readIdentifier: (state: ParseState, parser: Parser) => Ast.Identifier; - readonly readGeneralizedIdentifier: (state: ParseState, parser: Parser) => Ast.GeneralizedIdentifier; + readonly readGeneralizedIdentifier: (state: ParseState, parser: Parser) => Promise; readonly readKeyword: (state: ParseState, parser: Parser) => Ast.IdentifierExpression; // 12.2.1 Documents - readonly readDocument: (state: ParseState, parser: Parser) => Ast.TDocument; + readonly readDocument: (state: ParseState, parser: Parser) => Promise; // 12.2.2 Section Documents - readonly readSectionDocument: (state: ParseState, parser: Parser) => Ast.Section; - readonly readSectionMembers: (state: ParseState, parser: Parser) => Ast.IArrayWrapper; - readonly readSectionMember: (state: ParseState, parser: Parser) => Ast.SectionMember; + readonly readSectionDocument: (state: ParseState, parser: Parser) => Promise; + readonly readSectionMembers: (state: ParseState, parser: Parser) => Promise>; + readonly readSectionMember: (state: ParseState, parser: Parser) => Promise; // 12.2.3.1 Expressions - readonly readNullCoalescingExpression: (state: ParseState, Parser: Parser) => Ast.TExpression; - readonly readExpression: (state: ParseState, parser: Parser) => Ast.TExpression; + readonly readNullCoalescingExpression: (state: ParseState, Parser: Parser) => Promise; + readonly readExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.2 Logical expressions - readonly readLogicalExpression: (state: ParseState, parser: Parser) => Ast.TLogicalExpression; + readonly readLogicalExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.3 Is expression - readonly readIsExpression: (state: ParseState, parser: Parser) => Ast.TIsExpression; - readonly readNullablePrimitiveType: (state: ParseState, parser: Parser) => Ast.TNullablePrimitiveType; + readonly readIsExpression: (state: ParseState, parser: Parser) => Promise; + readonly readNullablePrimitiveType: (state: ParseState, parser: Parser) => Promise; // 12.2.3.4 As expression - readonly readAsExpression: (state: ParseState, parser: Parser) => Ast.TAsExpression; + readonly readAsExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.5 Equality expression - readonly readEqualityExpression: (state: ParseState, parser: Parser) => Ast.TEqualityExpression; + readonly readEqualityExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.6 Relational expression - readonly readRelationalExpression: (state: ParseState, parser: Parser) => Ast.TRelationalExpression; + readonly readRelationalExpression: (state: ParseState, parser: Parser) => 
Promise; // 12.2.3.7 Arithmetic expressions - readonly readArithmeticExpression: (state: ParseState, parser: Parser) => Ast.TArithmeticExpression; + readonly readArithmeticExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.8 Metadata expression - readonly readMetadataExpression: (state: ParseState, parser: Parser) => Ast.TMetadataExpression; + readonly readMetadataExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.9 Unary expression - readonly readUnaryExpression: (state: ParseState, parser: Parser) => Ast.TUnaryExpression; + readonly readUnaryExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.10 Primary expression - readonly readPrimaryExpression: (state: ParseState, parser: Parser) => Ast.TPrimaryExpression; + readonly readPrimaryExpression: (state: ParseState, parser: Parser) => Promise; readonly readRecursivePrimaryExpression: ( state: ParseState, parser: Parser, head: Ast.TPrimaryExpression, - ) => Ast.RecursivePrimaryExpression; + ) => Promise; // 12.2.3.11 Literal expression readonly readLiteralExpression: (state: ParseState, parser: Parser) => Ast.LiteralExpression; @@ -90,96 +89,106 @@ export interface Parser { readonly readIdentifierExpression: (state: ParseState, parser: Parser) => Ast.IdentifierExpression; // 12.2.3.14 Parenthesized expression - readonly readParenthesizedExpression: (state: ParseState, parser: Parser) => Ast.ParenthesizedExpression; + readonly readParenthesizedExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.15 Not-implemented expression readonly readNotImplementedExpression: (state: ParseState, parser: Parser) => Ast.NotImplementedExpression; // 12.2.3.16 Invoke expression - readonly readInvokeExpression: (state: ParseState, parser: Parser) => Ast.InvokeExpression; + readonly readInvokeExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.17 List expression - readonly readListExpression: (state: ParseState, parser: Parser) => Ast.ListExpression; - readonly readListItem: (state: ParseState, parser: Parser) => Ast.TListItem; + readonly readListExpression: (state: ParseState, parser: Parser) => Promise; + readonly readListItem: (state: ParseState, parser: Parser) => Promise; // 12.2.3.18 Record expression - readonly readRecordExpression: (state: ParseState, parser: Parser) => Ast.RecordExpression; + readonly readRecordExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.19 Item access expression - readonly readItemAccessExpression: (state: ParseState, parser: Parser) => Ast.ItemAccessExpression; + readonly readItemAccessExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.20 Field access expression - readonly readFieldSelection: (state: ParseState, parser: Parser) => Ast.FieldSelector; - readonly readFieldProjection: (state: ParseState, parser: Parser) => Ast.FieldProjection; - readonly readFieldSelector: (state: ParseState, parser: Parser, allowOptional: boolean) => Ast.FieldSelector; + readonly readFieldSelection: (state: ParseState, parser: Parser) => Promise; + readonly readFieldProjection: (state: ParseState, parser: Parser) => Promise; + readonly readFieldSelector: ( + state: ParseState, + parser: Parser, + allowOptional: boolean, + ) => Promise; // 12.2.3.21 Function expression - readonly readFunctionExpression: (state: ParseState, parser: Parser) => Ast.FunctionExpression; + readonly readFunctionExpression: (state: ParseState, parser: Parser) => Promise; readonly readParameterList: ( state: ParseState, parser: Parser, - ) => 
Ast.IParameterList; - readonly readAsType: (state: ParseState, parser: Parser) => Ast.AsType; + ) => Promise>; + readonly readAsType: (state: ParseState, parser: Parser) => Promise; // 12.2.3.22 Each expression - readonly readEachExpression: (state: ParseState, parser: Parser) => Ast.EachExpression; + readonly readEachExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.23 Let expression - readonly readLetExpression: (state: ParseState, parser: Parser) => Ast.LetExpression; + readonly readLetExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.24 If expression - readonly readIfExpression: (state: ParseState, parser: Parser) => Ast.IfExpression; + readonly readIfExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.25 Type expression - readonly readTypeExpression: (state: ParseState, parser: Parser) => Ast.TTypeExpression; - readonly readType: (state: ParseState, parser: Parser) => Ast.TType; - readonly readPrimaryType: (state: ParseState, parser: Parser) => Ast.TPrimaryType; - readonly readRecordType: (state: ParseState, parser: Parser) => Ast.RecordType; - readonly readTableType: (state: ParseState, parser: Parser) => Ast.TableType; + readonly readTypeExpression: (state: ParseState, parser: Parser) => Promise; + readonly readType: (state: ParseState, parser: Parser) => Promise; + readonly readPrimaryType: (state: ParseState, parser: Parser) => Promise; + readonly readRecordType: (state: ParseState, parser: Parser) => Promise; + readonly readTableType: (state: ParseState, parser: Parser) => Promise; readonly readFieldSpecificationList: ( state: ParseState, parser: Parser, allowOpenMarker: boolean, testPostCommaError: (state: ParseState) => ParseError.TInnerParseError | undefined, - ) => Ast.FieldSpecificationList; - readonly readListType: (state: ParseState, parser: Parser) => Ast.ListType; - readonly readFunctionType: (state: ParseState, parser: Parser) => Ast.FunctionType; - readonly readParameterSpecificationList: (state: ParseState, parser: Parser) => Ast.IParameterList; - readonly readNullableType: (state: ParseState, parser: Parser) => Ast.NullableType; + ) => Promise; + readonly readListType: (state: ParseState, parser: Parser) => Promise; + readonly readFunctionType: (state: ParseState, parser: Parser) => Promise; + readonly readParameterSpecificationList: ( + state: ParseState, + parser: Parser, + ) => Promise>; + readonly readNullableType: (state: ParseState, parser: Parser) => Promise; // 12.2.3.26 Error raising expression - readonly readErrorRaisingExpression: (state: ParseState, parser: Parser) => Ast.ErrorRaisingExpression; + readonly readErrorRaisingExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.3.27 Error handling expression - readonly readErrorHandlingExpression: (state: ParseState, parser: Parser) => Ast.ErrorHandlingExpression; + readonly readErrorHandlingExpression: (state: ParseState, parser: Parser) => Promise; // 12.2.4 Literal Attributes - readonly readRecordLiteral: (state: ParseState, parser: Parser) => Ast.RecordLiteral; + readonly readRecordLiteral: (state: ParseState, parser: Parser) => Promise; readonly readFieldNamePairedAnyLiterals: ( state: ParseState, parser: Parser, onePairRequired: boolean, testPostCommaError: (state: ParseState) => ParseError.TInnerParseError | undefined, - ) => Ast.ICsvArray; - readonly readListLiteral: (state: ParseState, parser: Parser) => Ast.ListLiteral; - readonly readAnyLiteral: (state: ParseState, parser: Parser) => Ast.TAnyLiteral; - readonly 
readPrimitiveType: (state: ParseState, parser: Parser) => Ast.PrimitiveType; + ) => Promise>; + readonly readListLiteral: (state: ParseState, parser: Parser) => Promise; + readonly readAnyLiteral: (state: ParseState, parser: Parser) => Promise; + readonly readPrimitiveType: (state: ParseState, parser: Parser) => Promise; readonly readIdentifierPairedExpressions: ( state: ParseState, parser: Parser, onePairRequired: boolean, testPostCommaError: (state: ParseState) => ParseError.TInnerParseError | undefined, - ) => Ast.ICsvArray; - readonly readIdentifierPairedExpression: (state: ParseState, parser: Parser) => Ast.IdentifierPairedExpression; + ) => Promise>; + readonly readIdentifierPairedExpression: ( + state: ParseState, + parser: Parser, + ) => Promise; readonly readGeneralizedIdentifierPairedExpressions: ( state: ParseState, parser: Parser, onePairRequired: boolean, testPostCommaError: (state: ParseState) => ParseError.TInnerParseError | undefined, - ) => Ast.ICsvArray; + ) => Promise>; readonly readGeneralizedIdentifierPairedExpression: ( state: ParseState, parser: Parser, - ) => Ast.GeneralizedIdentifierPairedExpression; + ) => Promise; } diff --git a/src/powerquery-parser/parser/parser/parserUtils.ts b/src/powerquery-parser/parser/parser/parserUtils.ts index 3bc24fdd..8546505e 100644 --- a/src/powerquery-parser/parser/parser/parserUtils.ts +++ b/src/powerquery-parser/parser/parser/parserUtils.ts @@ -11,18 +11,18 @@ import { LexerSnapshot } from "../../lexer"; import { ParseError } from ".."; import { ParseSettings } from "../../settings"; -export function tryParse(parseSettings: ParseSettings, lexerSnapshot: LexerSnapshot): TriedParse { - const maybeParserEntryPointFn: ((state: ParseState, parser: Parser) => Ast.TNode) | undefined = +export async function tryParse(parseSettings: ParseSettings, lexerSnapshot: LexerSnapshot): Promise { + const maybeParserEntryPointFn: ((state: ParseState, parser: Parser) => Promise) | undefined = parseSettings?.maybeParserEntryPointFn; if (maybeParserEntryPointFn === undefined) { - return tryParseDocument(parseSettings, lexerSnapshot); + return await tryParseDocument(parseSettings, lexerSnapshot); } const parseState: ParseState = parseSettings.createParseState(lexerSnapshot, defaultOverrides(parseSettings)); try { - const root: Ast.TNode = maybeParserEntryPointFn(parseState, parseSettings.parser); + const root: Ast.TNode = await maybeParserEntryPointFn(parseState, parseSettings.parser); ParseStateUtils.assertIsDoneParsing(parseState); return ResultUtils.boxOk({ @@ -37,7 +37,10 @@ export function tryParse(parseSettings: ParseSettings, lexerSnapshot: LexerSnaps } } -export function tryParseDocument(parseSettings: ParseSettings, lexerSnapshot: LexerSnapshot): TriedParse { +export async function tryParseDocument( + parseSettings: ParseSettings, + lexerSnapshot: LexerSnapshot, +): Promise { let root: Ast.TNode; const expressionDocumentState: ParseState = parseSettings.createParseState( @@ -46,7 +49,7 @@ export function tryParseDocument(parseSettings: ParseSettings, lexerSnapshot: Le ); try { - root = parseSettings.parser.readExpression(expressionDocumentState, parseSettings.parser); + root = await parseSettings.parser.readExpression(expressionDocumentState, parseSettings.parser); ParseStateUtils.assertIsDoneParsing(expressionDocumentState); return ResultUtils.boxOk({ @@ -63,7 +66,7 @@ export function tryParseDocument(parseSettings: ParseSettings, lexerSnapshot: Le ); try { - root = parseSettings.parser.readSectionDocument(sectionDocumentState, 
parseSettings.parser); + root = await parseSettings.parser.readSectionDocument(sectionDocumentState, parseSettings.parser); ParseStateUtils.assertIsDoneParsing(sectionDocumentState); return ResultUtils.boxOk({ @@ -92,7 +95,6 @@ export function tryParseDocument(parseSettings: ParseSettings, lexerSnapshot: Le // If you have a custom parser + parser state, // then you'll have to create your own (create|restore)Checkpoint functions. -// See `benchmark.ts` for an example. // // Due to performance reasons the backup no longer can include a naive deep copy of the context state. // Instead it's assumed that a backup is made immediately before a try/catch read block. @@ -100,7 +102,8 @@ export function tryParseDocument(parseSettings: ParseSettings, lexerSnapshot: Le // Therefore we only care about the delta between before and after the try/catch block. // Thanks to the invariants above and the fact the ids for nodes are an auto-incrementing integer // we can easily just drop all delete all context nodes past the id of when the backup was created. -export function createCheckpoint(state: ParseState): ParseStateCheckpoint { +// eslint-disable-next-line require-await +export async function createCheckpoint(state: ParseState): Promise { return { tokenIndex: state.tokenIndex, contextStateIdCounter: state.contextState.idCounter, @@ -110,10 +113,9 @@ export function createCheckpoint(state: ParseState): ParseStateCheckpoint { // If you have a custom parser + parser state, // then you'll have to create your own (create|restore)Checkpoint functions. -// See `benchmark.ts` for an example. -// // See createCheckpoint above for more information. -export function restoreCheckpoint(state: ParseState, checkpoint: ParseStateCheckpoint): void { +// eslint-disable-next-line require-await +export async function restoreCheckpoint(state: ParseState, checkpoint: ParseStateCheckpoint): Promise { state.tokenIndex = checkpoint.tokenIndex; state.maybeCurrentToken = state.lexerSnapshot.tokens[state.tokenIndex]; state.maybeCurrentTokenKind = state.maybeCurrentToken?.kind; diff --git a/src/powerquery-parser/parser/parsers/combinatorialParser.ts b/src/powerquery-parser/parser/parsers/combinatorialParser.ts index 7b830ccd..f6a57f29 100644 --- a/src/powerquery-parser/parser/parsers/combinatorialParser.ts +++ b/src/powerquery-parser/parser/parsers/combinatorialParser.ts @@ -31,41 +31,41 @@ export const CombinatorialParser: Parser = { // 12.2.3.2 Logical expressions readLogicalExpression: (state: ParseState, parser: Parser) => - readBinOpExpression(state, parser, Ast.NodeKind.LogicalExpression) as Ast.TLogicalExpression, + readBinOpExpression(state, parser, Ast.NodeKind.LogicalExpression) as Promise, // 12.2.3.3 Is expression readIsExpression: (state: ParseState, parser: Parser) => - readBinOpExpression(state, parser, Ast.NodeKind.IsExpression) as Ast.TIsExpression, + readBinOpExpression(state, parser, Ast.NodeKind.IsExpression) as Promise, // 12.2.3.4 As expression readAsExpression: (state: ParseState, parser: Parser) => - readBinOpExpression(state, parser, Ast.NodeKind.AsExpression) as Ast.TAsExpression, + readBinOpExpression(state, parser, Ast.NodeKind.AsExpression) as Promise, // 12.2.3.5 Equality expression readEqualityExpression: (state: ParseState, parser: Parser) => - readBinOpExpression(state, parser, Ast.NodeKind.EqualityExpression) as Ast.TEqualityExpression, + readBinOpExpression(state, parser, Ast.NodeKind.EqualityExpression) as Promise, // 12.2.3.6 Relational expression readRelationalExpression: (state: ParseState, 
parser: Parser) => - readBinOpExpression(state, parser, Ast.NodeKind.RelationalExpression) as Ast.TRelationalExpression, + readBinOpExpression(state, parser, Ast.NodeKind.RelationalExpression) as Promise, // 12.2.3.7 Arithmetic expressions readArithmeticExpression: (state: ParseState, parser: Parser) => - readBinOpExpression(state, parser, Ast.NodeKind.ArithmeticExpression) as Ast.TArithmeticExpression, + readBinOpExpression(state, parser, Ast.NodeKind.ArithmeticExpression) as Promise, // 12.2.3.8 Metadata expression readMetadataExpression: (state: ParseState, parser: Parser) => - readBinOpExpression(state, parser, Ast.NodeKind.MetadataExpression) as Ast.TMetadataExpression, + readBinOpExpression(state, parser, Ast.NodeKind.MetadataExpression) as Promise, // 12.2.3.9 Unary expression readUnaryExpression, }; -function readBinOpExpression( +async function readBinOpExpression( state: ParseState, parser: Parser, nodeKind: Ast.NodeKind, -): Ast.TBinOpExpression | Ast.TUnaryExpression | Ast.TNullablePrimitiveType { +): Promise { state.maybeCancellationToken?.throwIfCancelled(); ParseStateUtils.startContext(state, nodeKind); const placeholderContextId: number = Assert.asDefined(state.maybeCurrentContextNode).id; @@ -76,7 +76,7 @@ function readBinOpExpression( let operatorConstants: Ast.IConstant[] = []; let expressions: (Ast.TBinOpExpression | Ast.TUnaryExpression | Ast.TNullablePrimitiveType)[] = [ - parser.readUnaryExpression(state, parser), + await parser.readUnaryExpression(state, parser), ]; let maybeOperator: Constant.TBinOpExpressionOperator | undefined = @@ -97,11 +97,13 @@ function readBinOpExpression( switch (operator) { case Constant.KeywordConstant.As: case Constant.KeywordConstant.Is: - expressions.push(parser.readNullablePrimitiveType(state, parser)); + // eslint-disable-next-line no-await-in-loop + expressions.push(await parser.readNullablePrimitiveType(state, parser)); break; default: - expressions.push(parser.readUnaryExpression(state, parser)); + // eslint-disable-next-line no-await-in-loop + expressions.push(await parser.readUnaryExpression(state, parser)); } maybeOperator = ConstantUtils.maybeBinOpExpressionOperatorKindFrom(state.maybeCurrentTokenKind); @@ -275,7 +277,7 @@ function binOpExpressionNodeKindFrom(operator: Constant.TBinOpExpressionOperator } } -function readUnaryExpression(state: ParseState, parser: Parser): Ast.TUnaryExpression { +async function readUnaryExpression(state: ParseState, parser: Parser): Promise { state.maybeCancellationToken?.throwIfCancelled(); let maybePrimaryExpression: Ast.TPrimaryExpression | undefined; @@ -289,11 +291,11 @@ function readUnaryExpression(state: ParseState, parser: Parser): Ast.TUnaryExpre break; case Token.TokenKind.LeftParenthesis: - maybePrimaryExpression = NaiveParseSteps.readParenthesizedExpression(state, parser); + maybePrimaryExpression = await NaiveParseSteps.readParenthesizedExpression(state, parser); break; case Token.TokenKind.LeftBracket: - maybePrimaryExpression = DisambiguationUtils.readAmbiguousBracket(state, parser, [ + maybePrimaryExpression = await DisambiguationUtils.readAmbiguousBracket(state, parser, [ Disambiguation.BracketDisambiguation.RecordExpression, Disambiguation.BracketDisambiguation.FieldSelection, Disambiguation.BracketDisambiguation.FieldProjection, @@ -302,7 +304,7 @@ function readUnaryExpression(state: ParseState, parser: Parser): Ast.TUnaryExpre break; case Token.TokenKind.LeftBrace: - maybePrimaryExpression = NaiveParseSteps.readListExpression(state, parser); + maybePrimaryExpression = await 
NaiveParseSteps.readListExpression(state, parser); break; case Token.TokenKind.Ellipsis: @@ -320,7 +322,7 @@ function readUnaryExpression(state: ParseState, parser: Parser): Ast.TUnaryExpre // TypeExpression case Token.TokenKind.KeywordType: - return NaiveParseSteps.readTypeExpression(state, parser); + return await NaiveParseSteps.readTypeExpression(state, parser); case Token.TokenKind.KeywordHashSections: case Token.TokenKind.KeywordHashShared: @@ -336,14 +338,14 @@ function readUnaryExpression(state: ParseState, parser: Parser): Ast.TUnaryExpre // Let Naive throw an error. default: - return NaiveParseSteps.readUnaryExpression(state, parser); + return await NaiveParseSteps.readUnaryExpression(state, parser); } // We should only reach this code block if a primary expression was read. const primaryExpression: Ast.TPrimaryExpression = maybePrimaryExpression; if (ParseStateUtils.isRecursivePrimaryExpressionNext(state, state.tokenIndex)) { - return parser.readRecursivePrimaryExpression(state, parser, primaryExpression); + return await parser.readRecursivePrimaryExpression(state, parser, primaryExpression); } else { return primaryExpression; } diff --git a/src/powerquery-parser/parser/parsers/naive.ts b/src/powerquery-parser/parser/parsers/naive.ts index 554e914a..d9ad1ac5 100644 --- a/src/powerquery-parser/parser/parsers/naive.ts +++ b/src/powerquery-parser/parser/parsers/naive.ts @@ -73,7 +73,11 @@ export function readIdentifier(state: ParseState, _parser: Parser): Ast.Identifi } // This behavior matches the C# parser and not the language specification. -export function readGeneralizedIdentifier(state: ParseState, _parser: Parser): Ast.GeneralizedIdentifier { +// eslint-disable-next-line require-await +export async function readGeneralizedIdentifier( + state: ParseState, + _parser: Parser, +): Promise { const nodeKind: Ast.NodeKind.GeneralizedIdentifier = Ast.NodeKind.GeneralizedIdentifier; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readGeneralizedIdentifier.name, { @@ -185,7 +189,7 @@ export function readKeyword(state: ParseState, _parser: Parser): Ast.IdentifierE // ---------- 12.2.1 Documents ---------- // -------------------------------------- -export function readDocument(state: ParseState, parser: Parser): Ast.TDocument { +export async function readDocument(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readDocument.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -198,14 +202,14 @@ export function readDocument(state: ParseState, parser: Parser): Ast.TDocument { // If Expression document fails (including UnusedTokensRemainError) then try parsing a SectionDocument. // If both fail then return the error which parsed more tokens. try { - document = parser.readExpression(state, parser); + document = await parser.readExpression(state, parser); ParseStateUtils.assertIsDoneParsing(state); } catch (expressionError) { Assert.isInstanceofError(expressionError); // Fast backup deletes context state, but we want to preserve it for the case // where both parsing an expression and section document error out. - const expressionCheckpoint: ParseStateCheckpoint = parser.createCheckpoint(state); + const expressionCheckpoint: ParseStateCheckpoint = await parser.createCheckpoint(state); const expressionErrorContextState: ParseContext.State = state.contextState; // Reset the parser's state. 
@@ -219,7 +223,7 @@ export function readDocument(state: ParseState, parser: Parser): Ast.TDocument { } try { - document = readSectionDocument(state, parser); + document = await readSectionDocument(state, parser); ParseStateUtils.assertIsDoneParsing(state); } catch (sectionError) { Assert.isInstanceofError(sectionError); @@ -228,7 +232,8 @@ export function readDocument(state: ParseState, parser: Parser): Ast.TDocument { if (expressionCheckpoint.tokenIndex > /* sectionErrorState */ state.tokenIndex) { triedError = expressionError; - parser.restoreCheckpoint(state, expressionCheckpoint); + await parser.restoreCheckpoint(state, expressionCheckpoint); + // eslint-disable-next-line require-atomic-updates state.contextState = expressionErrorContextState; } else { triedError = sectionError; @@ -255,7 +260,7 @@ export function readDocument(state: ParseState, parser: Parser): Ast.TDocument { // ---------- 12.2.2 Section Documents ---------- // ---------------------------------------------- -export function readSectionDocument(state: ParseState, parser: Parser): Ast.Section { +export async function readSectionDocument(state: ParseState, parser: Parser): Promise { const nodeKind: Ast.NodeKind.Section = Ast.NodeKind.Section; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readSectionDocument.name, { @@ -265,7 +270,7 @@ export function readSectionDocument(state: ParseState, parser: Parser): Ast.Sect state.maybeCancellationToken?.throwIfCancelled(); ParseStateUtils.startContext(state, nodeKind); - const maybeLiteralAttributes: Ast.RecordLiteral | undefined = maybeReadLiteralAttributes(state, parser); + const maybeLiteralAttributes: Ast.RecordLiteral | undefined = await maybeReadLiteralAttributes(state, parser); const sectionConstant: Ast.IConstant = readTokenKindAsConstant( state, @@ -287,7 +292,7 @@ export function readSectionDocument(state: ParseState, parser: Parser): Ast.Sect Constant.MiscConstant.Semicolon, ); - const sectionMembers: Ast.IArrayWrapper = parser.readSectionMembers(state, parser); + const sectionMembers: Ast.IArrayWrapper = await parser.readSectionMembers(state, parser); const section: Ast.Section = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -306,7 +311,10 @@ export function readSectionDocument(state: ParseState, parser: Parser): Ast.Sect return section; } -export function readSectionMembers(state: ParseState, parser: Parser): Ast.IArrayWrapper { +export async function readSectionMembers( + state: ParseState, + parser: Parser, +): Promise> { const nodeKind: Ast.NodeKind.ArrayWrapper = Ast.NodeKind.ArrayWrapper; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readSectionMembers.name, { @@ -320,7 +328,8 @@ export function readSectionMembers(state: ParseState, parser: Parser): Ast.IArra const sectionMembers: Ast.SectionMember[] = []; while (state.tokenIndex < totalTokens) { - sectionMembers.push(parser.readSectionMember(state, parser)); + // eslint-disable-next-line no-await-in-loop + sectionMembers.push(await parser.readSectionMember(state, parser)); } const sectionMemberArray: Ast.IArrayWrapper = { @@ -336,7 +345,7 @@ export function readSectionMembers(state: ParseState, parser: Parser): Ast.IArra return sectionMemberArray; } -export function readSectionMember(state: ParseState, parser: Parser): Ast.SectionMember { +export async function readSectionMember(state: ParseState, parser: Parser): Promise { const nodeKind: Ast.NodeKind.SectionMember = Ast.NodeKind.SectionMember; const trace: Trace = 
state.traceManager.entry(NaiveTraceConstant.Parse, readSectionMember.name, { @@ -346,12 +355,15 @@ export function readSectionMember(state: ParseState, parser: Parser): Ast.Sectio state.maybeCancellationToken?.throwIfCancelled(); ParseStateUtils.startContext(state, nodeKind); - const maybeLiteralAttributes: Ast.RecordLiteral | undefined = maybeReadLiteralAttributes(state, parser); + const maybeLiteralAttributes: Ast.RecordLiteral | undefined = await maybeReadLiteralAttributes(state, parser); const maybeSharedConstant: Ast.IConstant | undefined = maybeReadTokenKindAsConstant(state, Token.TokenKind.KeywordShared, Constant.KeywordConstant.Shared); - const namePairedExpression: Ast.IdentifierPairedExpression = parser.readIdentifierPairedExpression(state, parser); + const namePairedExpression: Ast.IdentifierPairedExpression = await parser.readIdentifierPairedExpression( + state, + parser, + ); const semicolonConstant: Ast.IConstant = readTokenKindAsConstant( state, @@ -383,14 +395,14 @@ export function readSectionMember(state: ParseState, parser: Parser): Ast.Sectio // ---------- NullCoalescing ---------- // ------------------------------------ -export function readNullCoalescingExpression(state: ParseState, parser: Parser): Ast.TExpression { +export async function readNullCoalescingExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readNullCoalescingExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const expression: Ast.TExpression = recursiveReadBinOpExpression< + const expression: Ast.TExpression = await recursiveReadBinOpExpression< Ast.NodeKind.NullCoalescingExpression, Ast.TLogicalExpression, Constant.MiscConstant.NullCoalescingOperator, @@ -411,7 +423,7 @@ export function readNullCoalescingExpression(state: ParseState, parser: Parser): return expression; } -export function readExpression(state: ParseState, parser: Parser): Ast.TExpression { +export async function readExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -422,31 +434,31 @@ export function readExpression(state: ParseState, parser: Parser): Ast.TExpressi switch (state.maybeCurrentTokenKind) { case Token.TokenKind.KeywordEach: - expression = parser.readEachExpression(state, parser); + expression = await parser.readEachExpression(state, parser); break; case Token.TokenKind.KeywordLet: - expression = parser.readLetExpression(state, parser); + expression = await parser.readLetExpression(state, parser); break; case Token.TokenKind.KeywordIf: - expression = parser.readIfExpression(state, parser); + expression = await parser.readIfExpression(state, parser); break; case Token.TokenKind.KeywordError: - expression = parser.readErrorRaisingExpression(state, parser); + expression = await parser.readErrorRaisingExpression(state, parser); break; case Token.TokenKind.KeywordTry: - expression = parser.readErrorHandlingExpression(state, parser); + expression = await parser.readErrorHandlingExpression(state, parser); break; case Token.TokenKind.LeftParenthesis: - expression = DisambiguationUtils.readAmbiguousParenthesis(state, parser); + expression = await DisambiguationUtils.readAmbiguousParenthesis(state, parser); break; default: - expression = parser.readNullCoalescingExpression(state, parser); + expression = await 
parser.readNullCoalescingExpression(state, parser); break; } @@ -459,14 +471,14 @@ export function readExpression(state: ParseState, parser: Parser): Ast.TExpressi // ---------- 12.2.3.2 Logical expressions ---------- // -------------------------------------------------- -export function readLogicalExpression(state: ParseState, parser: Parser): Ast.TLogicalExpression { +export async function readLogicalExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readLogicalExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const logicalExpression: Ast.TLogicalExpression = recursiveReadBinOpExpression< + const logicalExpression: Ast.TLogicalExpression = await recursiveReadBinOpExpression< Ast.NodeKind.LogicalExpression, Ast.TLogicalExpression, Constant.LogicalOperator, @@ -489,14 +501,14 @@ export function readLogicalExpression(state: ParseState, parser: Parser): Ast.TL // ---------- 12.2.3.3 Is expression ---------- // -------------------------------------------- -export function readIsExpression(state: ParseState, parser: Parser): Ast.TIsExpression { +export async function readIsExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readIsExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const isExpression: Ast.TIsExpression = recursiveReadBinOpExpression< + const isExpression: Ast.TIsExpression = await recursiveReadBinOpExpression< Ast.NodeKind.IsExpression, Ast.TAsExpression, Constant.KeywordConstant.Is, @@ -516,7 +528,10 @@ export function readIsExpression(state: ParseState, parser: Parser): Ast.TIsExpr } // sub-item of 12.2.3.3 Is expression -export function readNullablePrimitiveType(state: ParseState, parser: Parser): Ast.TNullablePrimitiveType { +export async function readNullablePrimitiveType( + state: ParseState, + parser: Parser, +): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readNullablePrimitiveType.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -526,14 +541,14 @@ export function readNullablePrimitiveType(state: ParseState, parser: Parser): As let nullablePrimitiveType: Ast.TNullablePrimitiveType; if (ParseStateUtils.isOnConstantKind(state, Constant.LanguageConstant.Nullable)) { - nullablePrimitiveType = readPairedConstant( + nullablePrimitiveType = await readPairedConstant( state, Ast.NodeKind.NullablePrimitiveType, () => readConstantKind(state, Constant.LanguageConstant.Nullable), () => parser.readPrimitiveType(state, parser), ); } else { - nullablePrimitiveType = parser.readPrimitiveType(state, parser); + nullablePrimitiveType = await parser.readPrimitiveType(state, parser); } trace.exit({ [NaiveTraceConstant.TokenIndex]: state.tokenIndex }); @@ -545,14 +560,14 @@ export function readNullablePrimitiveType(state: ParseState, parser: Parser): As // ---------- 12.2.3.4 As expression ---------- // -------------------------------------------- -export function readAsExpression(state: ParseState, parser: Parser): Ast.TAsExpression { +export async function readAsExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readAsExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const asExpression: 
Ast.TAsExpression = recursiveReadBinOpExpression< + const asExpression: Ast.TAsExpression = await recursiveReadBinOpExpression< Ast.NodeKind.AsExpression, Ast.TEqualityExpression, Constant.KeywordConstant.As, @@ -575,14 +590,14 @@ export function readAsExpression(state: ParseState, parser: Parser): Ast.TAsExpr // ---------- 12.2.3.5 Equality expression ---------- // -------------------------------------------------- -export function readEqualityExpression(state: ParseState, parser: Parser): Ast.TEqualityExpression { +export async function readEqualityExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readEqualityExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const equalityExpression: Ast.TEqualityExpression = recursiveReadBinOpExpression< + const equalityExpression: Ast.TEqualityExpression = await recursiveReadBinOpExpression< Ast.NodeKind.EqualityExpression, Ast.TEqualityExpression, Constant.EqualityOperator, @@ -605,14 +620,14 @@ export function readEqualityExpression(state: ParseState, parser: Parser): Ast.T // ---------- 12.2.3.6 Relational expression ---------- // ---------------------------------------------------- -export function readRelationalExpression(state: ParseState, parser: Parser): Ast.TRelationalExpression { +export async function readRelationalExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readRelationalExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const relationalExpression: Ast.TRelationalExpression = recursiveReadBinOpExpression< + const relationalExpression: Ast.TRelationalExpression = await recursiveReadBinOpExpression< Ast.NodeKind.RelationalExpression, Ast.TArithmeticExpression, Constant.RelationalOperator, @@ -635,14 +650,14 @@ export function readRelationalExpression(state: ParseState, parser: Parser): Ast // ---------- 12.2.3.7 Arithmetic expressions ---------- // ----------------------------------------------------- -export function readArithmeticExpression(state: ParseState, parser: Parser): Ast.TArithmeticExpression { +export async function readArithmeticExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readArithmeticExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const arithmeticExpression: Ast.TArithmeticExpression = recursiveReadBinOpExpression< + const arithmeticExpression: Ast.TArithmeticExpression = await recursiveReadBinOpExpression< Ast.NodeKind.ArithmeticExpression, Ast.TMetadataExpression, Constant.ArithmeticOperator, @@ -665,7 +680,7 @@ export function readArithmeticExpression(state: ParseState, parser: Parser): Ast // ---------- 12.2.3.8 Metadata expression ---------- // -------------------------------------------------- -export function readMetadataExpression(state: ParseState, parser: Parser): Ast.TMetadataExpression { +export async function readMetadataExpression(state: ParseState, parser: Parser): Promise { const nodeKind: Ast.NodeKind.MetadataExpression = Ast.NodeKind.MetadataExpression; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readMetadataExpression.name, { @@ -675,7 +690,7 @@ export function readMetadataExpression(state: ParseState, parser: 
Parser): Ast.T state.maybeCancellationToken?.throwIfCancelled(); ParseStateUtils.startContext(state, nodeKind); - const left: Ast.TUnaryExpression = parser.readUnaryExpression(state, parser); + const left: Ast.TUnaryExpression = await parser.readUnaryExpression(state, parser); const maybeMetaConstant: Ast.IConstant | undefined = maybeReadTokenKindAsConstant( state, @@ -685,7 +700,7 @@ export function readMetadataExpression(state: ParseState, parser: Parser): Ast.T if (maybeMetaConstant !== undefined) { const operatorConstant: Ast.IConstant = maybeMetaConstant; - const right: Ast.TUnaryExpression = parser.readUnaryExpression(state, parser); + const right: Ast.TUnaryExpression = await parser.readUnaryExpression(state, parser); const metadataExpression: Ast.MetadataExpression = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -720,7 +735,7 @@ export function readMetadataExpression(state: ParseState, parser: Parser): Ast.T // ---------- 12.2.3.9 Unary expression ---------- // ----------------------------------------------- -export function readUnaryExpression(state: ParseState, parser: Parser): Ast.TUnaryExpression { +export async function readUnaryExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readUnaryExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -765,7 +780,7 @@ export function readUnaryExpression(state: ParseState, parser: Parser): Ast.TUna ParseStateUtils.endContext(state, operators); - const typeExpression: Ast.TTypeExpression = parser.readTypeExpression(state, parser); + const typeExpression: Ast.TTypeExpression = await parser.readTypeExpression(state, parser); const unaryExpression: Ast.UnaryExpression = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -789,7 +804,7 @@ export function readUnaryExpression(state: ParseState, parser: Parser): Ast.TUna // ---------- 12.2.3.10 Primary expression ---------- // -------------------------------------------------- -export function readPrimaryExpression(state: ParseState, parser: Parser): Ast.TPrimaryExpression { +export async function readPrimaryExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readPrimaryExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -807,11 +822,11 @@ export function readPrimaryExpression(state: ParseState, parser: Parser): Ast.TP } else { switch (maybeCurrentTokenKind) { case Token.TokenKind.LeftParenthesis: - primaryExpression = parser.readParenthesizedExpression(state, parser); + primaryExpression = await parser.readParenthesizedExpression(state, parser); break; case Token.TokenKind.LeftBracket: - primaryExpression = DisambiguationUtils.readAmbiguousBracket(state, parser, [ + primaryExpression = await DisambiguationUtils.readAmbiguousBracket(state, parser, [ Disambiguation.BracketDisambiguation.FieldProjection, Disambiguation.BracketDisambiguation.FieldSelection, Disambiguation.BracketDisambiguation.RecordExpression, @@ -820,7 +835,7 @@ export function readPrimaryExpression(state: ParseState, parser: Parser): Ast.TP break; case Token.TokenKind.LeftBrace: - primaryExpression = parser.readListExpression(state, parser); + primaryExpression = await parser.readListExpression(state, parser); break; case Token.TokenKind.Ellipsis: @@ -861,11 +876,11 @@ export function readPrimaryExpression(state: ParseState, parser: Parser): Ast.TP } } -export function readRecursivePrimaryExpression( 
+export async function readRecursivePrimaryExpression( state: ParseState, parser: Parser, head: Ast.TPrimaryExpression, -): Ast.RecursivePrimaryExpression { +): Promise { const nodeKind: Ast.NodeKind.RecursivePrimaryExpression = Ast.NodeKind.RecursivePrimaryExpression; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readRecursivePrimaryExpression.name, { @@ -927,18 +942,21 @@ export function readRecursivePrimaryExpression( const maybeCurrentTokenKind: Token.TokenKind | undefined = state.maybeCurrentTokenKind; if (maybeCurrentTokenKind === Token.TokenKind.LeftParenthesis) { - recursiveExpressions.push(parser.readInvokeExpression(state, parser)); + // eslint-disable-next-line no-await-in-loop + recursiveExpressions.push(await parser.readInvokeExpression(state, parser)); } else if (maybeCurrentTokenKind === Token.TokenKind.LeftBrace) { - recursiveExpressions.push(parser.readItemAccessExpression(state, parser)); + // eslint-disable-next-line no-await-in-loop + recursiveExpressions.push(await parser.readItemAccessExpression(state, parser)); } else if (maybeCurrentTokenKind === Token.TokenKind.LeftBracket) { - const bracketExpression: Ast.TFieldAccessExpression = DisambiguationUtils.readAmbiguousBracket( + // eslint-disable-next-line no-await-in-loop + const bracketExpression: Ast.TFieldAccessExpression = (await DisambiguationUtils.readAmbiguousBracket( state, parser, [ Disambiguation.BracketDisambiguation.FieldSelection, Disambiguation.BracketDisambiguation.FieldProjection, ], - ) as Ast.TFieldAccessExpression; + )) as Ast.TFieldAccessExpression; recursiveExpressions.push(bracketExpression); } else { @@ -1073,14 +1091,17 @@ export function readIdentifierExpression(state: ParseState, parser: Parser): Ast // ---------- 12.2.3.14 Parenthesized expression ---------- // -------------------------------------------------------- -export function readParenthesizedExpression(state: ParseState, parser: Parser): Ast.ParenthesizedExpression { +export async function readParenthesizedExpression( + state: ParseState, + parser: Parser, +): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readParenthesizedExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const parenthesizedExpression: Ast.ParenthesizedExpression = readWrapped( + const parenthesizedExpression: Ast.ParenthesizedExpression = await readWrapped( state, Ast.NodeKind.ParenthesizedExpression, () => readTokenKindAsConstant(state, Token.TokenKind.LeftParenthesis, Constant.WrapperConstant.LeftParenthesis), @@ -1132,7 +1153,7 @@ export function readNotImplementedExpression(state: ParseState, _parser: Parser) // ---------- Invoke expression ---------- // --------------------------------------- -export function readInvokeExpression(state: ParseState, parser: Parser): Ast.InvokeExpression { +export async function readInvokeExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readInvokeExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -1141,7 +1162,7 @@ export function readInvokeExpression(state: ParseState, parser: Parser): Ast.Inv const continueReadingValues: boolean = !ParseStateUtils.isNextTokenKind(state, Token.TokenKind.RightParenthesis); - const invokeExpression: Ast.InvokeExpression = readWrapped( + const invokeExpression: Ast.InvokeExpression = await readWrapped( state, Ast.NodeKind.InvokeExpression, () => 
readTokenKindAsConstant(state, Token.TokenKind.LeftParenthesis, Constant.WrapperConstant.LeftParenthesis), @@ -1168,7 +1189,7 @@ export function readInvokeExpression(state: ParseState, parser: Parser): Ast.Inv // ---------- 12.2.3.17 List expression ---------- // ----------------------------------------------- -export function readListExpression(state: ParseState, parser: Parser): Ast.ListExpression { +export async function readListExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readListExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -1177,14 +1198,14 @@ export function readListExpression(state: ParseState, parser: Parser): Ast.ListE const continueReadingValues: boolean = !ParseStateUtils.isNextTokenKind(state, Token.TokenKind.RightBrace); - const listExpression: Ast.ListExpression = readWrapped( + const listExpression: Ast.ListExpression = await readWrapped( state, Ast.NodeKind.ListExpression, () => readTokenKindAsConstant(state, Token.TokenKind.LeftBrace, Constant.WrapperConstant.LeftBrace), - () => - readCsvArray( + async () => + await readCsvArray( state, - () => parser.readListItem(state, parser), + async () => await parser.readListItem(state, parser), continueReadingValues, testCsvContinuationDanglingCommaForBrace, ), @@ -1197,7 +1218,7 @@ export function readListExpression(state: ParseState, parser: Parser): Ast.ListE return listExpression; } -export function readListItem(state: ParseState, parser: Parser): Ast.TListItem { +export async function readListItem(state: ParseState, parser: Parser): Promise { const nodeKind: Ast.NodeKind.RangeExpression = Ast.NodeKind.RangeExpression; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readListItem.name, { @@ -1207,7 +1228,7 @@ export function readListItem(state: ParseState, parser: Parser): Ast.TListItem { state.maybeCancellationToken?.throwIfCancelled(); ParseStateUtils.startContext(state, nodeKind); - const left: Ast.TExpression = parser.readExpression(state, parser); + const left: Ast.TExpression = await parser.readExpression(state, parser); if (ParseStateUtils.isOnTokenKind(state, Token.TokenKind.DotDot)) { const rangeConstant: Ast.IConstant = readTokenKindAsConstant( @@ -1216,7 +1237,7 @@ export function readListItem(state: ParseState, parser: Parser): Ast.TListItem { Constant.MiscConstant.DotDot, ); - const right: Ast.TExpression = parser.readExpression(state, parser); + const right: Ast.TExpression = await parser.readExpression(state, parser); const rangeExpression: Ast.RangeExpression = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -1251,7 +1272,7 @@ export function readListItem(state: ParseState, parser: Parser): Ast.TListItem { // ---------- 12.2.3.18 Record expression ---------- // ------------------------------------------------- -export function readRecordExpression(state: ParseState, parser: Parser): Ast.RecordExpression { +export async function readRecordExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readRecordExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -1260,7 +1281,7 @@ export function readRecordExpression(state: ParseState, parser: Parser): Ast.Rec const continueReadingValues: boolean = !ParseStateUtils.isNextTokenKind(state, Token.TokenKind.RightBracket); - const recordExpression: Ast.RecordExpression = readWrapped( + const recordExpression: Ast.RecordExpression = 
await readWrapped( state, Ast.NodeKind.RecordExpression, () => readTokenKindAsConstant(state, Token.TokenKind.LeftBracket, Constant.WrapperConstant.LeftBracket), @@ -1284,18 +1305,18 @@ export function readRecordExpression(state: ParseState, parser: Parser): Ast.Rec // ---------- 12.2.3.19 Item access expression ---------- // ------------------------------------------------------ -export function readItemAccessExpression(state: ParseState, parser: Parser): Ast.ItemAccessExpression { +export async function readItemAccessExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readItemAccessExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const itemAccessExpression: Ast.ItemAccessExpression = readWrapped( + const itemAccessExpression: Ast.ItemAccessExpression = await readWrapped( state, Ast.NodeKind.ItemAccessExpression, () => readTokenKindAsConstant(state, Token.TokenKind.LeftBrace, Constant.WrapperConstant.LeftBrace), - () => parser.readExpression(state, parser), + async () => await parser.readExpression(state, parser), () => readTokenKindAsConstant(state, Token.TokenKind.RightBrace, Constant.WrapperConstant.RightBrace), true, ); @@ -1309,27 +1330,27 @@ export function readItemAccessExpression(state: ParseState, parser: Parser): Ast // ---------- 12.2.3.20 Field access expression ---------- // ------------------------------------------------------- -export function readFieldSelection(state: ParseState, parser: Parser): Ast.FieldSelector { +export async function readFieldSelection(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readFieldSelection.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const fieldSelector: Ast.FieldSelector = readFieldSelector(state, parser, true); + const fieldSelector: Ast.FieldSelector = await readFieldSelector(state, parser, true); trace.exit({ [NaiveTraceConstant.TokenIndex]: state.tokenIndex }); return fieldSelector; } -export function readFieldProjection(state: ParseState, parser: Parser): Ast.FieldProjection { +export async function readFieldProjection(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readFieldProjection.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const fieldProjection: Ast.FieldProjection = readWrapped( + const fieldProjection: Ast.FieldProjection = await readWrapped( state, Ast.NodeKind.FieldProjection, () => readTokenKindAsConstant(state, Token.TokenKind.LeftBracket, Constant.WrapperConstant.LeftBracket), @@ -1349,14 +1370,18 @@ export function readFieldProjection(state: ParseState, parser: Parser): Ast.Fiel return fieldProjection; } -export function readFieldSelector(state: ParseState, parser: Parser, allowOptional: boolean): Ast.FieldSelector { +export async function readFieldSelector( + state: ParseState, + parser: Parser, + allowOptional: boolean, +): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readFieldSelector.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const fieldSelector: Ast.FieldSelector = readWrapped( + const fieldSelector: Ast.FieldSelector = await readWrapped( state, Ast.NodeKind.FieldSelector, () => 
readTokenKindAsConstant(state, Token.TokenKind.LeftBracket, Constant.WrapperConstant.LeftBracket), @@ -1374,7 +1399,7 @@ export function readFieldSelector(state: ParseState, parser: Parser, allowOption // ---------- 12.2.3.21 Function expression ---------- // --------------------------------------------------- -export function readFunctionExpression(state: ParseState, parser: Parser): Ast.FunctionExpression { +export async function readFunctionExpression(state: ParseState, parser: Parser): Promise { const nodeKind: Ast.NodeKind.FunctionExpression = Ast.NodeKind.FunctionExpression; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readFunctionExpression.name, { @@ -1384,12 +1409,12 @@ export function readFunctionExpression(state: ParseState, parser: Parser): Ast.F state.maybeCancellationToken?.throwIfCancelled(); ParseStateUtils.startContext(state, nodeKind); - const parameters: Ast.IParameterList = parser.readParameterList( + const parameters: Ast.IParameterList = await parser.readParameterList( state, parser, ); - const maybeFunctionReturnType: Ast.AsNullablePrimitiveType | undefined = maybeReadAsNullablePrimitiveType( + const maybeFunctionReturnType: Ast.AsNullablePrimitiveType | undefined = await maybeReadAsNullablePrimitiveType( state, parser, ); @@ -1400,7 +1425,7 @@ export function readFunctionExpression(state: ParseState, parser: Parser): Ast.F Constant.MiscConstant.FatArrow, ); - const expression: Ast.TExpression = parser.readExpression(state, parser); + const expression: Ast.TExpression = await parser.readExpression(state, parser); const functionExpression: Ast.FunctionExpression = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -1418,17 +1443,17 @@ export function readFunctionExpression(state: ParseState, parser: Parser): Ast.F return functionExpression; } -export function readParameterList( +export async function readParameterList( state: ParseState, parser: Parser, -): Ast.IParameterList { +): Promise> { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readParameterList.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const parameterList: Ast.IParameterList = genericReadParameterList( + const parameterList: Ast.IParameterList = await genericReadParameterList( state, parser, () => maybeReadAsNullablePrimitiveType(state, parser), @@ -1439,14 +1464,17 @@ export function readParameterList( return parameterList; } -function maybeReadAsNullablePrimitiveType(state: ParseState, parser: Parser): Ast.AsNullablePrimitiveType | undefined { +async function maybeReadAsNullablePrimitiveType( + state: ParseState, + parser: Parser, +): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, maybeReadAsNullablePrimitiveType.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const maybeAsNullablePrimitiveType: Ast.AsNullablePrimitiveType | undefined = maybeReadPairedConstant( + const maybeAsNullablePrimitiveType: Ast.AsNullablePrimitiveType | undefined = await maybeReadPairedConstant( state, Ast.NodeKind.AsNullablePrimitiveType, () => ParseStateUtils.isOnTokenKind(state, Token.TokenKind.KeywordAs), @@ -1459,14 +1487,14 @@ function maybeReadAsNullablePrimitiveType(state: ParseState, parser: Parser): As return maybeAsNullablePrimitiveType; } -export function readAsType(state: ParseState, parser: Parser): Ast.AsType { +export async function readAsType(state: ParseState, parser: Parser): 
Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readAsType.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const asType: Ast.AsType = readPairedConstant( + const asType: Ast.AsType = await readPairedConstant( state, Ast.NodeKind.AsType, () => readTokenKindAsConstant(state, Token.TokenKind.KeywordAs, Constant.KeywordConstant.As), @@ -1482,14 +1510,14 @@ export function readAsType(state: ParseState, parser: Parser): Ast.AsType { // ---------- 12.2.3.22 Each expression ---------- // ----------------------------------------------- -export function readEachExpression(state: ParseState, parser: Parser): Ast.EachExpression { +export async function readEachExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readEachExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const eachExpression: Ast.EachExpression = readPairedConstant( + const eachExpression: Ast.EachExpression = await readPairedConstant( state, Ast.NodeKind.EachExpression, () => readTokenKindAsConstant(state, Token.TokenKind.KeywordEach, Constant.KeywordConstant.Each), @@ -1505,7 +1533,7 @@ export function readEachExpression(state: ParseState, parser: Parser): Ast.EachE // ---------- 12.2.3.23 Let expression ---------- // ---------------------------------------------- -export function readLetExpression(state: ParseState, parser: Parser): Ast.LetExpression { +export async function readLetExpression(state: ParseState, parser: Parser): Promise { const nodeKind: Ast.NodeKind.LetExpression = Ast.NodeKind.LetExpression; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readLetExpression.name, { @@ -1522,7 +1550,7 @@ export function readLetExpression(state: ParseState, parser: Parser): Ast.LetExp ); const identifierPairedExpression: Ast.ICsvArray = - parser.readIdentifierPairedExpressions( + await parser.readIdentifierPairedExpressions( state, parser, !ParseStateUtils.isNextTokenKind(state, Token.TokenKind.KeywordIn), @@ -1535,7 +1563,7 @@ export function readLetExpression(state: ParseState, parser: Parser): Ast.LetExp Constant.KeywordConstant.In, ); - const expression: Ast.TExpression = parser.readExpression(state, parser); + const expression: Ast.TExpression = await parser.readExpression(state, parser); const letExpression: Ast.LetExpression = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -1557,7 +1585,7 @@ export function readLetExpression(state: ParseState, parser: Parser): Ast.LetExp // ---------- 12.2.3.24 If expression ---------- // --------------------------------------------- -export function readIfExpression(state: ParseState, parser: Parser): Ast.IfExpression { +export async function readIfExpression(state: ParseState, parser: Parser): Promise { const nodeKind: Ast.NodeKind.IfExpression = Ast.NodeKind.IfExpression; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readIfExpression.name, { @@ -1573,7 +1601,7 @@ export function readIfExpression(state: ParseState, parser: Parser): Ast.IfExpre Constant.KeywordConstant.If, ); - const condition: Ast.TExpression = parser.readExpression(state, parser); + const condition: Ast.TExpression = await parser.readExpression(state, parser); const thenConstant: Ast.IConstant = readTokenKindAsConstant( state, @@ -1581,7 +1609,7 @@ export function readIfExpression(state: ParseState, parser: 
Parser): Ast.IfExpre Constant.KeywordConstant.Then, ); - const trueExpression: Ast.TExpression = parser.readExpression(state, parser); + const trueExpression: Ast.TExpression = await parser.readExpression(state, parser); const elseConstant: Ast.IConstant = readTokenKindAsConstant( state, @@ -1589,7 +1617,7 @@ export function readIfExpression(state: ParseState, parser: Parser): Ast.IfExpre Constant.KeywordConstant.Else, ); - const falseExpression: Ast.TExpression = parser.readExpression(state, parser); + const falseExpression: Ast.TExpression = await parser.readExpression(state, parser); const ifExpression: Ast.IfExpression = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -1613,7 +1641,7 @@ export function readIfExpression(state: ParseState, parser: Parser): Ast.IfExpre // ---------- 12.2.3.25 Type expression ---------- // ----------------------------------------------- -export function readTypeExpression(state: ParseState, parser: Parser): Ast.TTypeExpression { +export async function readTypeExpression(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readTypeExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -1623,14 +1651,14 @@ export function readTypeExpression(state: ParseState, parser: Parser): Ast.TType let typeExpression: Ast.TTypeExpression; if (ParseStateUtils.isOnTokenKind(state, Token.TokenKind.KeywordType)) { - typeExpression = readPairedConstant( + typeExpression = await readPairedConstant( state, Ast.NodeKind.TypePrimaryType, () => readTokenKindAsConstant(state, Token.TokenKind.KeywordType, Constant.KeywordConstant.Type), () => parser.readPrimaryType(state, parser), ); } else { - typeExpression = parser.readPrimaryExpression(state, parser); + typeExpression = await parser.readPrimaryExpression(state, parser); } trace.exit({ [NaiveTraceConstant.TokenIndex]: state.tokenIndex }); @@ -1638,14 +1666,14 @@ export function readTypeExpression(state: ParseState, parser: Parser): Ast.TType return typeExpression; } -export function readType(state: ParseState, parser: Parser): Ast.TType { +export async function readType(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readType.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const triedReadPrimaryType: TriedReadPrimaryType = tryReadPrimaryType(state, parser); + const triedReadPrimaryType: TriedReadPrimaryType = await tryReadPrimaryType(state, parser); if (ResultUtils.isOk(triedReadPrimaryType)) { trace.exit({ @@ -1664,14 +1692,14 @@ export function readType(state: ParseState, parser: Parser): Ast.TType { } } -export function readPrimaryType(state: ParseState, parser: Parser): Ast.TPrimaryType { +export async function readPrimaryType(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readPrimaryType.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const triedReadPrimaryType: TriedReadPrimaryType = tryReadPrimaryType(state, parser); + const triedReadPrimaryType: TriedReadPrimaryType = await tryReadPrimaryType(state, parser); if (ResultUtils.isOk(triedReadPrimaryType)) { trace.exit({ @@ -1690,7 +1718,7 @@ export function readPrimaryType(state: ParseState, parser: Parser): Ast.TPrimary } } -export function readRecordType(state: ParseState, parser: Parser): Ast.RecordType { 
+export async function readRecordType(state: ParseState, parser: Parser): Promise { const nodeKind: Ast.NodeKind.RecordType = Ast.NodeKind.RecordType; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readRecordType.name, { @@ -1700,7 +1728,7 @@ export function readRecordType(state: ParseState, parser: Parser): Ast.RecordTyp state.maybeCancellationToken?.throwIfCancelled(); ParseStateUtils.startContext(state, nodeKind); - const fields: Ast.FieldSpecificationList = parser.readFieldSpecificationList( + const fields: Ast.FieldSpecificationList = await parser.readFieldSpecificationList( state, parser, true, @@ -1720,7 +1748,7 @@ export function readRecordType(state: ParseState, parser: Parser): Ast.RecordTyp return recordType; } -export function readTableType(state: ParseState, parser: Parser): Ast.TableType { +export async function readTableType(state: ParseState, parser: Parser): Promise { const nodeKind: Ast.NodeKind.TableType = Ast.NodeKind.TableType; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readTableType.name, { @@ -1745,9 +1773,14 @@ export function readTableType(state: ParseState, parser: Parser): Ast.TableType let rowType: Ast.FieldSpecificationList | Ast.TPrimaryExpression; if (isPrimaryExpressionExpected) { - rowType = parser.readPrimaryExpression(state, parser); + rowType = await parser.readPrimaryExpression(state, parser); } else { - rowType = parser.readFieldSpecificationList(state, parser, false, testCsvContinuationDanglingCommaForBracket); + rowType = await parser.readFieldSpecificationList( + state, + parser, + false, + testCsvContinuationDanglingCommaForBracket, + ); } const tableType: Ast.TableType = { @@ -1764,12 +1797,12 @@ export function readTableType(state: ParseState, parser: Parser): Ast.TableType return tableType; } -export function readFieldSpecificationList( +export async function readFieldSpecificationList( state: ParseState, parser: Parser, allowOpenMarker: boolean, testPostCommaError: (state: ParseState) => ParseError.TInnerParseError | undefined, -): Ast.FieldSpecificationList { +): Promise { const nodeKind: Ast.NodeKind.FieldSpecificationList = Ast.NodeKind.FieldSpecificationList; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readFieldSpecificationList.name, { @@ -1835,12 +1868,12 @@ export function readFieldSpecificationList( const maybeOptionalConstant: Ast.IConstant | undefined = maybeReadConstantKind(state, Constant.LanguageConstant.Optional); - const name: Ast.GeneralizedIdentifier = parser.readGeneralizedIdentifier(state, parser); + // eslint-disable-next-line no-await-in-loop + const name: Ast.GeneralizedIdentifier = await parser.readGeneralizedIdentifier(state, parser); - const maybeFieldTypeSpecification: Ast.FieldTypeSpecification | undefined = maybeReadFieldTypeSpecification( - state, - parser, - ); + const maybeFieldTypeSpecification: Ast.FieldTypeSpecification | undefined = + // eslint-disable-next-line no-await-in-loop + await maybeReadFieldTypeSpecification(state, parser); const field: Ast.FieldSpecification = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -1919,7 +1952,10 @@ export function readFieldSpecificationList( return fieldSpecificationList; } -function maybeReadFieldTypeSpecification(state: ParseState, parser: Parser): Ast.FieldTypeSpecification | undefined { +async function maybeReadFieldTypeSpecification( + state: ParseState, + parser: Parser, +): Promise { const nodeKind: Ast.NodeKind.FieldTypeSpecification = 
Ast.NodeKind.FieldTypeSpecification; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, maybeReadFieldTypeSpecification.name, { @@ -1936,7 +1972,7 @@ function maybeReadFieldTypeSpecification(state: ParseState, parser: Parser): Ast ); if (maybeEqualConstant) { - const fieldType: Ast.TType = parser.readType(state, parser); + const fieldType: Ast.TType = await parser.readType(state, parser); const fieldTypeSpecification: Ast.FieldTypeSpecification = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -1980,14 +2016,14 @@ function fieldSpecificationListReadError(state: ParseState, allowOpenMarker: boo } } -export function readListType(state: ParseState, parser: Parser): Ast.ListType { +export async function readListType(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readListType.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const listType: Ast.ListType = readWrapped( + const listType: Ast.ListType = await readWrapped( state, Ast.NodeKind.ListType, () => readTokenKindAsConstant(state, Token.TokenKind.LeftBrace, Constant.WrapperConstant.LeftBrace), @@ -2001,7 +2037,7 @@ export function readListType(state: ParseState, parser: Parser): Ast.ListType { return listType; } -export function readFunctionType(state: ParseState, parser: Parser): Ast.FunctionType { +export async function readFunctionType(state: ParseState, parser: Parser): Promise { const nodeKind: Ast.NodeKind.FunctionType = Ast.NodeKind.FunctionType; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readFunctionType.name, { @@ -2016,8 +2052,8 @@ export function readFunctionType(state: ParseState, parser: Parser): Ast.Functio Constant.PrimitiveTypeConstant.Function, ); - const parameters: Ast.IParameterList = parser.readParameterSpecificationList(state, parser); - const functionReturnType: Ast.AsType = parser.readAsType(state, parser); + const parameters: Ast.IParameterList = await parser.readParameterSpecificationList(state, parser); + const functionReturnType: Ast.AsType = await parser.readAsType(state, parser); const functionType: Ast.FunctionType = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -2034,7 +2070,7 @@ export function readFunctionType(state: ParseState, parser: Parser): Ast.Functio return functionType; } -function tryReadPrimaryType(state: ParseState, parser: Parser): TriedReadPrimaryType { +async function tryReadPrimaryType(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, tryReadPrimaryType.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -2053,21 +2089,21 @@ function tryReadPrimaryType(state: ParseState, parser: Parser): TriedReadPrimary let attempt: TriedReadPrimaryType; if (ParseStateUtils.isOnTokenKind(state, Token.TokenKind.LeftBracket)) { - attempt = ResultUtils.boxOk(parser.readRecordType(state, parser)); + attempt = ResultUtils.boxOk(await parser.readRecordType(state, parser)); } else if (ParseStateUtils.isOnTokenKind(state, Token.TokenKind.LeftBrace)) { - attempt = ResultUtils.boxOk(parser.readListType(state, parser)); + attempt = ResultUtils.boxOk(await parser.readListType(state, parser)); } else if (isTableTypeNext) { - attempt = ResultUtils.boxOk(parser.readTableType(state, parser)); + attempt = ResultUtils.boxOk(await parser.readTableType(state, parser)); } else if (isFunctionTypeNext) { - attempt = 
ResultUtils.boxOk(parser.readFunctionType(state, parser)); + attempt = ResultUtils.boxOk(await parser.readFunctionType(state, parser)); } else if (ParseStateUtils.isOnConstantKind(state, Constant.LanguageConstant.Nullable)) { - attempt = ResultUtils.boxOk(parser.readNullableType(state, parser)); + attempt = ResultUtils.boxOk(await parser.readNullableType(state, parser)); } else { - const checkpoint: ParseStateCheckpoint = parser.createCheckpoint(state); - const triedReadPrimitiveType: TriedReadPrimaryType = tryReadPrimitiveType(state, parser); + const checkpoint: ParseStateCheckpoint = await parser.createCheckpoint(state); + const triedReadPrimitiveType: TriedReadPrimaryType = await tryReadPrimitiveType(state, parser); if (ResultUtils.isError(triedReadPrimitiveType)) { - parser.restoreCheckpoint(state, checkpoint); + await parser.restoreCheckpoint(state, checkpoint); } attempt = triedReadPrimitiveType; @@ -2078,15 +2114,20 @@ function tryReadPrimaryType(state: ParseState, parser: Parser): TriedReadPrimary return attempt; } -export function readParameterSpecificationList(state: ParseState, parser: Parser): Ast.IParameterList { +export async function readParameterSpecificationList( + state: ParseState, + parser: Parser, +): Promise> { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readParameterSpecificationList.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const parameterList: Ast.IParameterList = genericReadParameterList(state, parser, () => - parser.readAsType(state, parser), + const parameterList: Ast.IParameterList = await genericReadParameterList( + state, + parser, + async () => await parser.readAsType(state, parser), ); trace.exit({ [NaiveTraceConstant.TokenIndex]: state.tokenIndex }); @@ -2094,14 +2135,14 @@ export function readParameterSpecificationList(state: ParseState, parser: Parser return parameterList; } -export function readNullableType(state: ParseState, parser: Parser): Ast.NullableType { +export async function readNullableType(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readNullableType.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const nullableType: Ast.NullableType = readPairedConstant( + const nullableType: Ast.NullableType = await readPairedConstant( state, Ast.NodeKind.NullableType, () => readConstantKind(state, Constant.LanguageConstant.Nullable), @@ -2117,14 +2158,17 @@ export function readNullableType(state: ParseState, parser: Parser): Ast.Nullabl // ---------- 12.2.3.26 Error raising expression ---------- // -------------------------------------------------------- -export function readErrorRaisingExpression(state: ParseState, parser: Parser): Ast.ErrorRaisingExpression { +export async function readErrorRaisingExpression( + state: ParseState, + parser: Parser, +): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readErrorRaisingExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const errorRaisingExpression: Ast.ErrorRaisingExpression = readPairedConstant( + const errorRaisingExpression: Ast.ErrorRaisingExpression = await readPairedConstant( state, Ast.NodeKind.ErrorRaisingExpression, () => readTokenKindAsConstant(state, Token.TokenKind.KeywordError, Constant.KeywordConstant.Error), @@ -2140,7 +2184,10 @@ export function 
readErrorRaisingExpression(state: ParseState, parser: Parser): A // ---------- 12.2.3.27 Error handling expression ---------- // --------------------------------------------------------- -export function readErrorHandlingExpression(state: ParseState, parser: Parser): Ast.ErrorHandlingExpression { +export async function readErrorHandlingExpression( + state: ParseState, + parser: Parser, +): Promise { const nodeKind: Ast.NodeKind.ErrorHandlingExpression = Ast.NodeKind.ErrorHandlingExpression; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readErrorHandlingExpression.name, { @@ -2156,11 +2203,11 @@ export function readErrorHandlingExpression(state: ParseState, parser: Parser): Constant.KeywordConstant.Try, ); - const protectedExpression: Ast.TExpression = parser.readExpression(state, parser); + const protectedExpression: Ast.TExpression = await parser.readExpression(state, parser); const otherwiseExpressionNodeKind: Ast.NodeKind.OtherwiseExpression = Ast.NodeKind.OtherwiseExpression; - const maybeOtherwiseExpression: Ast.OtherwiseExpression | undefined = maybeReadPairedConstant( + const maybeOtherwiseExpression: Ast.OtherwiseExpression | undefined = await maybeReadPairedConstant( state, otherwiseExpressionNodeKind, () => ParseStateUtils.isOnTokenKind(state, Token.TokenKind.KeywordOtherwise), @@ -2187,7 +2234,7 @@ export function readErrorHandlingExpression(state: ParseState, parser: Parser): // ---------- 12.2.4 Literal Attributes ---------- // ----------------------------------------------- -export function readRecordLiteral(state: ParseState, parser: Parser): Ast.RecordLiteral { +export async function readRecordLiteral(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readRecordLiteral.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -2201,7 +2248,7 @@ export function readRecordLiteral(state: ParseState, parser: Parser): Ast.Record Constant.WrapperConstant.LeftBracket, Ast.ICsvArray, Constant.WrapperConstant.RightBracket - > = readWrapped( + > = await readWrapped( state, Ast.NodeKind.RecordLiteral, () => readTokenKindAsConstant(state, Token.TokenKind.LeftBracket, Constant.WrapperConstant.LeftBracket), @@ -2226,19 +2273,19 @@ export function readRecordLiteral(state: ParseState, parser: Parser): Ast.Record return recordLiteral; } -export function readFieldNamePairedAnyLiterals( +export async function readFieldNamePairedAnyLiterals( state: ParseState, parser: Parser, continueReadingValues: boolean, testPostCommaError: (state: ParseState) => ParseError.TInnerParseError | undefined, -): Ast.ICsvArray { +): Promise> { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readFieldNamePairedAnyLiterals.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const csvArray: Ast.ICsvArray = readCsvArray( + const csvArray: Ast.ICsvArray = await readCsvArray( state, () => readKeyValuePair< @@ -2260,7 +2307,7 @@ export function readFieldNamePairedAnyLiterals( return csvArray; } -export function readListLiteral(state: ParseState, parser: Parser): Ast.ListLiteral { +export async function readListLiteral(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readListLiteral.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -2274,7 +2321,7 @@ export function readListLiteral(state: ParseState, parser: Parser): Ast.ListLite 
Constant.WrapperConstant.LeftBrace, Ast.ICsvArray, Constant.WrapperConstant.RightBrace - > = readWrapped( + > = await readWrapped( state, Ast.NodeKind.ListLiteral, () => readTokenKindAsConstant(state, Token.TokenKind.LeftBrace, Constant.WrapperConstant.LeftBrace), @@ -2299,7 +2346,7 @@ export function readListLiteral(state: ParseState, parser: Parser): Ast.ListLite return listLiteral; } -export function readAnyLiteral(state: ParseState, parser: Parser): Ast.TAnyLiteral { +export async function readAnyLiteral(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readAnyLiteral.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -2309,9 +2356,9 @@ export function readAnyLiteral(state: ParseState, parser: Parser): Ast.TAnyLiter let anyLiteral: Ast.TAnyLiteral; if (ParseStateUtils.isOnTokenKind(state, Token.TokenKind.LeftBracket)) { - anyLiteral = parser.readRecordLiteral(state, parser); + anyLiteral = await parser.readRecordLiteral(state, parser); } else if (ParseStateUtils.isOnTokenKind(state, Token.TokenKind.LeftBrace)) { - anyLiteral = parser.readListLiteral(state, parser); + anyLiteral = await parser.readListLiteral(state, parser); } else { anyLiteral = parser.readLiteralExpression(state, parser); } @@ -2321,14 +2368,14 @@ export function readAnyLiteral(state: ParseState, parser: Parser): Ast.TAnyLiter return anyLiteral; } -export function readPrimitiveType(state: ParseState, parser: Parser): Ast.PrimitiveType { +export async function readPrimitiveType(state: ParseState, parser: Parser): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readPrimitiveType.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const triedReadPrimitiveType: TriedReadPrimitiveType = tryReadPrimitiveType(state, parser); + const triedReadPrimitiveType: TriedReadPrimitiveType = await tryReadPrimitiveType(state, parser); if (ResultUtils.isOk(triedReadPrimitiveType)) { trace.exit({ @@ -2347,7 +2394,7 @@ export function readPrimitiveType(state: ParseState, parser: Parser): Ast.Primit } } -function tryReadPrimitiveType(state: ParseState, parser: Parser): TriedReadPrimitiveType { +async function tryReadPrimitiveType(state: ParseState, parser: Parser): Promise { const nodeKind: Ast.NodeKind.PrimitiveType = Ast.NodeKind.PrimitiveType; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, tryReadPrimitiveType.name, { @@ -2356,7 +2403,7 @@ function tryReadPrimitiveType(state: ParseState, parser: Parser): TriedReadPrimi ParseStateUtils.startContext(state, nodeKind); - const checkpoint: ParseStateCheckpoint = parser.createCheckpoint(state); + const checkpoint: ParseStateCheckpoint = await parser.createCheckpoint(state); const expectedTokenKinds: ReadonlyArray = [ Token.TokenKind.Identifier, @@ -2409,7 +2456,7 @@ function tryReadPrimitiveType(state: ParseState, parser: Parser): TriedReadPrimi default: { const token: Token.Token = ParseStateUtils.assertGetTokenAt(state, state.tokenIndex); - parser.restoreCheckpoint(state, checkpoint); + await parser.restoreCheckpoint(state, checkpoint); return ResultUtils.boxError( new ParseError.InvalidPrimitiveTypeError( @@ -2428,7 +2475,7 @@ function tryReadPrimitiveType(state: ParseState, parser: Parser): TriedReadPrimi readToken(state); } else { const details: { tokenKind: Token.TokenKind | undefined } = { tokenKind: state.maybeCurrentTokenKind }; - parser.restoreCheckpoint(state, checkpoint); + await 
parser.restoreCheckpoint(state, checkpoint); trace.exit({ [NaiveTraceConstant.TokenIndex]: state.tokenIndex, @@ -2461,19 +2508,19 @@ function tryReadPrimitiveType(state: ParseState, parser: Parser): TriedReadPrimi // ---------- key-value pairs ---------- // ------------------------------------- -export function readIdentifierPairedExpressions( +export async function readIdentifierPairedExpressions( state: ParseState, parser: Parser, continueReadingValues: boolean, testPostCommaError: (state: ParseState) => ParseError.TInnerParseError | undefined, -): Ast.ICsvArray { +): Promise> { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readIdentifierPairedExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const csvArray: Ast.ICsvArray = readCsvArray( + const csvArray: Ast.ICsvArray = await readCsvArray( state, () => parser.readIdentifierPairedExpression(state, parser), continueReadingValues, @@ -2485,12 +2532,12 @@ export function readIdentifierPairedExpressions( return csvArray; } -export function readGeneralizedIdentifierPairedExpressions( +export async function readGeneralizedIdentifierPairedExpressions( state: ParseState, parser: Parser, continueReadingValues: boolean, testPostCommaError: (state: ParseState) => ParseError.TInnerParseError | undefined, -): Ast.ICsvArray { +): Promise> { const trace: Trace = state.traceManager.entry( NaiveTraceConstant.Parse, readGeneralizedIdentifierPairedExpressions.name, @@ -2498,7 +2545,7 @@ export function readGeneralizedIdentifierPairedExpressions( state.maybeCancellationToken?.throwIfCancelled(); - const csvArray: Ast.ICsvArray = readCsvArray( + const csvArray: Ast.ICsvArray = await readCsvArray( state, () => parser.readGeneralizedIdentifierPairedExpression(state, parser), continueReadingValues, @@ -2510,10 +2557,10 @@ export function readGeneralizedIdentifierPairedExpressions( return csvArray; } -export function readGeneralizedIdentifierPairedExpression( +export async function readGeneralizedIdentifierPairedExpression( state: ParseState, parser: Parser, -): Ast.GeneralizedIdentifierPairedExpression { +): Promise { const trace: Trace = state.traceManager.entry( NaiveTraceConstant.Parse, readGeneralizedIdentifierPairedExpression.name, @@ -2524,7 +2571,7 @@ export function readGeneralizedIdentifierPairedExpression( state.maybeCancellationToken?.throwIfCancelled(); - const generalizedIdentifierPairedExpression: Ast.GeneralizedIdentifierPairedExpression = readKeyValuePair< + const generalizedIdentifierPairedExpression: Ast.GeneralizedIdentifierPairedExpression = await readKeyValuePair< Ast.NodeKind.GeneralizedIdentifierPairedExpression, Ast.GeneralizedIdentifier, Ast.TExpression @@ -2540,22 +2587,26 @@ export function readGeneralizedIdentifierPairedExpression( return generalizedIdentifierPairedExpression; } -export function readIdentifierPairedExpression(state: ParseState, parser: Parser): Ast.IdentifierPairedExpression { +export async function readIdentifierPairedExpression( + state: ParseState, + parser: Parser, +): Promise { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readIdentifierPairedExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); state.maybeCancellationToken?.throwIfCancelled(); - const identifierPairedExpression: Ast.IdentifierPairedExpression = readKeyValuePair< + const identifierPairedExpression: Ast.IdentifierPairedExpression = await readKeyValuePair< Ast.NodeKind.IdentifierPairedExpression, 
Ast.Identifier, Ast.TExpression >( state, Ast.NodeKind.IdentifierPairedExpression, - () => parser.readIdentifier(state, parser), - () => parser.readExpression(state, parser), + // eslint-disable-next-line require-await + async () => parser.readIdentifier(state, parser), + async () => await parser.readExpression(state, parser), ); trace.exit({ [NaiveTraceConstant.TokenIndex]: state.tokenIndex }); @@ -2572,7 +2623,7 @@ export function readIdentifierPairedExpression(state: ParseState, parser: Parser // The helper function is nearly a copy except it replaces Left and leftReader with Right and rightReader. // // The reason the code is duplicated across two functions is because I can't think of a cleaner way to do it. -function recursiveReadBinOpExpression< +async function recursiveReadBinOpExpression< Kind extends Ast.TBinOpExpressionNodeKind, Left, Op extends Constant.TBinOpExpressionOperator, @@ -2580,16 +2631,16 @@ function recursiveReadBinOpExpression< >( state: ParseState, nodeKind: Kind, - leftReader: () => Left, + leftReader: () => Promise, maybeOperatorFrom: (tokenKind: Token.TokenKind | undefined) => Op | undefined, - rightReader: () => Right, -): Left | Ast.IBinOpExpression { + rightReader: () => Promise, +): Promise> { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, recursiveReadBinOpExpression.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); ParseStateUtils.startContext(state, nodeKind); - const left: Left = leftReader(); + const left: Left = await leftReader(); // If no operator, return Left const maybeOperator: Op | undefined = maybeOperatorFrom(state.maybeCurrentTokenKind); @@ -2611,7 +2662,7 @@ function recursiveReadBinOpExpression< maybeOperator, ); - const right: Right | Ast.IBinOpExpression = recursiveReadBinOpExpressionHelper< + const right: Right | Ast.IBinOpExpression = await recursiveReadBinOpExpressionHelper< Kind, Op, Right @@ -2639,7 +2690,7 @@ function recursiveReadBinOpExpression< // Given the string `1 + 2 + 3` the function will recursively parse 2 Ast nodes, // where their TokenRange's are represented by brackets: // 1 + [2 + [3]] -function recursiveReadBinOpExpressionHelper< +async function recursiveReadBinOpExpressionHelper< Kind extends Ast.TBinOpExpressionNodeKind, OperatorKind extends Constant.TBinOpExpressionOperator, Right, @@ -2647,14 +2698,14 @@ function recursiveReadBinOpExpressionHelper< state: ParseState, nodeKind: Kind, maybeOperatorFrom: (tokenKind: Token.TokenKind | undefined) => OperatorKind | undefined, - rightReader: () => Right, -): Right | Ast.IBinOpExpression { + rightReader: () => Promise, +): Promise> { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, recursiveReadBinOpExpressionHelper.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); ParseStateUtils.startContext(state, nodeKind); - const rightAsLeft: Right = rightReader(); + const rightAsLeft: Right = await rightReader(); const maybeOperator: OperatorKind | undefined = maybeOperatorFrom(state.maybeCurrentTokenKind); @@ -2675,11 +2726,13 @@ function recursiveReadBinOpExpressionHelper< maybeOperator, ); - const right: Right | Ast.IBinOpExpression = recursiveReadBinOpExpressionHelper< - Kind, - OperatorKind, - Right - >(state, nodeKind, maybeOperatorFrom, rightReader); + const right: Right | Ast.IBinOpExpression = + await recursiveReadBinOpExpressionHelper( + state, + nodeKind, + maybeOperatorFrom, + rightReader, + ); const binOpExpression: Ast.IBinOpExpression = { ...ParseStateUtils.assertGetContextNodeMetadata(state), 
@@ -2700,12 +2753,12 @@ function recursiveReadBinOpExpressionHelper< return binOpExpression; } -function readCsvArray( +async function readCsvArray( state: ParseState, - valueReader: () => T, + valueReader: () => Promise, continueReadingValues: boolean, testPostCommaError: (state: ParseState) => ParseError.TInnerParseError | undefined, -): Ast.TCsvArray & Ast.ICsvArray { +): Promise> { const nodeKind: Ast.NodeKind.ArrayWrapper = Ast.NodeKind.ArrayWrapper; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readCsvArray.name, { @@ -2731,7 +2784,8 @@ function readCsvArray( throw maybeError; } - const node: T = valueReader(); + // eslint-disable-next-line no-await-in-loop + const node: T = await valueReader(); const maybeCommaConstant: Ast.IConstant | undefined = maybeReadTokenKindAsConstant( state, @@ -2770,19 +2824,19 @@ function readCsvArray( return csvArray; } -function readKeyValuePair( +async function readKeyValuePair( state: ParseState, nodeKind: Kind, - keyReader: () => Key, - valueReader: () => Value, -): Ast.IKeyValuePair { + keyReader: () => Promise, + valueReader: () => Promise, +): Promise> { ParseStateUtils.startContext(state, nodeKind); const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readKeyValuePair.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); - const key: Key = keyReader(); + const key: Key = await keyReader(); const equalConstant: Ast.IConstant = readTokenKindAsConstant( state, @@ -2790,7 +2844,7 @@ function readKeyValuePair( Constant.MiscConstant.Equal, ); - const value: Value = valueReader(); + const value: Value = await valueReader(); const keyValuePair: Ast.IKeyValuePair = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -2807,12 +2861,16 @@ function readKeyValuePair( return keyValuePair; } -function readPairedConstant( +async function readPairedConstant< + Kind extends Ast.TPairedConstantNodeKind, + ConstantKind extends Constant.TConstant, + Paired, +>( state: ParseState, nodeKind: Kind, constantReader: () => Ast.TConstant & Ast.IConstant, - pairedReader: () => Paired, -): Ast.IPairedConstant { + pairedReader: () => Promise, +): Promise> { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readPairedConstant.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -2820,7 +2878,7 @@ function readPairedConstant = constantReader(); - const paired: Paired = pairedReader(); + const paired: Paired = await pairedReader(); const pairedConstant: Ast.IPairedConstant = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -2836,7 +2894,7 @@ function readPairedConstant boolean, constantReader: () => Ast.TConstant & Ast.IConstant, - pairedReader: () => Paired, -): Ast.IPairedConstant | undefined { + pairedReader: () => Promise, +): Promise | undefined> { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, maybeReadPairedConstant.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -2854,7 +2912,12 @@ function maybeReadPairedConstant< let pairedConstant: Ast.IPairedConstant | undefined; if (condition()) { - pairedConstant = readPairedConstant(state, nodeKind, constantReader, pairedReader); + pairedConstant = await readPairedConstant( + state, + nodeKind, + constantReader, + pairedReader, + ); } else { ParseStateUtils.incrementAttributeCounter(state); pairedConstant = undefined; @@ -2868,11 +2931,11 @@ function maybeReadPairedConstant< return pairedConstant; } -function genericReadParameterList( +async function genericReadParameterList( 
state: ParseState, parser: Parser, - typeReader: () => T, -): Ast.IParameterList { + typeReader: () => Promise, +): Promise> { const nodeKind: Ast.NodeKind.ParameterList = Ast.NodeKind.ParameterList; const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, genericReadParameterList.name, { @@ -2931,8 +2994,10 @@ function genericReadParameterList( reachedOptionalParameter = true; } + // eslint-disable-next-line no-await-in-loop const name: Ast.Identifier = parser.readIdentifier(state, parser); - const maybeParameterType: T = typeReader(); + // eslint-disable-next-line no-await-in-loop + const maybeParameterType: T = await typeReader(); const parameter: Ast.IParameter = { ...ParseStateUtils.assertGetContextNodeMetadata(state), @@ -3000,7 +3065,7 @@ function genericReadParameterList( return parameterList; } -function readWrapped< +async function readWrapped< Kind extends Ast.TWrappedNodeKind, Open extends Constant.WrapperConstant, Content, @@ -3009,10 +3074,10 @@ function readWrapped< state: ParseState, nodeKind: Kind, openConstantReader: () => Ast.IConstant, - contentReader: () => Content, + contentReader: () => Promise, closeConstantReader: () => Ast.IConstant, allowOptionalConstant: boolean, -): WrappedRead { +): Promise> { const trace: Trace = state.traceManager.entry(NaiveTraceConstant.Parse, readWrapped.name, { [NaiveTraceConstant.TokenIndex]: state.tokenIndex, }); @@ -3020,7 +3085,7 @@ function readWrapped< ParseStateUtils.startContext(state, nodeKind); const openWrapperConstant: Ast.IConstant = openConstantReader(); - const content: Content = contentReader(); + const content: Content = await contentReader(); const closeWrapperConstant: Ast.IConstant = closeConstantReader(); let maybeOptionalConstant: Ast.IConstant | undefined; @@ -3216,9 +3281,9 @@ function maybeReadConstantKind( } } -function maybeReadLiteralAttributes(state: ParseState, parser: Parser): Ast.RecordLiteral | undefined { +async function maybeReadLiteralAttributes(state: ParseState, parser: Parser): Promise { if (ParseStateUtils.isOnTokenKind(state, Token.TokenKind.LeftBracket)) { - return parser.readRecordLiteral(state, parser); + return await parser.readRecordLiteral(state, parser); } else { ParseStateUtils.incrementAttributeCounter(state); diff --git a/src/powerquery-parser/parser/parsers/recursiveDescentParser.ts b/src/powerquery-parser/parser/parsers/recursiveDescentParser.ts index 8c23484f..b6c09f0f 100644 --- a/src/powerquery-parser/parser/parsers/recursiveDescentParser.ts +++ b/src/powerquery-parser/parser/parsers/recursiveDescentParser.ts @@ -7,6 +7,9 @@ import { ParseStateUtils } from "../parseState"; export const RecursiveDescentParser: Parser = { ...NaiveParseSteps, + + readIdentifier: NaiveParseSteps.readIdentifier, + applyState: ParseStateUtils.applyState, copyState: ParseStateUtils.copyState, createCheckpoint: ParserUtils.createCheckpoint, diff --git a/src/powerquery-parser/settings/settings.ts b/src/powerquery-parser/settings/settings.ts index 3653254e..0025b0e8 100644 --- a/src/powerquery-parser/settings/settings.ts +++ b/src/powerquery-parser/settings/settings.ts @@ -22,7 +22,7 @@ export interface ParseSettings extends CommonSettings { lexerSnapshot: LexerSnapshot, maybeOverrides: Partial | undefined, ) => ParseState; - readonly maybeParserEntryPointFn: ((state: ParseState, parser: Parser) => Ast.TNode) | undefined; + readonly maybeParserEntryPointFn: ((state: ParseState, parser: Parser) => Promise) | undefined; } export type Settings = LexSettings & ParseSettings; diff --git 
a/src/powerquery-parser/task/taskUtils.ts b/src/powerquery-parser/task/taskUtils.ts index 4ff5e0db..2f378105 100644 --- a/src/powerquery-parser/task/taskUtils.ts +++ b/src/powerquery-parser/task/taskUtils.ts @@ -190,8 +190,8 @@ export function tryLex(settings: LexSettings, text: string): TriedLexTask { } } -export function tryParse(settings: ParseSettings, lexerSnapshot: Lexer.LexerSnapshot): TriedParseTask { - const triedParse: Parser.TriedParse = ParserUtils.tryParse(settings, lexerSnapshot); +export async function tryParse(settings: ParseSettings, lexerSnapshot: Lexer.LexerSnapshot): Promise { + const triedParse: Parser.TriedParse = await ParserUtils.tryParse(settings, lexerSnapshot); if (ResultUtils.isOk(triedParse)) { return createParseTaskOk(lexerSnapshot, triedParse.value.root, triedParse.value.state); @@ -202,7 +202,7 @@ export function tryParse(settings: ParseSettings, lexerSnapshot: Lexer.LexerSnap } } -export function tryLexParse(settings: LexSettings & ParseSettings, text: string): TriedLexParseTask { +export async function tryLexParse(settings: LexSettings & ParseSettings, text: string): Promise { const triedLexTask: TriedLexTask = tryLex(settings, text); if (triedLexTask.resultKind === ResultKind.Error) { @@ -211,7 +211,7 @@ export function tryLexParse(settings: LexSettings & ParseSettings, text: string) const lexerSnapshot: Lexer.LexerSnapshot = triedLexTask.lexerSnapshot; - return tryParse(settings, lexerSnapshot); + return await tryParse(settings, lexerSnapshot); } function createLexTaskOk(lexerSnapshot: Lexer.LexerSnapshot): LexTaskOk { diff --git a/src/test/libraryTest/parser/children.ts b/src/test/libraryTest/parser/children.ts index 0e9206aa..3800cf06 100644 --- a/src/test/libraryTest/parser/children.ts +++ b/src/test/libraryTest/parser/children.ts @@ -29,7 +29,7 @@ function createActual(lexParseOk: Task.ParseTaskOk): ChildIdsByIdEntry[] { } describe("Parser.Children", () => { - it(`() as number => 1`, () => { + it(`WIP () as number => 1`, async () => { const text: string = `() as number => 1`; const expected: ReadonlyArray = [ @@ -51,13 +51,13 @@ describe("Parser.Children", () => { ]; const actual: ReadonlyArray = createActual( - TestAssertUtils.assertGetLexParseOk(DefaultSettings, text), + await TestAssertUtils.assertGetLexParseOk(DefaultSettings, text), ); expect(actual).to.deep.equal(expected); }); - it(`null ?? 1 ?? 2`, () => { + it(`null ?? 1 ?? 2`, async () => { const text: string = `null ?? 1 ?? 
2`; const expected: ReadonlyArray = [ @@ -74,7 +74,7 @@ describe("Parser.Children", () => { ]; const actual: ReadonlyArray = createActual( - TestAssertUtils.assertGetLexParseOk(DefaultSettings, text), + await TestAssertUtils.assertGetLexParseOk(DefaultSettings, text), ); expect(actual).to.deep.equal(expected); diff --git a/src/test/libraryTest/parser/columnNumber.ts b/src/test/libraryTest/parser/columnNumber.ts index ac6c5f6a..ad5ea1da 100644 --- a/src/test/libraryTest/parser/columnNumber.ts +++ b/src/test/libraryTest/parser/columnNumber.ts @@ -8,8 +8,8 @@ import { Assert, DefaultSettings } from "../../.."; import { ParseError } from "../../../powerquery-parser/parser"; import { TestAssertUtils } from "../../testUtils"; -function assertGetExpectedTokenKindError(text: string): ParseError.ExpectedTokenKindError { - const error: ParseError.ParseError = TestAssertUtils.assertGetParseError(DefaultSettings, text); +async function assertGetExpectedTokenKindError(text: string): Promise { + const error: ParseError.ParseError = await TestAssertUtils.assertGetParseError(DefaultSettings, text); const innerError: ParseError.TInnerParseError = error.innerError; Assert.isTrue( @@ -20,8 +20,8 @@ function assertGetExpectedTokenKindError(text: string): ParseError.ExpectedToken return innerError as ParseError.ExpectedTokenKindError; } -function assertErrorAt(text: string, lineNumber: number, columnNumber: number, codeUnit: number): void { - const error: ParseError.ExpectedTokenKindError = assertGetExpectedTokenKindError(text); +async function assertErrorAt(text: string, lineNumber: number, columnNumber: number, codeUnit: number): Promise { + const error: ParseError.ExpectedTokenKindError = await assertGetExpectedTokenKindError(text); const foundToken: ParseError.TokenWithColumnNumber = Assert.asDefined(error.maybeFoundToken); expect(foundToken.token.positionStart.codeUnit).to.equal(codeUnit, "codeUnit"); @@ -30,27 +30,27 @@ function assertErrorAt(text: string, lineNumber: number, columnNumber: number, c } describe(`Parser.ColumnNumber`, () => { - it(`if x foo`, () => { - assertErrorAt(`if x foo`, 0, 5, 5); + it(`if x foo`, async () => { + await assertErrorAt(`if x foo`, 0, 5, 5); }); - it(`if x \\nfoo`, () => { - assertErrorAt(`if x \nfoo`, 1, 0, 6); + it(`if x \\nfoo`, async () => { + await assertErrorAt(`if x \nfoo`, 1, 0, 6); }); - it(`if x \\n foo`, () => { - assertErrorAt(`if x \n foo`, 1, 1, 7); + it(`if x \\n foo`, async () => { + await assertErrorAt(`if x \n foo`, 1, 1, 7); }); - it(`if \u006E\u0303 foo`, () => { - assertErrorAt(`if \u006E\u0303 foo`, 0, 5, 6); + it(`if \u006E\u0303 foo`, async () => { + await assertErrorAt(`if \u006E\u0303 foo`, 0, 5, 6); }); - it(`if \u006E\u0303 \\nfoo`, () => { - assertErrorAt(`if \u006E\u0303 \nfoo`, 1, 0, 7); + it(`if \u006E\u0303 \\nfoo`, async () => { + await assertErrorAt(`if \u006E\u0303 \nfoo`, 1, 0, 7); }); - it(`if \u006E\u0303 \\n foo`, () => { - assertErrorAt(`if \u006E\u0303 \n foo`, 1, 1, 8); + it(`if \u006E\u0303 \\n foo`, async () => { + await assertErrorAt(`if \u006E\u0303 \n foo`, 1, 1, 8); }); }); diff --git a/src/test/libraryTest/parser/error.ts b/src/test/libraryTest/parser/error.ts index 94789077..6c5469b9 100644 --- a/src/test/libraryTest/parser/error.ts +++ b/src/test/libraryTest/parser/error.ts @@ -4,193 +4,205 @@ import "mocha"; import { expect } from "chai"; -import { Assert, DefaultSettings, Lexer, Localization, Parser, Settings, Templates } from "../../.."; +import { Assert, DefaultSettings, Lexer, Localization, Settings, Templates } 
from "../../.."; +import { + Disambiguation, + ParseError, + Parser, + ParseState, + ParseStateUtils, + RecursiveDescentParser, +} from "../../../powerquery-parser/parser"; +import { Ast } from "../../../powerquery-parser/language"; import { TestAssertUtils } from "../../testUtils"; const DefaultSettingsWithStrict: Settings = { ...DefaultSettings, - createParseState: (lexerSnapshot: Lexer.LexerSnapshot, maybeOverrides: Partial | undefined) => { + createParseState: (lexerSnapshot: Lexer.LexerSnapshot, maybeOverrides: Partial | undefined) => { maybeOverrides = maybeOverrides ?? {}; - return Parser.ParseStateUtils.createState(lexerSnapshot, { + return ParseStateUtils.createState(lexerSnapshot, { ...maybeOverrides, disambiguationBehavior: - maybeOverrides.disambiguationBehavior ?? Parser.Disambiguation.DismabiguationBehavior.Strict, + maybeOverrides.disambiguationBehavior ?? Disambiguation.DismabiguationBehavior.Strict, }); }, }; -function assertGetCsvContinuationError(text: string): Parser.ParseError.ExpectedCsvContinuationError { - const innerError: Parser.ParseError.TInnerParseError = TestAssertUtils.assertGetParseError( - DefaultSettingsWithStrict, - text, +async function assertGetCsvContinuationError(text: string): Promise { + const innerError: ParseError.TInnerParseError = ( + await TestAssertUtils.assertGetParseError(DefaultSettingsWithStrict, text) ).innerError; Assert.isTrue( - innerError instanceof Parser.ParseError.ExpectedCsvContinuationError, + innerError instanceof ParseError.ExpectedCsvContinuationError, "innerError instanceof ParseError.ExpectedCsvContinuationError", ); - return innerError as Parser.ParseError.ExpectedCsvContinuationError; + return innerError as ParseError.ExpectedCsvContinuationError; } describe("Parser.Error", () => { - it("RequiredParameterAfterOptionalParameterError: (optional x, y) => x", () => { + it("RequiredParameterAfterOptionalParameterError: (optional x, y) => x", async () => { const text: string = "(optional x, y) => x"; - const innerError: Parser.ParseError.TInnerParseError = TestAssertUtils.assertGetParseError( - DefaultSettingsWithStrict, - text, + const innerError: ParseError.TInnerParseError = ( + await TestAssertUtils.assertGetParseError(DefaultSettingsWithStrict, text) ).innerError; - expect(innerError instanceof Parser.ParseError.RequiredParameterAfterOptionalParameterError).to.equal( + expect(innerError instanceof ParseError.RequiredParameterAfterOptionalParameterError).to.equal( true, innerError.message, ); }); - it("UnterminatedSequence (Bracket): let x = [", () => { + it("UnterminatedSequence (Bracket): let x = [", async () => { const text: string = "let x = ["; - const innerError: Parser.ParseError.TInnerParseError = TestAssertUtils.assertGetParseError( - DefaultSettingsWithStrict, - text, + const innerError: ParseError.TInnerParseError = ( + await TestAssertUtils.assertGetParseError(DefaultSettingsWithStrict, text) ).innerError; - expect(innerError instanceof Parser.ParseError.UnterminatedSequence).to.equal(true, innerError.message); + expect(innerError instanceof ParseError.UnterminatedSequence).to.equal(true, innerError.message); - expect((innerError as Parser.ParseError.UnterminatedSequence).kind).to.equal( - Parser.ParseError.SequenceKind.Bracket, + expect((innerError as ParseError.UnterminatedSequence).kind).to.equal( + ParseError.SequenceKind.Bracket, innerError.message, ); }); - it("UnterminatedSequence (Parenthesis): let x = (1", () => { + it("UnterminatedSequence (Parenthesis): let x = (1", async () => { const text: string = 
"let x = (1"; - const innerError: Parser.ParseError.TInnerParseError = TestAssertUtils.assertGetParseError( - DefaultSettingsWithStrict, - text, + const innerError: ParseError.TInnerParseError = ( + await TestAssertUtils.assertGetParseError(DefaultSettingsWithStrict, text) ).innerError; - expect(innerError instanceof Parser.ParseError.UnterminatedSequence).to.equal(true, innerError.message); + expect(innerError instanceof ParseError.UnterminatedSequence).to.equal(true, innerError.message); - expect((innerError as Parser.ParseError.UnterminatedSequence).kind).to.equal( - Parser.ParseError.SequenceKind.Parenthesis, + expect((innerError as ParseError.UnterminatedSequence).kind).to.equal( + ParseError.SequenceKind.Parenthesis, innerError.message, ); }); describe(`UnusedTokensRemainError`, () => { - it("default parser", () => { + it("default parser", async () => { const text: string = "1 1"; - const innerError: Parser.ParseError.TInnerParseError = TestAssertUtils.assertGetParseError( - DefaultSettingsWithStrict, - text, + const innerError: ParseError.TInnerParseError = ( + await TestAssertUtils.assertGetParseError(DefaultSettingsWithStrict, text) ).innerError; - expect(innerError instanceof Parser.ParseError.UnusedTokensRemainError).to.equal(true, innerError.message); + expect(innerError instanceof ParseError.UnusedTokensRemainError).to.equal(true, innerError.message); }); - it("custom start", () => { + it("custom start", async () => { const customSettings: Settings = { ...DefaultSettings, - parser: Parser.RecursiveDescentParser, - maybeParserEntryPointFn: Parser.RecursiveDescentParser.readIdentifier, + parser: RecursiveDescentParser, + // eslint-disable-next-line require-await + maybeParserEntryPointFn: async (state: ParseState, parser: Parser): Promise => + parser.readIdentifier(state, parser), }; const text: string = "a b"; - const innerError: Parser.ParseError.TInnerParseError = TestAssertUtils.assertGetParseError( - customSettings, - text, + const innerError: ParseError.TInnerParseError = ( + await TestAssertUtils.assertGetParseError(customSettings, text) ).innerError; - expect(innerError instanceof Parser.ParseError.UnusedTokensRemainError).to.equal(true, innerError.message); + expect(innerError instanceof ParseError.UnusedTokensRemainError).to.equal(true, innerError.message); }); }); - it(`Dangling Comma for LetExpression`, () => { + it(`Dangling Comma for LetExpression`, async () => { const text: string = "let a = 1, in 1"; - const continuationError: Parser.ParseError.ExpectedCsvContinuationError = assertGetCsvContinuationError(text); + + const continuationError: ParseError.ExpectedCsvContinuationError = await assertGetCsvContinuationError(text); expect(continuationError.message).to.equal( Localization.error_parse_csvContinuation( Templates.DefaultTemplates, - Parser.ParseError.CsvContinuationKind.LetExpression, + ParseError.CsvContinuationKind.LetExpression, ), ); }); - it(`Dangling Comma for ListExpression`, () => { + it(`Dangling Comma for ListExpression`, async () => { const text: string = "{1, }"; - const continuationError: Parser.ParseError.ExpectedCsvContinuationError = assertGetCsvContinuationError(text); + + const continuationError: ParseError.ExpectedCsvContinuationError = await assertGetCsvContinuationError(text); expect(continuationError.message).to.equal( Localization.error_parse_csvContinuation( Templates.DefaultTemplates, - Parser.ParseError.CsvContinuationKind.DanglingComma, + ParseError.CsvContinuationKind.DanglingComma, ), ); }); - it(`Dangling Comma for 
FunctionExpression`, () => { + it(`Dangling Comma for FunctionExpression`, async () => { const text: string = "(a, ) => a"; - const continuationError: Parser.ParseError.ExpectedCsvContinuationError = assertGetCsvContinuationError(text); + + const continuationError: ParseError.ExpectedCsvContinuationError = await assertGetCsvContinuationError(text); expect(continuationError.message).to.equal( Localization.error_parse_csvContinuation( Templates.DefaultTemplates, - Parser.ParseError.CsvContinuationKind.DanglingComma, + ParseError.CsvContinuationKind.DanglingComma, ), ); }); - it(`Dangling Comma for FunctionType`, () => { + it(`Dangling Comma for FunctionType`, async () => { const text: string = "type function (a as number, ) as number"; - const continuationError: Parser.ParseError.ExpectedCsvContinuationError = assertGetCsvContinuationError(text); + + const continuationError: ParseError.ExpectedCsvContinuationError = await assertGetCsvContinuationError(text); expect(continuationError.message).to.equal( Localization.error_parse_csvContinuation( Templates.DefaultTemplates, - Parser.ParseError.CsvContinuationKind.DanglingComma, + ParseError.CsvContinuationKind.DanglingComma, ), ); }); - it(`Dangling Comma for RecordExpression`, () => { + it(`Dangling Comma for RecordExpression`, async () => { const text: string = "[a = 1,]"; - const continuationError: Parser.ParseError.ExpectedCsvContinuationError = assertGetCsvContinuationError(text); + + const continuationError: ParseError.ExpectedCsvContinuationError = await assertGetCsvContinuationError(text); expect(continuationError.message).to.equal( Localization.error_parse_csvContinuation( Templates.DefaultTemplates, - Parser.ParseError.CsvContinuationKind.DanglingComma, + ParseError.CsvContinuationKind.DanglingComma, ), ); }); - it(`Dangling Comma for RecordType`, () => { + it(`Dangling Comma for RecordType`, async () => { const text: string = "type [a = 1,]"; - const continuationError: Parser.ParseError.ExpectedCsvContinuationError = assertGetCsvContinuationError(text); + + const continuationError: ParseError.ExpectedCsvContinuationError = await assertGetCsvContinuationError(text); expect(continuationError.message).to.equal( Localization.error_parse_csvContinuation( Templates.DefaultTemplates, - Parser.ParseError.CsvContinuationKind.DanglingComma, + ParseError.CsvContinuationKind.DanglingComma, ), ); }); - it(`Dangling Comma for TableType`, () => { + it(`Dangling Comma for TableType`, async () => { const text: string = "type table [a = 1,]"; - const continuationError: Parser.ParseError.ExpectedCsvContinuationError = assertGetCsvContinuationError(text); + + const continuationError: ParseError.ExpectedCsvContinuationError = await assertGetCsvContinuationError(text); expect(continuationError.message).to.equal( Localization.error_parse_csvContinuation( Templates.DefaultTemplates, - Parser.ParseError.CsvContinuationKind.DanglingComma, + ParseError.CsvContinuationKind.DanglingComma, ), ); }); diff --git a/src/test/libraryTest/parser/idUtils.ts b/src/test/libraryTest/parser/idUtils.ts index 2a385585..9aa3fb28 100644 --- a/src/test/libraryTest/parser/idUtils.ts +++ b/src/test/libraryTest/parser/idUtils.ts @@ -206,7 +206,7 @@ function traverseVisitNode(state: TraverseState, xorNode: TXorNode): void { } describe("idUtils", () => { - it(`1`, () => { + it(`1`, async () => { const text: string = `1`; const expected: AbridgedNodeIdMapCollection = { @@ -218,11 +218,11 @@ describe("idUtils", () => { parentIdById: [], }; - const triedLexParse: Task.TriedLexParseTask = 
TaskUtils.tryLexParse(DefaultSettings, text); + const triedLexParse: Task.TriedLexParseTask = await TaskUtils.tryLexParse(DefaultSettings, text); expectLinksMatch(triedLexParse, expected); }); - it(`-1`, () => { + it(`-1`, async () => { const text: string = `-1`; const expected: AbridgedNodeIdMapCollection = { @@ -246,11 +246,11 @@ describe("idUtils", () => { ], }; - const triedLexParse: Task.TriedLexParseTask = TaskUtils.tryLexParse(DefaultSettings, text); + const triedLexParse: Task.TriedLexParseTask = await TaskUtils.tryLexParse(DefaultSettings, text); expectLinksMatch(triedLexParse, expected); }); - it(`1 + 2`, () => { + it(`1 + 2`, async () => { const text: string = `1 + 2`; const expected: AbridgedNodeIdMapCollection = { @@ -270,11 +270,11 @@ describe("idUtils", () => { ], }; - const triedLexParse: Task.TriedLexParseTask = TaskUtils.tryLexParse(DefaultSettings, text); + const triedLexParse: Task.TriedLexParseTask = await TaskUtils.tryLexParse(DefaultSettings, text); expectLinksMatch(triedLexParse, expected); }); - it(`foo()`, () => { + it(`foo()`, async () => { const text: string = `foo()`; const expected: AbridgedNodeIdMapCollection = { @@ -306,7 +306,7 @@ describe("idUtils", () => { ], }; - const triedLexParse: Task.TriedLexParseTask = TaskUtils.tryLexParse(DefaultSettings, text); + const triedLexParse: Task.TriedLexParseTask = await TaskUtils.tryLexParse(DefaultSettings, text); expectLinksMatch(triedLexParse, expected); }); }); diff --git a/src/test/libraryTest/parser/nodeIdMapUtils.ts b/src/test/libraryTest/parser/nodeIdMapUtils.ts index 0a2b7369..efab6c2f 100644 --- a/src/test/libraryTest/parser/nodeIdMapUtils.ts +++ b/src/test/libraryTest/parser/nodeIdMapUtils.ts @@ -20,9 +20,9 @@ import { TestAssertUtils } from "../../testUtils"; describe("nodeIdMapIterator", () => { describe(`iterRecord`, () => { - it(`normalize record key`, () => { + it(`normalize record key`, async () => { const text: string = `let key = [#"foo" = bar] in key`; - const parseOk: Task.ParseTaskOk = TestAssertUtils.assertGetLexParseOk(DefaultSettings, text); + const parseOk: Task.ParseTaskOk = await TestAssertUtils.assertGetLexParseOk(DefaultSettings, text); const recordIds: Set = MapUtils.assertGet( parseOk.nodeIdMapCollection.idsByNodeKind, @@ -47,9 +47,9 @@ describe("nodeIdMapIterator", () => { }); describe(`iterFunctionExpressionParameters`, () => { - it(`ast`, () => { + it(`ast`, async () => { const text: string = `(x, y as number) => x + y`; - const parseOk: Task.ParseTaskOk = TestAssertUtils.assertGetLexParseOk(DefaultSettings, text); + const parseOk: Task.ParseTaskOk = await TestAssertUtils.assertGetLexParseOk(DefaultSettings, text); const functionExpressionIds: Set = MapUtils.assertGet( parseOk.nodeIdMapCollection.idsByNodeKind, @@ -84,9 +84,9 @@ describe("nodeIdMapIterator", () => { expect(secondParameter.name.literal).to.equal("y"); }); - it(`context`, () => { + it(`context`, async () => { const text: string = `(x, y as number) => let`; - const parseError: ParseError.ParseError = TestAssertUtils.assertGetParseError(DefaultSettings, text); + const parseError: ParseError.ParseError = await TestAssertUtils.assertGetParseError(DefaultSettings, text); const functionExpressionIds: Set = MapUtils.assertGet( parseError.state.contextState.nodeIdMapCollection.idsByNodeKind, @@ -123,9 +123,9 @@ describe("nodeIdMapIterator", () => { }); describe(`iterFunctionExpressionParameterNameLiterals`, () => { - it(`ast`, () => { + it(`ast`, async () => { const text: string = `(x, y as number) => x + y`; - const parseOk: 
Task.ParseTaskOk = TestAssertUtils.assertGetLexParseOk(DefaultSettings, text); + const parseOk: Task.ParseTaskOk = await TestAssertUtils.assertGetLexParseOk(DefaultSettings, text); const functionExpressionIds: Set = MapUtils.assertGet( parseOk.nodeIdMapCollection.idsByNodeKind, @@ -149,9 +149,9 @@ describe("nodeIdMapIterator", () => { expect(parameterNames).to.deep.equal(["x", "y"]); }); - it(`context`, () => { + it(`context`, async () => { const text: string = `(x, y as number) => let`; - const parseError: ParseError.ParseError = TestAssertUtils.assertGetParseError(DefaultSettings, text); + const parseError: ParseError.ParseError = await TestAssertUtils.assertGetParseError(DefaultSettings, text); const functionExpressionIds: Set = MapUtils.assertGet( parseError.state.contextState.nodeIdMapCollection.idsByNodeKind, @@ -177,9 +177,9 @@ describe("nodeIdMapIterator", () => { }); describe("maybeUnboxWrappedContent", () => { - it("Ast", () => { + it("Ast", async () => { const text: string = `[a = 1]`; - const parseOk: Task.ParseTaskOk = TestAssertUtils.assertGetLexParseOk(DefaultSettings, text); + const parseOk: Task.ParseTaskOk = await TestAssertUtils.assertGetLexParseOk(DefaultSettings, text); const nodeIdMapCollection: NodeIdMap.Collection = parseOk.nodeIdMapCollection; const recordExpressionNodeIds: Set = Assert.asDefined( @@ -195,9 +195,14 @@ describe("nodeIdMapIterator", () => { ); }); - it("Context", () => { + it("Context", async () => { const text: string = `[a = 1][`; - const parseError: Parser.ParseError.ParseError = TestAssertUtils.assertGetParseError(DefaultSettings, text); + + const parseError: Parser.ParseError.ParseError = await TestAssertUtils.assertGetParseError( + DefaultSettings, + text, + ); + const nodeIdMapCollection: NodeIdMap.Collection = parseError.state.contextState.nodeIdMapCollection; const recordExpressionNodeIds: Set = Assert.asDefined( diff --git a/src/test/libraryTest/parser/simple.ts b/src/test/libraryTest/parser/simple.ts index d6694824..0d47c2ad 100644 --- a/src/test/libraryTest/parser/simple.ts +++ b/src/test/libraryTest/parser/simple.ts @@ -27,8 +27,8 @@ interface NthNodeOfKindState extends Traverse.ITraversalState { - const lexParseOk: Task.ParseTaskOk = TestAssertUtils.assertGetLexParseOk(DefaultSettings, text); +async function collectAbridgeNodeFromAst(text: string): Promise> { + const lexParseOk: Task.ParseTaskOk = await TestAssertUtils.assertGetLexParseOk(DefaultSettings, text); const state: CollectAbridgeNodeState = { locale: DefaultLocale, @@ -53,12 +53,12 @@ function collectAbridgeNodeFromAst(text: string): ReadonlyArray { return triedTraverse.value; } -function assertGetNthNodeOfKind( +async function assertGetNthNodeOfKind( text: string, nodeKind: Language.Ast.NodeKind, nthRequired: number, -): N { - const parseTaskOk: Task.ParseTaskOk = TestAssertUtils.assertGetLexParseOk(DefaultSettings, text); +): Promise { + const parseTaskOk: Task.ParseTaskOk = await TestAssertUtils.assertGetLexParseOk(DefaultSettings, text); const state: NthNodeOfKindState = { locale: DefaultLocale, @@ -104,21 +104,21 @@ function nthNodeEarlyExit(state: NthNodeOfKindState, _: Language.Ast.TNode): boo return state.nthCounter === state.nthRequired; } -function assertAbridgeNodes(text: string, expected: ReadonlyArray): void { - const actual: ReadonlyArray = collectAbridgeNodeFromAst(text); +async function assertAbridgeNodes(text: string, expected: ReadonlyArray): Promise { + const actual: ReadonlyArray = await collectAbridgeNodeFromAst(text); expect(actual).deep.equal(expected, 
JSON.stringify(actual)); } describe("Parser.AbridgedNode", () => { describe(`custom IParser.read`, () => { - it(`readParameterSpecificationList`, () => { + it(`readParameterSpecificationList`, async () => { const customSettings: Settings = { ...DefaultSettings, parser: Parser.RecursiveDescentParser, maybeParserEntryPointFn: Parser.RecursiveDescentParser.readParameterSpecificationList, }; - const triedLexParseTask: Task.TriedLexParseTask = TaskUtils.tryLexParse( + const triedLexParseTask: Task.TriedLexParseTask = await TaskUtils.tryLexParse( customSettings, "(a as number, optional b as text)", ); @@ -128,7 +128,7 @@ describe("Parser.AbridgedNode", () => { }); describe(`${Language.Ast.NodeKind.ArithmeticExpression}`, () => { - it(`1 & 2`, () => { + it(`1 & 2`, async () => { const text: string = `1 & 2`; const expected: ReadonlyArray = [ @@ -138,9 +138,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -149,7 +149,7 @@ describe("Parser.AbridgedNode", () => { expect(operatorNode.constantKind).to.equal(Language.Constant.ArithmeticOperator.And); }); - it(`1 * 2`, () => { + it(`1 * 2`, async () => { const text: string = `1 * 2`; const expected: ReadonlyArray = [ @@ -159,9 +159,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -170,7 +170,7 @@ describe("Parser.AbridgedNode", () => { expect(operatorNode.constantKind).to.equal(Language.Constant.ArithmeticOperator.Multiplication); }); - it(`1 / 2`, () => { + it(`1 / 2`, async () => { const text: string = `1 / 2`; const expected: ReadonlyArray = [ @@ -180,9 +180,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -191,7 +191,7 @@ describe("Parser.AbridgedNode", () => { expect(operatorNode.constantKind).to.equal(Language.Constant.ArithmeticOperator.Division); }); - it(`1 + 2`, () => { + it(`1 + 2`, async () => { const text: string = `1 + 2`; const expected: ReadonlyArray = [ @@ -201,9 +201,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -212,7 +212,7 @@ describe("Parser.AbridgedNode", () => { expect(operatorNode.constantKind).to.equal(Language.Constant.ArithmeticOperator.Addition); }); - it(`1 - 2`, () => { + it(`1 - 2`, async () => { const text: string = `1 - 2`; const expected: ReadonlyArray = [ @@ -222,9 +222,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - 
assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -233,7 +233,7 @@ describe("Parser.AbridgedNode", () => { expect(operatorNode.constantKind).to.equal(Language.Constant.ArithmeticOperator.Subtraction); }); - it(`1 + 2 + 3 + 4`, () => { + it(`1 + 2 + 3 + 4`, async () => { const text: string = `1 + 2 + 3 + 4`; const expected: ReadonlyArray = [ @@ -249,12 +249,12 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.AsExpression}`, () => { - it(`1 as number`, () => { + it(`1 as number`, async () => { const text: string = `1 as number`; const expected: ReadonlyArray = [ @@ -264,10 +264,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.PrimitiveType, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`type function (x as number) as number`, () => { + it(`type function (x as number) as number`, async () => { const text: string = `type function (x as number) as number`; const expected: ReadonlyArray = [ @@ -290,7 +290,7 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.PrimitiveType, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); @@ -298,7 +298,7 @@ describe("Parser.AbridgedNode", () => { // Ast.NodeKind.Csv covered by many - it(`${Language.Ast.NodeKind.EachExpression}`, () => { + it(`${Language.Ast.NodeKind.EachExpression}`, async () => { const text: string = `each 1`; const expected: ReadonlyArray = [ @@ -307,11 +307,11 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); describe(`${Language.Ast.NodeKind.EqualityExpression}`, () => { - it(`1 = 2`, () => { + it(`1 = 2`, async () => { const text: string = `1 = 2`; const expected: ReadonlyArray = [ @@ -321,9 +321,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -332,7 +332,7 @@ describe("Parser.AbridgedNode", () => { expect(operatorNode.constantKind).to.equal(Language.Constant.EqualityOperator.EqualTo); }); - it(`1 <> 2`, () => { + it(`1 <> 2`, async () => { const text: string = `1 <> 2`; const expected: ReadonlyArray = [ @@ -342,9 +342,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -355,7 +355,7 @@ describe("Parser.AbridgedNode", () => { }); describe(`${Language.Ast.NodeKind.ErrorHandlingExpression}`, () => { - it(`try 1`, () => { + it(`try 1`, async () => { const text: string = `try 1`; const expected: ReadonlyArray = [ @@ -364,10 +364,10 @@ describe("Parser.AbridgedNode", () => { 
[Language.Ast.NodeKind.LiteralExpression, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`try 1 otherwise 2`, () => { + it(`try 1 otherwise 2`, async () => { const text: string = `try 1 otherwise 2`; const expected: ReadonlyArray = [ @@ -379,11 +379,11 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); - it(`${Language.Ast.NodeKind.ErrorRaisingExpression}`, () => { + it(`${Language.Ast.NodeKind.ErrorRaisingExpression}`, async () => { const text: string = `error 1`; const expected: ReadonlyArray = [ @@ -392,11 +392,11 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); describe(`${Language.Ast.NodeKind.FieldProjection}`, () => { - it(`x[[y]]`, () => { + it(`x[[y]]`, async () => { const text: string = `x[[y]]`; const expected: ReadonlyArray = [ @@ -415,10 +415,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`x[[y], [z]]`, () => { + it(`x[[y], [z]]`, async () => { const text: string = `x[[y], [z]]`; const expected: ReadonlyArray = [ @@ -443,10 +443,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`x[[y]]?`, () => { + it(`x[[y]]?`, async () => { const text: string = `x[[y]]?`; const expected: ReadonlyArray = [ @@ -466,12 +466,12 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.FieldSelector}`, () => { - it(`[x]`, () => { + it(`[x]`, async () => { const text: string = `[x]`; const expected: ReadonlyArray = [ @@ -481,10 +481,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`[x]?`, () => { + it(`[x]?`, async () => { const text: string = `[x]?`; const expected: ReadonlyArray = [ @@ -495,12 +495,12 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.FieldSpecification}`, () => { - it(`type [x]`, () => { + it(`type [x]`, async () => { const text: string = `type [x]`; const expected: ReadonlyArray = [ @@ -516,10 +516,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`type [optional x]`, () => { + it(`type [optional x]`, async () => { const text: string = `type [optional x]`; const expected: ReadonlyArray = [ @@ -536,10 +536,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`type [x = number]`, () => { + it(`type [x = number]`, async () => { const text: string = `type [x = number]`; const expected: ReadonlyArray = [ @@ -558,12 +558,12 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await 
assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.FieldSpecificationList}`, () => { - it(`${Language.Ast.NodeKind.FieldSpecificationList}`, () => { + it(`${Language.Ast.NodeKind.FieldSpecificationList}`, async () => { const text: string = `type [x]`; const expected: ReadonlyArray = [ @@ -579,10 +579,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`type [x, ...]`, () => { + it(`type [x, ...]`, async () => { const text: string = `type [x, ...]`; const expected: ReadonlyArray = [ @@ -600,14 +600,14 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); // Ast.NodeKind.FieldTypeSpecification covered by FieldSpecification describe(`${Language.Ast.NodeKind.FunctionExpression}`, () => { - it(`() => 1`, () => { + it(`() => 1`, async () => { const text: string = `() => 1`; const expected: ReadonlyArray = [ @@ -620,10 +620,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`(x) => 1`, () => { + it(`(x) => 1`, async () => { const text: string = `(x) => 1`; const expected: ReadonlyArray = [ @@ -639,10 +639,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`(x, y, z) => 1`, () => { + it(`(x, y, z) => 1`, async () => { const text: string = `(x, y, z) => 1`; const expected: ReadonlyArray = [ @@ -666,10 +666,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`(optional x) => 1`, () => { + it(`(optional x) => 1`, async () => { const text: string = `(optional x) => 1`; const expected: ReadonlyArray = [ @@ -686,10 +686,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`(x as nullable text) => 1`, () => { + it(`(x as nullable text) => 1`, async () => { const text: string = `(x as nullable text) => 1`; const expected: ReadonlyArray = [ @@ -710,10 +710,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`(x) as number => x`, () => { + it(`(x) as number => x`, async () => { const text: string = `(x) as number => x`; const expected: ReadonlyArray = [ @@ -733,10 +733,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Identifier, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`(x as number) as number => x`, () => { + it(`(x as number) as number => x`, async () => { const text: string = `(x as number) as number => x`; const expected: ReadonlyArray = [ @@ -759,10 +759,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Identifier, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`(x as number) as nullable number => x`, () => { + it(`(x as number) as nullable number => x`, async () => { const text: string = `(x as number) as nullable number => x`; const 
expected: ReadonlyArray = [ @@ -787,10 +787,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Identifier, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`let Fn = () as nullable text => "asd" in Fn`, () => { + it(`let Fn = () as nullable text => "asd" in Fn`, async () => { const text: string = `let Fn = () as nullable text => "asd" in Fn`; const expected: ReadonlyArray = [ @@ -818,12 +818,12 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Identifier, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.FunctionType}`, () => { - it(`type function () as number`, () => { + it(`type function () as number`, async () => { const text: string = `type function () as number`; const expected: ReadonlyArray = [ @@ -840,10 +840,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.PrimitiveType, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`type function (x as number) as number`, () => { + it(`type function (x as number) as number`, async () => { const text: string = `type function (x as number) as number`; const expected: ReadonlyArray = [ @@ -866,14 +866,14 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.PrimitiveType, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); // Ast.NodeKind.FieldTypeSpecification covered by AsType describe(`${Language.Ast.NodeKind.GeneralizedIdentifier}`, () => { - it(`[foo bar]`, () => { + it(`[foo bar]`, async () => { const text: string = `[foo bar]`; const expected: ReadonlyArray = [ @@ -883,10 +883,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`[1]`, () => { + it(`[1]`, async () => { const text: string = `[1]`; const expected: ReadonlyArray = [ @@ -896,10 +896,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`[a.1]`, () => { + it(`[a.1]`, async () => { const text: string = `[a.1]`; const expected: ReadonlyArray = [ @@ -909,10 +909,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`[#"a""" = 1]`, () => { + it(`[#"a""" = 1]`, async () => { const text: string = `[#"a""" = 1]`; const expected: ReadonlyArray = [ @@ -927,11 +927,11 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); - it(`Ast.NodeKind.GeneralizedIdentifierPairedAnyLiteral`, () => { + it(`Ast.NodeKind.GeneralizedIdentifierPairedAnyLiteral`, async () => { const text: string = `[x=1] section;`; const expected: ReadonlyArray = [ @@ -950,10 +950,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.ArrayWrapper, 4], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`${Language.Ast.NodeKind.GeneralizedIdentifierPairedExpression}`, () => { + it(`${Language.Ast.NodeKind.GeneralizedIdentifierPairedExpression}`, async () => { const text: string = `[x=1]`; const expected: ReadonlyArray = [ @@ -968,13 +968,13 @@ describe("Parser.AbridgedNode", () => { 
[Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); // Ast.NodeKind.Identifier covered by many describe(`${Language.Ast.NodeKind.IdentifierExpression}`, () => { - it(`@foo`, () => { + it(`@foo`, async () => { const text: string = `@foo`; const expected: ReadonlyArray = [ @@ -983,11 +983,11 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Identifier, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); - it(`${Language.Ast.NodeKind.IdentifierPairedExpression}`, () => { + it(`${Language.Ast.NodeKind.IdentifierPairedExpression}`, async () => { const text: string = `section; x = 1;`; const expected: ReadonlyArray = [ @@ -1003,10 +1003,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`${Language.Ast.NodeKind.IfExpression}`, () => { + it(`${Language.Ast.NodeKind.IfExpression}`, async () => { const text: string = `if x then x else x`; const expected: ReadonlyArray = [ @@ -1022,10 +1022,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Identifier, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`${Language.Ast.NodeKind.InvokeExpression}`, () => { + it(`${Language.Ast.NodeKind.InvokeExpression}`, async () => { const text: string = `foo()`; const expected: ReadonlyArray = [ @@ -1039,11 +1039,11 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); describe(`${Language.Ast.NodeKind.IsExpression}`, () => { - it(`1 is number`, () => { + it(`1 is number`, async () => { const text: string = `1 is number`; const expected: ReadonlyArray = [ @@ -1053,10 +1053,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.PrimitiveType, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`1 is number is number`, () => { + it(`1 is number is number`, async () => { const text: string = `1 is number is number`; const expected: ReadonlyArray = [ @@ -1069,11 +1069,11 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.PrimitiveType, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); - it(`${Language.Ast.NodeKind.ItemAccessExpression}`, () => { + it(`${Language.Ast.NodeKind.ItemAccessExpression}`, async () => { const text: string = `x{1}`; const expected: ReadonlyArray = [ @@ -1087,10 +1087,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`${Language.Ast.NodeKind.ItemAccessExpression} optional`, () => { + it(`${Language.Ast.NodeKind.ItemAccessExpression} optional`, async () => { const text: string = `x{1}?`; const expected: ReadonlyArray = [ @@ -1105,11 +1105,11 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); describe(`keywords`, () => { - it(`#sections`, () => { + it(`#sections`, async () => { const text: string = `#sections`; const expected: ReadonlyArray = [ @@ -1117,10 +1117,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Identifier, 1], ]; - assertAbridgeNodes(text, expected); + await 
assertAbridgeNodes(text, expected); }); - it(`#shared`, () => { + it(`#shared`, async () => { const text: string = `#shared`; const expected: ReadonlyArray = [ @@ -1128,12 +1128,12 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Identifier, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.LetExpression}`, () => { - it(`let x = 1 in x`, () => { + it(`let x = 1 in x`, async () => { const text: string = `let x = 1 in x`; const expected: ReadonlyArray = [ @@ -1150,10 +1150,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Identifier, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`let x = 1 in try x`, () => { + it(`let x = 1 in try x`, async () => { const text: string = `let x = 1 in try x`; const expected: ReadonlyArray = [ @@ -1172,12 +1172,12 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Identifier, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.ListExpression}`, () => { - it(`{}`, () => { + it(`{}`, async () => { const text: string = `{}`; const expected: ReadonlyArray = [ @@ -1187,10 +1187,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`{1, 2}`, () => { + it(`{1, 2}`, async () => { const text: string = `{1, 2}`; const expected: ReadonlyArray = [ @@ -1205,10 +1205,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`{1..2}`, () => { + it(`{1..2}`, async () => { const text: string = `{1..2}`; const expected: ReadonlyArray = [ @@ -1223,10 +1223,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`{1..2, 3..4}`, () => { + it(`{1..2, 3..4}`, async () => { const text: string = `{1..2, 3..4}`; const expected: ReadonlyArray = [ @@ -1247,10 +1247,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`{1, 2..3}`, () => { + it(`{1, 2..3}`, async () => { const text: string = `{1, 2..3}`; const expected: ReadonlyArray = [ @@ -1268,10 +1268,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`{1..2, 3}`, () => { + it(`{1..2, 3}`, async () => { const text: string = `{1..2, 3}`; const expected: ReadonlyArray = [ @@ -1289,10 +1289,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`let x = 1, y = {x..2} in y`, () => { + it(`let x = 1, y = {x..2} in y`, async () => { const text: string = `let x = 1, y = {x..2} in y`; const expected: ReadonlyArray = [ @@ -1324,12 +1324,12 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Identifier, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.ListLiteral}`, () => { - it(`[foo = {1}] section;`, () => { + it(`[foo = {1}] section;`, async () => { const text: string = 
`[foo = {1}] section;`; const expected: ReadonlyArray = [ @@ -1353,10 +1353,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.ArrayWrapper, 4], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`[foo = {}] section;`, () => { + it(`[foo = {}] section;`, async () => { const text: string = `[foo = {}] section;`; const expected: ReadonlyArray = [ @@ -1378,11 +1378,11 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.ArrayWrapper, 4], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); - it(`${Language.Ast.NodeKind.ListType}`, () => { + it(`${Language.Ast.NodeKind.ListType}`, async () => { const text: string = `type {number}`; const expected: ReadonlyArray = [ @@ -1394,103 +1394,103 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); describe(`${Language.Ast.NodeKind.LiteralExpression}`, () => { - it(`true`, () => { + it(`true`, async () => { const text: string = `true`; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`false`, () => { + it(`false`, async () => { const text: string = `false`; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`1`, () => { + it(`1`, async () => { const text: string = `1`; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`0x1`, () => { + it(`0x1`, async () => { const text: string = `0x1`; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`0X1`, () => { + it(`0X1`, async () => { const text: string = `0X1`; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`1.2`, () => { + it(`1.2`, async () => { const text: string = `1.2`; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`.1`, () => { + it(`.1`, async () => { const text: string = ".1"; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`1e2`, () => { + it(`1e2`, async () => { const text: string = "1e2"; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`1e+2`, () => { + it(`1e+2`, async () => { const text: string = "1e+2"; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`1e-2`, () => { + it(`1e-2`, async () => { const text: string = "1e-2"; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`#nan`, () => { + 
it(`#nan`, async () => { const text: string = `#nan`; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`#infinity`, () => { + it(`#infinity`, async () => { const text: string = `#infinity`; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`""`, () => { + it(`""`, async () => { const text: string = `""`; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`""""`, () => { + it(`""""`, async () => { const text: string = `""""`; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`null`, () => { + it(`null`, async () => { const text: string = `null`; const expected: ReadonlyArray = [[Language.Ast.NodeKind.LiteralExpression, undefined]]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.LogicalExpression}`, () => { - it(`true and true`, () => { + it(`true and true`, async () => { const text: string = `true and true`; const expected: ReadonlyArray = [ @@ -1500,10 +1500,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`true or true`, () => { + it(`true or true`, async () => { const text: string = `true or true`; const expected: ReadonlyArray = [ @@ -1513,11 +1513,11 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); - it(`${Language.Ast.NodeKind.MetadataExpression}`, () => { + it(`${Language.Ast.NodeKind.MetadataExpression}`, async () => { const text: string = `1 meta 1`; const expected: ReadonlyArray = [ @@ -1527,10 +1527,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`${Language.Ast.NodeKind.NotImplementedExpression}`, () => { + it(`${Language.Ast.NodeKind.NotImplementedExpression}`, async () => { const text: string = `...`; const expected: ReadonlyArray = [ @@ -1538,10 +1538,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 0], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`${Language.Ast.NodeKind.NullablePrimitiveType}`, () => { + it(`${Language.Ast.NodeKind.NullablePrimitiveType}`, async () => { const text: string = `1 is nullable number`; const expected: ReadonlyArray = [ @@ -1553,10 +1553,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.PrimitiveType, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`${Language.Ast.NodeKind.NullableType}`, () => { + it(`${Language.Ast.NodeKind.NullableType}`, async () => { const text: string = `type nullable number`; const expected: ReadonlyArray = [ @@ -1567,11 +1567,11 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.PrimitiveType, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); 
}); describe(`${Language.Ast.NodeKind.NullCoalescingExpression}`, () => { - it(`1 ?? a`, () => { + it(`1 ?? a`, async () => { const text: string = `1 ?? a`; const expected: ReadonlyArray = [ @@ -1582,10 +1582,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Identifier, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`1 ?? 1 ?? 1`, () => { + it(`1 ?? 1 ?? 1`, async () => { const text: string = `1 ?? 1 ?? 1`; const expected: ReadonlyArray = [ @@ -1598,7 +1598,7 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); @@ -1609,7 +1609,7 @@ describe("Parser.AbridgedNode", () => { // Ast.NodeKind.ParameterList covered by many describe(`${Language.Ast.NodeKind.ParenthesizedExpression}`, () => { - it(`(1)`, () => { + it(`(1)`, async () => { const text: string = `(1)`; const expected: ReadonlyArray = [ @@ -1619,10 +1619,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`(1) + 1`, () => { + it(`(1) + 1`, async () => { const text: string = `(1) + 1`; const expected: ReadonlyArray = [ @@ -1635,10 +1635,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`(if true then true else false) and true`, () => { + it(`(if true then true else false) and true`, async () => { const text: string = `(if true then true else false) and true`; const expected: ReadonlyArray = [ @@ -1657,10 +1657,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`((1)) and true`, () => { + it(`((1)) and true`, async () => { const text: string = `((1)) and true`; const expected: ReadonlyArray = [ @@ -1676,12 +1676,12 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.PrimitiveType}`, () => { - it(`1 as time`, () => { + it(`1 as time`, async () => { const text: string = `1 as time`; const expected: ReadonlyArray = [ @@ -1691,12 +1691,12 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.PrimitiveType, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.RecordExpression}`, () => { - it(`[x=1]`, () => { + it(`[x=1]`, async () => { const text: string = `[x=1]`; const expected: ReadonlyArray = [ @@ -1711,10 +1711,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`[]`, () => { + it(`[]`, async () => { const text: string = `[]`; const expected: ReadonlyArray = [ @@ -1724,14 +1724,14 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); // Ast.NodeKind.RecordLiteral covered by many describe(`${Language.Ast.NodeKind.RecordType}`, () => { - it(`type [x]`, () => { + it(`type [x]`, async () => { const text: string = `type [x]`; const expected: ReadonlyArray = [ @@ -1747,10 
+1747,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`type [x, ...]`, () => { + it(`type [x, ...]`, async () => { const text: string = `type [x, ...]`; const expected: ReadonlyArray = [ @@ -1768,14 +1768,14 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); // Ast.NodeKind.RecursivePrimaryExpression covered by many describe(`${Language.Ast.NodeKind.RelationalExpression}`, () => { - it(`1 > 2`, () => { + it(`1 > 2`, async () => { const text: string = `1 > 2`; const expected: ReadonlyArray = [ @@ -1785,9 +1785,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -1796,7 +1796,7 @@ describe("Parser.AbridgedNode", () => { expect(operatorNode.constantKind).to.equal(Language.Constant.RelationalOperator.GreaterThan); }); - it(`1 >= 2`, () => { + it(`1 >= 2`, async () => { const text: string = `1 >= 2`; const expected: ReadonlyArray = [ @@ -1806,9 +1806,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -1817,7 +1817,7 @@ describe("Parser.AbridgedNode", () => { expect(operatorNode.constantKind).to.equal(Language.Constant.RelationalOperator.GreaterThanEqualTo); }); - it(`1 < 2`, () => { + it(`1 < 2`, async () => { const text: string = `1 < 2`; const expected: ReadonlyArray = [ @@ -1827,9 +1827,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -1838,7 +1838,7 @@ describe("Parser.AbridgedNode", () => { expect(operatorNode.constantKind).to.equal(Language.Constant.RelationalOperator.LessThan); }); - it(`1 <= 2`, () => { + it(`1 <= 2`, async () => { const text: string = `1 <= 2`; const expected: ReadonlyArray = [ @@ -1848,9 +1848,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -1861,7 +1861,7 @@ describe("Parser.AbridgedNode", () => { }); describe(`${Language.Ast.NodeKind.Section}`, () => { - it(`section;`, () => { + it(`section;`, async () => { const text: string = `section;`; const expected: ReadonlyArray = [ @@ -1871,10 +1871,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.ArrayWrapper, 4], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`[] 
section;`, () => { + it(`[] section;`, async () => { const text: string = `[] section;`; const expected: ReadonlyArray = [ @@ -1888,10 +1888,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.ArrayWrapper, 4], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`section foo;`, () => { + it(`section foo;`, async () => { const text: string = `section foo;`; const expected: ReadonlyArray = [ @@ -1902,10 +1902,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.ArrayWrapper, 4], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`section; x = 1;`, () => { + it(`section; x = 1;`, async () => { const text: string = `section; x = 1;`; const expected: ReadonlyArray = [ @@ -1921,10 +1921,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`section; x = 1; y = 2;`, () => { + it(`section; x = 1; y = 2;`, async () => { const text: string = `section; x = 1; y = 2;`; const expected: ReadonlyArray = [ @@ -1946,12 +1946,12 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.SectionMember}`, () => { - it(`section; x = 1;`, () => { + it(`section; x = 1;`, async () => { const text: string = `section; x = 1;`; const expected: ReadonlyArray = [ @@ -1967,10 +1967,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`section; [] x = 1;`, () => { + it(`section; [] x = 1;`, async () => { const text: string = `section; [] x = 1;`; const expected: ReadonlyArray = [ @@ -1990,10 +1990,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`section; shared x = 1;`, () => { + it(`section; shared x = 1;`, async () => { const text: string = `section; shared x = 1;`; const expected: ReadonlyArray = [ @@ -2010,12 +2010,12 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 3], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); describe(`${Language.Ast.NodeKind.TableType}`, () => { - it(`type table [x]`, () => { + it(`type table [x]`, async () => { const text: string = `type table [x]`; const expected: ReadonlyArray = [ @@ -2032,10 +2032,10 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); - it(`type table (x)`, () => { + it(`type table (x)`, async () => { const text: string = `type table (x)`; const expected: ReadonlyArray = [ @@ -2050,14 +2050,14 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.Constant, 2], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); }); }); // Ast.NodeKind.TypePrimaryType covered by many describe(`${Language.Ast.NodeKind.UnaryExpression}`, () => { - it(`-1`, () => { + it(`-1`, async () => { const text: string = `-1`; const expected: ReadonlyArray = [ @@ -2067,9 +2067,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, 
expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -2078,7 +2078,7 @@ describe("Parser.AbridgedNode", () => { expect(operatorNode.constantKind).to.equal(Language.Constant.UnaryOperator.Negative); }); - it(`not 1`, () => { + it(`not 1`, async () => { const text: string = `not 1`; const expected: ReadonlyArray = [ @@ -2088,9 +2088,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, @@ -2099,7 +2099,7 @@ describe("Parser.AbridgedNode", () => { expect(operatorNode.constantKind).to.equal(Language.Constant.UnaryOperator.Not); }); - it(`+1`, () => { + it(`+1`, async () => { const text: string = `+1`; const expected: ReadonlyArray = [ @@ -2109,9 +2109,9 @@ describe("Parser.AbridgedNode", () => { [Language.Ast.NodeKind.LiteralExpression, 1], ]; - assertAbridgeNodes(text, expected); + await assertAbridgeNodes(text, expected); - const operatorNode: Language.Ast.TConstant = assertGetNthNodeOfKind( + const operatorNode: Language.Ast.TConstant = await assertGetNthNodeOfKind( text, Language.Ast.NodeKind.Constant, 1, diff --git a/src/test/resourceTest/benchmark/createBenchmarks.ts b/src/test/resourceTest/benchmark/createBenchmarks.ts index dece937d..bdbaa243 100644 --- a/src/test/resourceTest/benchmark/createBenchmarks.ts +++ b/src/test/resourceTest/benchmark/createBenchmarks.ts @@ -32,7 +32,7 @@ for (const filePath of TestFileUtils.getPowerQueryFilesRecursively(SourceFilesDi for (let iteration: number = 0; iteration < NumberOfRunsPerFile; iteration += 1) { const stream: fs.WriteStream = createOutputStream(filePath, iteration); - stream.on("open", () => { + stream.on("open", async () => { if (iteration % 10 === 0 || iteration === NumberOfRunsPerFile - 1) { console.log( `Running iteration ${iteration + 1} out of ${NumberOfRunsPerFile} for ${path.basename(filePath)}`, @@ -44,7 +44,7 @@ for (const filePath of TestFileUtils.getPowerQueryFilesRecursively(SourceFilesDi traceManager: new BenchmarkTraceManager((message: string) => stream.write(message)), }; - TestFileUtils.tryLexParse(benchmarkSettings, filePath); + await TestFileUtils.tryLexParse(benchmarkSettings, filePath); }); } } diff --git a/src/test/resourceTest/lexParse.ts b/src/test/resourceTest/lexParse.ts index 19ce33b6..05a48f03 100644 --- a/src/test/resourceTest/lexParse.ts +++ b/src/test/resourceTest/lexParse.ts @@ -35,8 +35,8 @@ function parseAllFiles(settings: Settings, parserName: string): void { for (const filePath of TestFileUtils.getPowerQueryFilesRecursively(fileDirectory)) { const testName: string = testNameFromFilePath(filePath); - it(testName, () => { - const triedLexParseTask: Task.TriedLexParseTask = TestFileUtils.tryLexParse(settings, filePath); + it(testName, async () => { + const triedLexParseTask: Task.TriedLexParseTask = await TestFileUtils.tryLexParse(settings, filePath); TaskUtils.assertIsParseStageOk(triedLexParseTask); }); } diff --git a/src/test/testUtils/assertUtils.ts b/src/test/testUtils/assertUtils.ts index f5b86dfc..df1854a4 100644 --- a/src/test/testUtils/assertUtils.ts +++ b/src/test/testUtils/assertUtils.ts @@ -6,15 +6,21 @@ import "mocha"; import { Assert, Lexer, 
LexSettings, Parser, ParseSettings, Task } from "../.."; import { TaskUtils } from "../../powerquery-parser"; -export function assertGetLexParseOk(settings: LexSettings & ParseSettings, text: string): Task.ParseTaskOk { - const triedLexParseTask: Task.TriedLexParseTask = TaskUtils.tryLexParse(settings, text); +export async function assertGetLexParseOk( + settings: LexSettings & ParseSettings, + text: string, +): Promise<Task.ParseTaskOk> { + const triedLexParseTask: Task.TriedLexParseTask = await TaskUtils.tryLexParse(settings, text); TaskUtils.assertIsParseStageOk(triedLexParseTask); return triedLexParseTask; } -export function assertGetParseError(settings: LexSettings & ParseSettings, text: string): Parser.ParseError.ParseError { - const triedParse: Parser.TriedParse = assertGetTriedParse(settings, text); +export async function assertGetParseError( + settings: LexSettings & ParseSettings, + text: string, +): Promise<Parser.ParseError.ParseError> { + const triedParse: Parser.TriedParse = await assertGetTriedParse(settings, text); Assert.isError(triedParse); if (!Parser.ParseError.isParseError(triedParse.error)) { @@ -24,8 +30,8 @@ export function assertGetParseError(settings: LexSettings & ParseSettings, text: return triedParse.error; } -export function assertGetParseOk(settings: LexSettings & ParseSettings, text: string): Parser.ParseOk { - const triedParse: Parser.TriedParse = assertGetTriedParse(settings, text); +export async function assertGetParseOk(settings: LexSettings & ParseSettings, text: string): Promise<Parser.ParseOk> { + const triedParse: Parser.TriedParse = await assertGetTriedParse(settings, text); Assert.isOk(triedParse); return triedParse.value; @@ -33,7 +39,7 @@ export function assertGetParseOk(settings: LexSettings & ParseSettings, text: st // I only care about errors coming from the parse stage. // If I use tryLexParse I might get a CommonError which could have come either from lexing or parsing. 
-function assertGetTriedParse(settings: LexSettings & ParseSettings, text: string): Parser.TriedParse { +async function assertGetTriedParse(settings: LexSettings & ParseSettings, text: string): Promise<Parser.TriedParse> { const triedLex: Lexer.TriedLex = Lexer.tryLex(settings, text); Assert.isOk(triedLex); const lexerState: Lexer.State = triedLex.value; @@ -43,5 +49,5 @@ function assertGetTriedParse(settings: LexSettings & ParseSettings, text: string Assert.isOk(triedSnapshot); const lexerSnapshot: Lexer.LexerSnapshot = triedSnapshot.value; - return Parser.ParserUtils.tryParse(settings, lexerSnapshot); + return await Parser.ParserUtils.tryParse(settings, lexerSnapshot); } diff --git a/src/test/testUtils/fileUtils.ts b/src/test/testUtils/fileUtils.ts index a02d4f0e..bfd4b91a 100644 --- a/src/test/testUtils/fileUtils.ts +++ b/src/test/testUtils/fileUtils.ts @@ -49,10 +49,13 @@ export function writeContents(filePath: string, contents: string): void { }); } -export function tryLexParse(settings: LexSettings & ParseSettings, filePath: string): Task.TriedLexParseTask { +export async function tryLexParse( + settings: LexSettings & ParseSettings, + filePath: string, +): Promise<Task.TriedLexParseTask> { const contents: string = readContents(filePath); - return await TaskUtils.tryLexParse(settings, contents); } function isDirectory(maybePath: string): boolean { diff --git a/tsconfig.json b/tsconfig.json index 7777e2ba..38dd1d41 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -15,7 +15,7 @@ "rootDir": "src", "sourceMap": true, "strict": true, - "target": "es5", + "target": "es6", "tsBuildInfoFile": "./tsconfig.tsbuildinfo" }, "exclude": ["node_modules"],