diff --git a/.gitignore b/.gitignore index 63621b0fd5..fab4586cb8 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,7 @@ node_modules test-results unit-test-results *.local +/test-results/ +/playwright-report/ +/blob-report/ +/playwright/.cache/ diff --git a/e2e-tests/fixtures/Dictionaries.ts b/e2e-tests/fixtures/Dictionaries.ts index 42765fe37f..b8f9148dc4 100644 --- a/e2e-tests/fixtures/Dictionaries.ts +++ b/e2e-tests/fixtures/Dictionaries.ts @@ -24,13 +24,15 @@ export class Dictionaries { this.updatePage(page); } - async createDictionary() { + async createDictionary(): Promise { await expect(this.tableRow).not.toBeVisible(); await this.fillInputFile(); await this.createButton.click(); await this.tableRow.waitFor({ state: 'attached' }); await this.tableRow.waitFor({ state: 'visible' }); await expect(this.tableRow).toBeVisible(); + + return this.dictionaryName; } /** diff --git a/e2e-tests/fixtures/ExpansionRules.ts b/e2e-tests/fixtures/ExpansionRules.ts index 84caf5bc21..247880e6ef 100644 --- a/e2e-tests/fixtures/ExpansionRules.ts +++ b/e2e-tests/fixtures/ExpansionRules.ts @@ -2,8 +2,8 @@ import { expect, type Locator, type Page } from '@playwright/test'; import { adjectives, animals, colors, uniqueNamesGenerator } from 'unique-names-generator'; import { fillEditorText } from '../utilities/editor.js'; import { getOptionValueFromText } from '../utilities/selectors.js'; -import { Dictionaries } from './Dictionaries.js'; import { Models } from './Models.js'; +import { Parcels } from './Parcels.js'; export class ExpansionRules { cancelButton: Locator; @@ -12,13 +12,13 @@ export class ExpansionRules { confirmModalDeleteButton: Locator; inputActivityType: Locator; inputActivityTypeSelector: string = 'select[name="activityType"]'; - inputCommandDictionary: Locator; - inputCommandDictionarySelector: string = 'select[name="commandDictionary"]'; inputEditor: Locator; inputModel: Locator; inputModelSelector: string = 'select[name="modelId"]'; inputName: Locator; 
inputNameSelector: string = 'input[name="name"]'; + inputParcel: Locator; + inputParcelSelector: string = 'select[name="parcel"]'; newButton: Locator; ruleActivityType = 'PeelBanana'; ruleLogic: string = `export default function({ activityInstance: ActivityType }): ExpansionReturn { return [C.FSW_CMD_0({ boolean_arg_0: true, enum_arg_0: "OFF", float_arg_0: 0.0 })]; }`; @@ -29,7 +29,7 @@ export class ExpansionRules { constructor( public page: Page, - public dictionaries: Dictionaries, + public parcels: Parcels, public models: Models, ) { this.updatePage(page); @@ -41,7 +41,7 @@ export class ExpansionRules { await this.newButton.click(); await this.page.waitForURL(`${baseURL}/expansion/rules/new`); await expect(this.saveButton).toBeDisabled(); - await this.selectCommandDictionary(); + await this.selectParcel(); await this.selectModel(); await this.selectActivityType(); await this.fillInputName(); @@ -103,15 +103,6 @@ export class ExpansionRules { await this.inputActivityType.evaluate(e => e.blur()); } - async selectCommandDictionary() { - const { dictionaryName } = this.dictionaries; - await this.page.waitForSelector(`option:has-text("${dictionaryName} - 1.0.0")`, { state: 'attached' }); - const value = await getOptionValueFromText(this.page, this.inputCommandDictionarySelector, dictionaryName); - await this.inputCommandDictionary.focus(); - await this.inputCommandDictionary.selectOption(value); - await this.inputCommandDictionary.evaluate(e => e.blur()); - } - async selectModel() { const { modelName } = this.models; await this.page.waitForSelector(`option:has-text("${modelName}")`, { state: 'attached' }); @@ -121,6 +112,15 @@ export class ExpansionRules { await this.inputModel.evaluate(e => e.blur()); } + async selectParcel() { + const { parcelName } = this.parcels; + await this.page.waitForSelector(`option:has-text("${parcelName}")`, { state: 'attached' }); + const value = await getOptionValueFromText(this.page, this.inputParcelSelector, parcelName); + await 
this.inputParcel.focus(); + await this.inputParcel.selectOption(value); + await this.inputParcel.evaluate(e => e.blur()); + } + updatePage(page: Page): void { this.cancelButton = page.locator(`button:has-text("Cancel")`); this.closeButton = page.locator(`button:has-text("Close")`); @@ -129,7 +129,7 @@ export class ExpansionRules { `.modal:has-text("Delete Expansion Rule") >> button:has-text("Delete")`, ); this.inputActivityType = page.locator(this.inputActivityTypeSelector); - this.inputCommandDictionary = page.locator(this.inputCommandDictionarySelector); + this.inputParcel = page.locator(this.inputParcelSelector); this.inputEditor = page.locator('.panel >> textarea.inputarea'); this.inputModel = page.locator(this.inputModelSelector); this.inputName = page.locator(this.inputNameSelector); diff --git a/e2e-tests/fixtures/ExpansionSets.ts b/e2e-tests/fixtures/ExpansionSets.ts index 7b376d47ab..c7477fe36a 100644 --- a/e2e-tests/fixtures/ExpansionSets.ts +++ b/e2e-tests/fixtures/ExpansionSets.ts @@ -1,17 +1,17 @@ import { expect, type Locator, type Page } from '@playwright/test'; import { adjectives, animals, colors, uniqueNamesGenerator } from 'unique-names-generator'; import { getOptionValueFromText } from '../utilities/selectors.js'; -import { Dictionaries } from './Dictionaries.js'; import { ExpansionRules } from './ExpansionRules.js'; import { Models } from './Models.js'; +import { Parcels } from './Parcels.js'; export class ExpansionSets { - inputCommandDictionary: Locator; - inputCommandDictionarySelector: string = 'select[name="commandDictionary"]'; inputModel: Locator; inputModelSelector: string = 'select[name="modelId"]'; inputName: Locator; inputNameSelector: string = 'input[name="name"]'; + inputParcel: Locator; + inputParcelSelector: string = 'select[name="Parcel"]'; inputRule: Locator; inputRuleSelector: string; newButton: Locator; @@ -20,7 +20,7 @@ export class ExpansionSets { constructor( public page: Page, - public dictionaries: Dictionaries, + public 
parcels: Parcels, public models: Models, public expansionRules: ExpansionRules, ) { @@ -34,7 +34,7 @@ export class ExpansionSets { await this.newButton.click(); await this.page.waitForURL(`${baseURL}/expansion/sets/new`); await expect(this.saveButton).toBeDisabled(); - await this.selectCommandDictionary(); + await this.selectParcel(); await this.selectModel(); await this.selectRule(); await this.fillInputName(); @@ -56,15 +56,6 @@ export class ExpansionSets { await expect(this.setsNavButton).toHaveClass(/selected/); } - async selectCommandDictionary() { - const { dictionaryName } = this.dictionaries; - await this.page.waitForSelector(`option:has-text("${dictionaryName} - 1.0.0")`, { state: 'attached' }); - const value = await getOptionValueFromText(this.page, this.inputCommandDictionarySelector, dictionaryName); - await this.inputCommandDictionary.focus(); - await this.inputCommandDictionary.selectOption(value); - await this.inputCommandDictionary.evaluate(e => e.blur()); - } - async selectModel() { const { modelName } = this.models; await this.page.waitForSelector(`option:has-text("${modelName}")`, { state: 'attached' }); @@ -74,15 +65,24 @@ export class ExpansionSets { await this.inputModel.evaluate(e => e.blur()); } + async selectParcel() { + const { parcelName } = this.parcels; + await this.page.waitForSelector(`option:has-text("${parcelName}")`, { state: 'attached' }); + const value = await getOptionValueFromText(this.page, this.inputParcelSelector, parcelName); + await this.inputParcel.focus(); + await this.inputParcel.selectOption(value); + await this.inputParcel.evaluate(e => e.blur()); + } + async selectRule() { await this.page.waitForSelector(this.inputRuleSelector, { state: 'attached' }); await this.inputRule.first().click(); } updatePage(page: Page): void { - this.inputCommandDictionary = page.locator(this.inputCommandDictionarySelector); this.inputModel = page.locator(this.inputModelSelector); this.inputName = page.locator(this.inputNameSelector); + 
this.inputParcel = page.locator(this.inputParcelSelector); this.inputRule = page.locator(this.inputRuleSelector); this.newButton = page.locator(`button:has-text("New")`); this.page = page; diff --git a/e2e-tests/fixtures/Parcels.ts b/e2e-tests/fixtures/Parcels.ts new file mode 100644 index 0000000000..2c2ef5053b --- /dev/null +++ b/e2e-tests/fixtures/Parcels.ts @@ -0,0 +1,74 @@ +import { Locator, Page, expect } from '@playwright/test'; +import { adjectives, animals, colors, uniqueNamesGenerator } from 'unique-names-generator'; + +export class Parcels { + closeButton: Locator; + confirmModal: Locator; + confirmModalDeleteButton: Locator; + createButton: Locator; + nameField: Locator; + newButton: Locator; + parcelName: string; + tableRow: Locator; + tableRowDeleteButton: Locator; + + constructor(public page: Page) { + this.parcelName = uniqueNamesGenerator({ dictionaries: [adjectives, colors, animals] }); + + this.updatePage(page); + } + + async createParcel(dictionaryName: string) { + await this.newButton.click(); + await this.page.getByText(dictionaryName).click(); + await this.updatePage(this.page); + await expect(this.tableRow).not.toBeVisible(); + await this.nameField.fill(this.parcelName); + await this.createButton.click(); + await this.closeButton.click(); + await this.tableRow.waitFor({ state: 'attached' }); + await this.tableRow.waitFor({ state: 'visible' }); + await expect(this.tableRow).toBeVisible(); + } + + async deleteParcel() { + await expect(this.tableRow).toBeVisible(); + await expect(this.tableRowDeleteButton).not.toBeVisible(); + + await this.tableRow.hover(); + await this.tableRowDeleteButton.waitFor({ state: 'attached' }); + await this.tableRowDeleteButton.waitFor({ state: 'visible' }); + await expect(this.tableRowDeleteButton).toBeVisible(); + + await expect(this.confirmModal).not.toBeVisible(); + await this.tableRowDeleteButton.click(); + await this.confirmModal.waitFor({ state: 'attached' }); + await this.confirmModal.waitFor({ state: 
'visible' }); + await expect(this.confirmModal).toBeVisible(); + + await expect(this.confirmModalDeleteButton).toBeVisible(); + await this.confirmModalDeleteButton.click(); + await this.tableRow.waitFor({ state: 'detached' }); + await this.tableRow.waitFor({ state: 'hidden' }); + await expect(this.tableRow).not.toBeVisible(); + } + + async goto() { + await this.page.goto('/parcels', { waitUntil: 'networkidle' }); + await this.page.waitForTimeout(250); + } + + updatePage(page: Page): void { + this.closeButton = page.locator(`button:has-text("Close")`); + this.confirmModal = page.locator(`.modal:has-text("Delete Parcel")`); + this.confirmModalDeleteButton = page.locator(`.modal:has-text("Delete Parcel") >> button:has-text("Delete")`); + this.createButton = page.locator(`button:has-text("Save")`); + this.nameField = page.locator(`input[name="parcelName"]`); + this.newButton = page.locator(`button:has-text("New")`); + this.page = page; + this.tableRow = page.locator(`.ag-row:has-text("${this.parcelName}")`); + this.tableRowDeleteButton = page.locator( + `.ag-row:has-text("${this.parcelName}") >> button[aria-label="Delete Parcel"]`, + ); + } +} diff --git a/e2e-tests/tests/expansion.test.ts b/e2e-tests/tests/expansion.test.ts index c2d18c3446..064d0cd864 100644 --- a/e2e-tests/tests/expansion.test.ts +++ b/e2e-tests/tests/expansion.test.ts @@ -3,12 +3,14 @@ import { Dictionaries } from '../fixtures/Dictionaries.js'; import { ExpansionRules } from '../fixtures/ExpansionRules.js'; import { ExpansionSets } from '../fixtures/ExpansionSets.js'; import { Models } from '../fixtures/Models.js'; +import { Parcels } from '../fixtures/Parcels.js'; let context: BrowserContext; let dictionaries: Dictionaries; let expansionRules: ExpansionRules; let expansionSets: ExpansionSets; let models: Models; let page: Page; +let parcels: Parcels; test.beforeAll(async ({ browser }) => { context = await browser.newContext(); @@ -16,21 +18,23 @@ test.beforeAll(async ({ browser }) => { models = 
new Models(page); dictionaries = new Dictionaries(page); - expansionRules = new ExpansionRules(page, dictionaries, models); - expansionSets = new ExpansionSets(page, dictionaries, models, expansionRules); + parcels = new Parcels(page); + expansionRules = new ExpansionRules(page, parcels, models); + expansionSets = new ExpansionSets(page, parcels, models, expansionRules); await models.goto(); await models.createModel(); await dictionaries.goto(); - await dictionaries.createDictionary(); + const dictionaryName = await dictionaries.createDictionary(); + await parcels.goto(); + await parcels.createParcel(dictionaryName); await expansionRules.goto(); }); test.afterAll(async () => { await models.goto(); await models.deleteModel(); - await dictionaries.goto(); - await dictionaries.deleteDictionary(); + await parcels.goto(); await page.close(); await context.close(); }); diff --git a/e2e-tests/tests/parcels.test.ts b/e2e-tests/tests/parcels.test.ts new file mode 100644 index 0000000000..5440661990 --- /dev/null +++ b/e2e-tests/tests/parcels.test.ts @@ -0,0 +1,34 @@ +import test, { type BrowserContext, type Page } from '@playwright/test'; +import { Dictionaries } from '../fixtures/Dictionaries.js'; +import { Parcels } from '../fixtures/Parcels.js'; +let context: BrowserContext; +let dictionaries: Dictionaries; +let dictionaryName: string; +let parcels: Parcels; +let page: Page; + +test.beforeAll(async ({ browser }) => { + context = await browser.newContext(); + page = await context.newPage(); + dictionaries = new Dictionaries(page); + parcels = new Parcels(page); + + await dictionaries.goto(); + dictionaryName = await dictionaries.createDictionary(); + await parcels.goto(); +}); + +test.afterAll(async () => { + await page.close(); + await context.close(); +}); + +test.describe.serial('Parcels', () => { + test('Create parcel', async () => { + await parcels.createParcel(dictionaryName); + }); + + test('Delete parcel', async () => { + await parcels.deleteParcel(); + }); +}); 
diff --git a/package-lock.json b/package-lock.json index 9e69527f9a..ac297cd005 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,7 +10,8 @@ "license": "MIT", "dependencies": { "@fontsource/jetbrains-mono": "^5.0.19", - "@nasa-jpl/aerie-ampcs": "^1.0.1", + "@nasa-jpl/aerie-ampcs": "^1.0.5", + "@nasa-jpl/seq-json-schema": "^1.0.20", "@nasa-jpl/stellar": "^1.1.18", "@sveltejs/adapter-node": "5.0.1", "@sveltejs/kit": "^2.5.4", @@ -29,7 +30,9 @@ "d3-selection": "^3.0.0", "d3-shape": "^3.2.0", "d3-time": "^3.1.0", + "fastest-levenshtein": "^1.0.16", "graphql-ws": "^5.14.0", + "json-source-map": "^0.6.1", "jszip": "^3.10.1", "jwt-decode": "^4.0.0", "lodash-es": "^4.17.21", @@ -45,7 +48,14 @@ "toastify-js": "^1.12.0" }, "devDependencies": { - "@playwright/test": "^1.42.1", + "@codemirror/autocomplete": "^6.16.0", + "@codemirror/lang-json": "^6.0.1", + "@codemirror/language": "^6.10.1", + "@codemirror/lint": "^6.5.0", + "@lezer/generator": "^1.7.0", + "@lezer/highlight": "^1.2.0", + "@lezer/lr": "^1.4.0", + "@playwright/test": "^1.44.0", "@poppanator/sveltekit-svg": "^4.2.1", "@sveltejs/vite-plugin-svelte": "^3.0.0", "@testing-library/svelte": "^4.0.2", @@ -72,6 +82,7 @@ "@vitest/coverage-v8": "^1.4.0", "@vitest/ui": "^1.4.0", "cloc": "2.0.0-cloc", + "codemirror": "^6.0.1", "d3-format": "^3.1.0", "d3-zoom": "^3.0.0", "esbuild": "^0.20.2", @@ -282,6 +293,99 @@ "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", "dev": true }, + "node_modules/@codemirror/autocomplete": { + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.16.0.tgz", + "integrity": "sha512-P/LeCTtZHRTCU4xQsa89vSKWecYv1ZqwzOd5topheGRf+qtacFgBeIMQi3eL8Kt/BUNvxUWkx+5qP2jlGoARrg==", + "dev": true, + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/common": "^1.0.0" + }, + "peerDependencies": { + 
"@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@codemirror/commands": { + "version": "6.3.3", + "resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.3.3.tgz", + "integrity": "sha512-dO4hcF0fGT9tu1Pj1D2PvGvxjeGkbC6RGcZw6Qs74TH+Ed1gw98jmUgd2axWvIZEqTeTuFrg1lEB1KV6cK9h1A==", + "dev": true, + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.4.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.1.0" + } + }, + "node_modules/@codemirror/lang-json": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@codemirror/lang-json/-/lang-json-6.0.1.tgz", + "integrity": "sha512-+T1flHdgpqDDlJZ2Lkil/rLiRy684WMLc74xUnjJH48GQdfJo/pudlTRreZmKwzP8/tGdKf83wlbAdOCzlJOGQ==", + "dev": true, + "dependencies": { + "@codemirror/language": "^6.0.0", + "@lezer/json": "^1.0.0" + } + }, + "node_modules/@codemirror/language": { + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.10.1.tgz", + "integrity": "sha512-5GrXzrhq6k+gL5fjkAwt90nYDmjlzTIJV8THnxNFtNKWotMIlzzN+CpqxqwXOECnUdOndmSeWntVrVcv5axWRQ==", + "dev": true, + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.23.0", + "@lezer/common": "^1.1.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0", + "style-mod": "^4.0.0" + } + }, + "node_modules/@codemirror/lint": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.5.0.tgz", + "integrity": "sha512-+5YyicIaaAZKU8K43IQi8TBy6mF6giGeWAH7N96Z5LC30Wm5JMjqxOYIE9mxwMG1NbhT2mA3l9hA4uuKUM3E5g==", + "dev": true, + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/search": { + "version": "6.5.6", + "resolved": "https://registry.npmjs.org/@codemirror/search/-/search-6.5.6.tgz", + "integrity": 
"sha512-rpMgcsh7o0GuCDUXKPvww+muLA1pDJaFrpq/CCHtpQJYz8xopu4D1hPcKRoDD0YlF8gZaqTNIRa4VRBWyhyy7Q==", + "dev": true, + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/state": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz", + "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==", + "dev": true + }, + "node_modules/@codemirror/view": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.26.0.tgz", + "integrity": "sha512-nSSmzONpqsNzshPOxiKhK203R6BvABepugAe34QfQDbNDslyjkqBuKgrK5ZBvqNXpfxz5iLrlGTmEfhbQyH46A==", + "dev": true, + "dependencies": { + "@codemirror/state": "^6.4.0", + "style-mod": "^4.1.0", + "w3c-keyname": "^2.2.4" + } + }, "node_modules/@csstools/css-parser-algorithms": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-2.6.1.tgz", @@ -970,14 +1074,67 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, - "node_modules/@nasa-jpl/aerie-ampcs": { + "node_modules/@lezer/common": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.1.tgz", + "integrity": "sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ==", + "dev": true + }, + "node_modules/@lezer/generator": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/@lezer/generator/-/generator-1.7.0.tgz", + "integrity": "sha512-IJ16tx3biLKlCXUzcK4v8S10AVa2BSM2rB12rtAL6f1hL2TS/HQQlGCoWRvanlL2J4mCYEEIv9uG7n4kVMkVDA==", + "dev": true, + "dependencies": { + "@lezer/common": "^1.1.0", + "@lezer/lr": "^1.3.0" + }, + "bin": { + "lezer-generator": "src/lezer-generator.cjs" + } + }, + "node_modules/@lezer/highlight": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.0.tgz", + 
"integrity": "sha512-WrS5Mw51sGrpqjlh3d4/fOwpEV2Hd3YOkp9DBt4k8XZQcoTHZFB7sx030A6OcahF4J1nDQAa3jXlTVVYH50IFA==", + "dev": true, + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@lezer/json": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@nasa-jpl/aerie-ampcs/-/aerie-ampcs-1.0.2.tgz", - "integrity": "sha512-71xu4EbMII085RlwEs6oi+baqp5EQDTUhTLq3eJ62Pl68aimf2kTIqn2FmrVySHTr1ag4PTY8QAvWnQIyfpr1A==", + "resolved": "https://registry.npmjs.org/@lezer/json/-/json-1.0.2.tgz", + "integrity": "sha512-xHT2P4S5eeCYECyKNPhr4cbEL9tc8w83SPwRC373o9uEdrvGKTZoJVAGxpOsZckMlEh9W23Pc72ew918RWQOBQ==", + "dev": true, + "dependencies": { + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/lr": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.0.tgz", + "integrity": "sha512-Wst46p51km8gH0ZUmeNrtpRYmdlRHUpN1DQd3GFAyKANi8WVz8c2jHYTf1CVScFaCjQw1iO3ZZdqGDxQPRErTg==", + "dev": true, + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@nasa-jpl/aerie-ampcs": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@nasa-jpl/aerie-ampcs/-/aerie-ampcs-1.0.5.tgz", + "integrity": "sha512-IBz57zw2HlTVJcPXPLq56OzMJ7LAK4uWPgrC5yh9+x4UM7WS46cW9AbdbiYWpIOMh8zW+/UWju7QqszowIKTTQ==", "dependencies": { "xml-js": "^1.6.11" } }, + "node_modules/@nasa-jpl/seq-json-schema": { + "version": "1.0.20", + "resolved": "https://registry.npmjs.org/@nasa-jpl/seq-json-schema/-/seq-json-schema-1.0.20.tgz", + "integrity": "sha512-fEIxZ7xlV8y+ybCN5yd2bhgEXLx4gbysa5W6KfXSBq9hY2gpYlYJ1PTNr0JA+N7KRK36Wh1lQnFtYpHfI/Owxw==" + }, "node_modules/@nasa-jpl/stellar": { "version": "1.1.18", "resolved": "https://registry.npmjs.org/@nasa-jpl/stellar/-/stellar-1.1.18.tgz", @@ -1019,12 +1176,12 @@ } }, "node_modules/@playwright/test": { - "version": "1.42.1", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.42.1.tgz", - "integrity": 
"sha512-Gq9rmS54mjBL/7/MvBaNOBwbfnh7beHvS6oS4srqXFcQHpQCV1+c8JXWE8VLPyRDhgS3H8x8A7hztqI9VnwrAQ==", + "version": "1.44.0", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.0.tgz", + "integrity": "sha512-rNX5lbNidamSUorBhB4XZ9SQTjAqfe5M+p37Z8ic0jPFBMo5iCtQz1kRWkEMg+rYOKSlVycpQmpqjSFq7LXOfg==", "dev": true, "dependencies": { - "playwright": "1.42.1" + "playwright": "1.44.0" }, "bin": { "playwright": "cli.js" @@ -2531,6 +2688,21 @@ "@types/estree": "^1.0.0" } }, + "node_modules/codemirror": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-6.0.1.tgz", + "integrity": "sha512-J8j+nZ+CdWmIeFIGXEFbFPtpiYacFMDR8GlHK3IyHQJMCaVRfGx9NT+Hxivv1ckLWPvNdZqndbr/7lVhrf/Svg==", + "dev": true, + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/commands": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/lint": "^6.0.0", + "@codemirror/search": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0" + } + }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -2632,6 +2804,12 @@ } } }, + "node_modules/crelt": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz", + "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==", + "dev": true + }, "node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", @@ -3682,7 +3860,6 @@ "version": "1.0.16", "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz", "integrity": "sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==", - "dev": true, "engines": { "node": ">= 4.9.1" } @@ -4782,6 +4959,11 @@ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": 
"sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" }, + "node_modules/json-source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/json-source-map/-/json-source-map-0.6.1.tgz", + "integrity": "sha512-1QoztHPsMQqhDq0hlXY5ZqcEdUzxQEIxgFkKl4WUp2pgShObl+9ovi4kRh2TfvAfxAoHOJ9vIMEqk3k4iex7tg==" + }, "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", @@ -5543,12 +5725,12 @@ } }, "node_modules/playwright": { - "version": "1.42.1", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.42.1.tgz", - "integrity": "sha512-PgwB03s2DZBcNRoW+1w9E+VkLBxweib6KTXM0M3tkiT4jVxKSi6PmVJ591J+0u10LUrgxB7dLRbiJqO5s2QPMg==", + "version": "1.44.0", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.0.tgz", + "integrity": "sha512-F9b3GUCLQ3Nffrfb6dunPOkE5Mh68tR7zN32L4jCk4FjQamgesGay7/dAAe1WaMEGV04DkdJfcJzjoCKygUaRQ==", "dev": true, "dependencies": { - "playwright-core": "1.42.1" + "playwright-core": "1.44.0" }, "bin": { "playwright": "cli.js" @@ -5561,9 +5743,9 @@ } }, "node_modules/playwright-core": { - "version": "1.42.1", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.42.1.tgz", - "integrity": "sha512-mxz6zclokgrke9p1vtdy/COWBH+eOZgYUVVU34C73M+4j4HLlQJHtfcqiqqxpP0o8HhMkflvfbquLX5dg6wlfA==", + "version": "1.44.0", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.0.tgz", + "integrity": "sha512-ZTbkNpFfYcGWohvTTl+xewITm7EOuqIqex0c7dNZ+aXsbrLj0qI8XlGKfPpipjm0Wny/4Lt4CJsWJk1stVS5qQ==", "dev": true, "bin": { "playwright-core": "cli.js" @@ -6479,6 +6661,12 @@ "integrity": "sha512-UfJMcSJc+SEXEl9lH/VLHSZbThQyLpw1vLO1Lb+j4RWDvG3N2f7yj3PVQA3cmkTBNldJ9eFnM+xEXxHIXrYiJw==", "dev": true }, + "node_modules/style-mod": { + "version": "4.1.2", + "resolved": 
"https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz", + "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==", + "dev": true + }, "node_modules/stylelint": { "version": "16.3.1", "resolved": "https://registry.npmjs.org/stylelint/-/stylelint-16.3.1.tgz", @@ -7785,6 +7973,12 @@ } } }, + "node_modules/w3c-keyname": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", + "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", + "dev": true + }, "node_modules/w3c-xmlserializer": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz", diff --git a/package.json b/package.json index 41b6392e71..26c9e568b1 100644 --- a/package.json +++ b/package.json @@ -39,7 +39,8 @@ }, "dependencies": { "@fontsource/jetbrains-mono": "^5.0.19", - "@nasa-jpl/aerie-ampcs": "^1.0.1", + "@nasa-jpl/aerie-ampcs": "^1.0.5", + "@nasa-jpl/seq-json-schema": "^1.0.20", "@nasa-jpl/stellar": "^1.1.18", "@sveltejs/adapter-node": "5.0.1", "@sveltejs/kit": "^2.5.4", @@ -58,7 +59,9 @@ "d3-selection": "^3.0.0", "d3-shape": "^3.2.0", "d3-time": "^3.1.0", + "fastest-levenshtein": "^1.0.16", "graphql-ws": "^5.14.0", + "json-source-map": "^0.6.1", "jszip": "^3.10.1", "jwt-decode": "^4.0.0", "lodash-es": "^4.17.21", @@ -74,7 +77,14 @@ "toastify-js": "^1.12.0" }, "devDependencies": { - "@playwright/test": "^1.42.1", + "@codemirror/autocomplete": "^6.16.0", + "@codemirror/lang-json": "^6.0.1", + "@codemirror/language": "^6.10.1", + "@codemirror/lint": "^6.5.0", + "@lezer/generator": "^1.7.0", + "@lezer/highlight": "^1.2.0", + "@lezer/lr": "^1.4.0", + "@playwright/test": "^1.44.0", "@poppanator/sveltekit-svg": "^4.2.1", "@sveltejs/vite-plugin-svelte": "^3.0.0", "@testing-library/svelte": "^4.0.2", @@ -101,6 +111,7 @@ "@vitest/coverage-v8": "^1.4.0", "@vitest/ui": "^1.4.0", "cloc": 
"2.0.0-cloc", + "codemirror": "^6.0.1", "d3-format": "^3.1.0", "d3-zoom": "^3.0.0", "esbuild": "^0.20.2", diff --git a/src/app.d.ts b/src/app.d.ts index e9ff2c1a14..cfcca1c2d8 100644 --- a/src/app.d.ts +++ b/src/app.d.ts @@ -1,4 +1,9 @@ +/* eslint-disable no-var */ /* eslint @typescript-eslint/no-unused-vars: 0 */ +import type { ParameterDictionary } from '@nasa-jpl/aerie-ampcs'; +import type { SeqJson } from '@nasa-jpl/seq-json-schema/types'; +import type { GlobalType } from './types/global-type'; +import type { ArgDelegator } from './utilities/new-sequence-editor/extension-points'; declare global { namespace App { @@ -43,6 +48,29 @@ declare global { const content: string; export default content; } + + var CONDITIONAL_KEYWORDS: { ELSE: string; ELSE_IF?: string[]; END_IF: string; IF: string[] } | undefined; + var LOOP_KEYWORDS: + | { + BREAK: string; + CONTINUE: string; + END_WHILE_LOOP: string; + WHILE_LOOP: string[]; + } + | undefined; + var GLOBALS: GlobalType[] | undefined; + var ARG_DELEGATOR: ArgDelegator | undefined; + function LINT(commandDictionary, view, node); + function TO_SEQ_JSON( + seqJson: SeqJson, + parameterDictionaries: ParameterDictionary[], + channelDictionary: ChannelDictionary | null, + ); + function FROM_SEQ_JSON( + seqJson: SeqJson, + parameterDictionaries: ParameterDictionary[], + channelDictionary: ChannelDictionary | null, + ); } export {}; diff --git a/src/components/expansion/ExpansionLogicEditor.svelte b/src/components/expansion/ExpansionLogicEditor.svelte index 51908586be..6feace209b 100644 --- a/src/components/expansion/ExpansionLogicEditor.svelte +++ b/src/components/expansion/ExpansionLogicEditor.svelte @@ -3,6 +3,7 @@ + + + + {displayTextPlural} + + + + {#if dictionaries.length} + + {:else} + No {displayTextPlural} Found + {/if} + + diff --git a/src/components/parcels/ParcelForm.svelte b/src/components/parcels/ParcelForm.svelte new file mode 100644 index 0000000000..227c68d3bb --- /dev/null +++ 
b/src/components/parcels/ParcelForm.svelte @@ -0,0 +1,318 @@ + + + + + + + + + + {mode === 'create' ? 'New Parcel' : 'Edit Parcel'} + +
+ + +
+
+ + + {#if mode === 'edit'} +
+ + +
+ +
+ + +
+ {/if} + +
+ + +
+
+
+ +
+ + + + + + + +
+
+ + diff --git a/src/components/parcels/Parcels.svelte b/src/components/parcels/Parcels.svelte new file mode 100644 index 0000000000..2cf0fc8471 --- /dev/null +++ b/src/components/parcels/Parcels.svelte @@ -0,0 +1,180 @@ + + + + + + + + + Parcels + +
+ +
+
+ + + {#if filteredParcels.length} + + {:else} +
No Parcels Found
+ {/if} +
+
diff --git a/src/components/sequencing/ArgumentTooltip.svelte b/src/components/sequencing/ArgumentTooltip.svelte new file mode 100644 index 0000000000..9f7ffce1e2 --- /dev/null +++ b/src/components/sequencing/ArgumentTooltip.svelte @@ -0,0 +1,67 @@ + + + + +
+
+ Name: {arg.name} +
+ Type: {arg.arg_type} +
+ Description: {arg.description} + + {#if arg.arg_type === 'boolean' || arg.arg_type === 'enum' || arg.arg_type === 'float' || arg.arg_type === 'integer' || arg.arg_type === 'numeric' || arg.arg_type === 'time' || arg.arg_type === 'unsigned' || arg.arg_type === 'var_string'} +
+ Default Value: {arg.default_value ?? 'None'} + {/if} + + {#if arg.arg_type === 'float' || arg.arg_type === 'integer' || arg.arg_type === 'numeric' || arg.arg_type === 'unsigned'} +
+ Range: {arg.range ? `[${arg.range.min}, ${arg.range.max}]` : 'None'} + {/if} + + {#if arg.arg_type === 'float' || arg.arg_type === 'integer' || arg.arg_type === 'numeric' || arg.arg_type === 'time' || arg.arg_type === 'unsigned'} +
+ Units: {arg.units === 'none' ? 'None' : arg.units} + {/if} + + {#if arg.arg_type === 'boolean' || arg.arg_type === 'enum' || arg.arg_type === 'float' || arg.arg_type === 'integer' || arg.arg_type === 'numeric' || arg.arg_type === 'time' || arg.arg_type === 'unsigned'} +
+ Bit Length: {arg.bit_length ?? 'None'} + {/if} + + {#if arg.arg_type === 'repeat' || arg.arg_type === 'var_string'} +
+ Prefix Bit Length: {arg.prefix_bit_length ?? 'None'} + {/if} + + {#if arg.arg_type === 'enum'} +
+ Enum Name: {arg.enum_name} +
+ Enum Symbols: {enumSymbolsDisplayStr} + {/if} +
+
+
+ + diff --git a/src/components/sequencing/CommandTooltip.svelte b/src/components/sequencing/CommandTooltip.svelte new file mode 100644 index 0000000000..3a2fb01d12 --- /dev/null +++ b/src/components/sequencing/CommandTooltip.svelte @@ -0,0 +1,42 @@ + + + + +
+
+ {commandExample} +
+ +
+ +
+ {command.description} +
+
+ + diff --git a/src/components/sequencing/SequenceEditor.svelte b/src/components/sequencing/SequenceEditor.svelte index 22864ef083..a855a843bf 100644 --- a/src/components/sequencing/SequenceEditor.svelte +++ b/src/components/sequencing/SequenceEditor.svelte @@ -1,147 +1,340 @@ - - - - {title} - -
- -
-
- - - - -
- - - - - - Seq JSON (Read-only) - -
- {#if !disableSeqJSONGeneration} - - {/if} - -
-
- - - -
+ + + + + {title} + +
+ + + + +
+
+ + +
+ + + + + + + + Seq JSON (Read-only) + +
+ +
+
+ + +
+ + + + + + + {#if !!commandDictionary && !!selectedNode && showCommandFormBuilder} + + {/if} + + diff --git a/src/components/sequencing/SequenceForm.svelte b/src/components/sequencing/SequenceForm.svelte index 402177f3fe..a6e4dd4a4a 100644 --- a/src/components/sequencing/SequenceForm.svelte +++ b/src/components/sequencing/SequenceForm.svelte @@ -3,14 +3,23 @@ @@ -205,6 +237,11 @@ +
+ + Filter to my sequences +
+ {#if filteredSequences.length} + + diff --git a/src/components/sequencing/form/AddMissingArgsButton.svelte b/src/components/sequencing/form/AddMissingArgsButton.svelte new file mode 100644 index 0000000000..da3f23ef1e --- /dev/null +++ b/src/components/sequencing/form/AddMissingArgsButton.svelte @@ -0,0 +1,9 @@ + + + + +
+ +
diff --git a/src/components/sequencing/form/ArgEditor.svelte b/src/components/sequencing/form/ArgEditor.svelte new file mode 100644 index 0000000000..886dd0a259 --- /dev/null +++ b/src/components/sequencing/form/ArgEditor.svelte @@ -0,0 +1,123 @@ + + + + +{#if !argInfo.argDef} + {#if argInfo.text} +
Unknown Argument
+ { + if (argInfo.node) { + setInEditor(argInfo.node, ''); + } + }} + /> + {/if} +{:else} + + {#if argInfo.argDef.arg_type === 'enum' && argInfo.node} + {#if argInfo.node?.name === 'String'} + { + if (argInfo.node) { + setInEditor(argInfo.node, val); + } + }} + /> + {:else} + + {/if} + {:else if isNumberArg(argInfo.argDef) && argInfo.node?.name === 'Number'} + { + if (argInfo.node) { + setInEditor(argInfo.node, val); + } + }} + /> + {:else if isFswCommandArgumentVarString(argInfo.argDef)} + { + if (argInfo.node) { + setInEditor(argInfo.node, val); + } + }} + /> + {:else if isFswCommandArgumentRepeat(argInfo.argDef) && !!argInfo.children} + {#each argInfo.children as childArgInfo} + {#if childArgInfo.node} + + {/if} + {/each} + {#if argInfo.children.find(childArgInfo => !childArgInfo.node)} + { + if (argInfo.node && argInfo.children) { + addDefaultArgs(argInfo.node, getMissingArgDefs(argInfo.children)); + } + }} + /> + {:else if !!argInfo.argDef.repeat} +
+ +
+ {/if} + {:else} +
Unexpected value for definition
+ {/if} +{/if} diff --git a/src/components/sequencing/form/ArgTitle.svelte b/src/components/sequencing/form/ArgTitle.svelte new file mode 100644 index 0000000000..49673bea98 --- /dev/null +++ b/src/components/sequencing/form/ArgTitle.svelte @@ -0,0 +1,26 @@ + + + + +
+ {getArgTitle(argDef)} + {argDef.description} +
diff --git a/src/components/sequencing/form/EnumEditor.svelte b/src/components/sequencing/form/EnumEditor.svelte new file mode 100644 index 0000000000..06f3136f2a --- /dev/null +++ b/src/components/sequencing/form/EnumEditor.svelte @@ -0,0 +1,67 @@ + + + + +
+ {#if enumValues.length > SEARCH_THRESHOLD} + + {:else} + + {/if} +
+ + diff --git a/src/components/sequencing/form/ExtraArgumentEditor.svelte b/src/components/sequencing/form/ExtraArgumentEditor.svelte new file mode 100644 index 0000000000..4e31d8a183 --- /dev/null +++ b/src/components/sequencing/form/ExtraArgumentEditor.svelte @@ -0,0 +1,12 @@ + + + + +
+ + {initVal} +
diff --git a/src/components/sequencing/form/NumEditor.svelte b/src/components/sequencing/form/NumEditor.svelte new file mode 100644 index 0000000000..6ce49109c0 --- /dev/null +++ b/src/components/sequencing/form/NumEditor.svelte @@ -0,0 +1,66 @@ + + + + +
+ + + + {#if typeof min === 'number' && typeof max === 'number' && min === max && valFloat !== max} + + {/if} +
+ + diff --git a/src/components/sequencing/form/SelectedCommand.svelte b/src/components/sequencing/form/SelectedCommand.svelte new file mode 100644 index 0000000000..1454a23298 --- /dev/null +++ b/src/components/sequencing/form/SelectedCommand.svelte @@ -0,0 +1,232 @@ + + + + +
+ {#if !!commandNode} +
Selected Command
+ {#if !!commandDef} + {#if !!timeTagNode} +
Time Tag: {timeTagNode.text.trim()}
+ {/if} +
+
+ {commandDef.stem} + {commandDef.description} +
+
+
+
+ {#each editorArgInfoArray as argInfo} + + addDefaultArgs(commandDictionary, editorSequenceView, commandNode, missingArgDefArray)} + /> + {/each} + {#if missingArgDefArray.length} + { + if (commandNode) { + addDefaultArgs(commandDictionary, editorSequenceView, commandNode, missingArgDefArray); + } + }} + /> + {/if} +
+ {/if} + {/if} +
+ + diff --git a/src/components/sequencing/form/StringEditor.svelte b/src/components/sequencing/form/StringEditor.svelte new file mode 100644 index 0000000000..8d3d33cdb6 --- /dev/null +++ b/src/components/sequencing/form/StringEditor.svelte @@ -0,0 +1,37 @@ + + + + +
+ +
+ + diff --git a/src/components/sequencing/form/TimeTagEditor.svelte b/src/components/sequencing/form/TimeTagEditor.svelte new file mode 100644 index 0000000000..7f4208d081 --- /dev/null +++ b/src/components/sequencing/form/TimeTagEditor.svelte @@ -0,0 +1,31 @@ + + + + +
Time Tag:
diff --git a/src/components/sequencing/form/utils.ts b/src/components/sequencing/form/utils.ts new file mode 100644 index 0000000000..debda51e19 --- /dev/null +++ b/src/components/sequencing/form/utils.ts @@ -0,0 +1,127 @@ +import type { SyntaxNode } from '@lezer/common'; +import type { + CommandDictionary, + FswCommandArgument, + FswCommandArgumentEnum, + FswCommandArgumentFixedString, + FswCommandArgumentFloat, + FswCommandArgumentInteger, + FswCommandArgumentNumeric, + FswCommandArgumentRepeat, + FswCommandArgumentUnsigned, + FswCommandArgumentVarString, +} from '@nasa-jpl/aerie-ampcs'; +import type { EditorView } from 'codemirror'; +import { fswCommandArgDefault } from '../../../utilities/new-sequence-editor/command-dictionary'; +import { TOKEN_REPEAT_ARG } from '../../../utilities/new-sequence-editor/sequencer-grammar-constants'; + +export function isFswCommandArgumentEnum(arg: FswCommandArgument): arg is FswCommandArgumentEnum { + return arg.arg_type === 'enum'; +} + +export function isFswCommandArgumentInteger(arg: FswCommandArgument): arg is FswCommandArgumentInteger { + return arg.arg_type === 'integer'; +} + +export function isFswCommandArgumentFloat(arg: FswCommandArgument): arg is FswCommandArgumentFloat { + return arg.arg_type === 'float'; +} + +export function isFswCommandArgumentNumeric(arg: FswCommandArgument): arg is FswCommandArgumentNumeric { + return arg.arg_type === 'numeric'; +} + +export function isFswCommandArgumentUnsigned(arg: FswCommandArgument): arg is FswCommandArgumentUnsigned { + return arg.arg_type === 'unsigned'; +} + +export function isFswCommandArgumentRepeat(arg: FswCommandArgument): arg is FswCommandArgumentRepeat { + return arg.arg_type === 'repeat'; +} + +export function isFswCommandArgumentVarString(arg: FswCommandArgument): arg is FswCommandArgumentVarString { + return arg.arg_type === 'var_string'; +} + +export function isFswCommandArgumentFixedString(arg: FswCommandArgument): arg is FswCommandArgumentFixedString { + return 
arg.arg_type === 'fixed_string'; +} + +export function isNumberArg(arg: FswCommandArgument): arg is NumberArg { + return ( + isFswCommandArgumentFloat(arg) || + isFswCommandArgumentInteger(arg) || + isFswCommandArgumentNumeric(arg) || + isFswCommandArgumentUnsigned(arg) + ); +} + +export function isStringArg(arg: FswCommandArgument): arg is StringArg { + return isFswCommandArgumentVarString(arg) || isFswCommandArgumentFixedString(arg); +} + +export type StringArg = FswCommandArgumentVarString | FswCommandArgumentFixedString; + +export type NumberArg = + | FswCommandArgumentFloat + | FswCommandArgumentInteger + | FswCommandArgumentNumeric + | FswCommandArgumentUnsigned; + +export type ArgTextDef = { + argDef?: FswCommandArgument; + children?: ArgTextDef[]; + node?: SyntaxNode; + parentArgDef?: FswCommandArgumentRepeat; + text?: string; +}; + +export function addDefaultArgs( + commandDictionary: CommandDictionary, + view: EditorView, + commandNode: SyntaxNode, + argDefs: FswCommandArgument[], +) { + let insertPosition: undefined | number = undefined; + const str = ' ' + argDefs.map(argDef => fswCommandArgDefault(argDef, commandDictionary.enumMap)).join(' '); + const argsNode = commandNode.getChild('Args'); + const stemNode = commandNode.getChild('Stem'); + if (stemNode) { + insertPosition = argsNode?.to ?? 
stemNode.to; + if (insertPosition !== undefined) { + const transaction = view.state.update({ + changes: { from: insertPosition, insert: str }, + }); + view.dispatch(transaction); + } + } else if (commandNode.name === TOKEN_REPEAT_ARG) { + insertPosition = commandNode.to - 1; + if (insertPosition !== undefined) { + const transaction = view.state.update({ + changes: { from: insertPosition, insert: str }, + }); + view.dispatch(transaction); + } + } +} + +export function getMissingArgDefs(argInfoArray: ArgTextDef[]) { + return argInfoArray + .filter((argInfo): argInfo is { argDef: FswCommandArgument } => !argInfo.node && !!argInfo.argDef) + .map(argInfo => argInfo.argDef); +} + +export function isQuoted(s: string) { + return s.startsWith('"') && s.endsWith('"'); +} + +export function unquoteUnescape(s: string) { + if (isQuoted(s)) { + return s.slice(1, -1).replaceAll('\\"', '"'); + } + return s; +} + +export function quoteEscape(s: string) { + return `"${s.replaceAll('"', '\\"')}"`; +} diff --git a/src/components/ui/CssGrid.svelte b/src/components/ui/CssGrid.svelte index 9b436f1af3..94af87807d 100644 --- a/src/components/ui/CssGrid.svelte +++ b/src/components/ui/CssGrid.svelte @@ -9,6 +9,7 @@ export let columns: string = 'none'; export let gap: string = '0'; export let rows: string = 'none'; + export let minHeight: string = 'unset'; const dispatch = createEventDispatcher<{ changeColumnSizes: string; @@ -77,7 +78,7 @@
diff --git a/src/components/ui/CssGridGutter.svelte b/src/components/ui/CssGridGutter.svelte index 189d4748fc..7a07277ff5 100644 --- a/src/components/ui/CssGridGutter.svelte +++ b/src/components/ui/CssGridGutter.svelte @@ -3,9 +3,10 @@ -
+
diff --git a/src/routes/expansion/rules/edit/[id]/+page.svelte b/src/routes/expansion/rules/edit/[id]/+page.svelte index eb66059a60..a011ebacef 100644 --- a/src/routes/expansion/rules/edit/[id]/+page.svelte +++ b/src/routes/expansion/rules/edit/[id]/+page.svelte @@ -11,7 +11,7 @@ initialRuleActivityType={data.initialRule.activity_type} initialRuleDescription={data.initialRule.description} initialRuleCreatedAt={data.initialRule.created_at} - initialRuleDictionaryId={data.initialRule.authoring_command_dict_id} + initialParcelId={data.initialRule.parcel_id} initialRuleId={data.initialRule.id} initialRuleLogic={data.initialRule.expansion_logic} initialRuleModelId={data.initialRule.authoring_mission_model_id} diff --git a/src/routes/parcels/+layout.svelte b/src/routes/parcels/+layout.svelte new file mode 100644 index 0000000000..73302c982e --- /dev/null +++ b/src/routes/parcels/+layout.svelte @@ -0,0 +1,16 @@ + + + + + + + + diff --git a/src/routes/parcels/+page.svelte b/src/routes/parcels/+page.svelte new file mode 100644 index 0000000000..30680d7e96 --- /dev/null +++ b/src/routes/parcels/+page.svelte @@ -0,0 +1,13 @@ + + + + + + + diff --git a/src/routes/parcels/+page.ts b/src/routes/parcels/+page.ts new file mode 100644 index 0000000000..35cf52612a --- /dev/null +++ b/src/routes/parcels/+page.ts @@ -0,0 +1,7 @@ +import type { PageLoad } from './$types'; + +export const load: PageLoad = async ({ parent }) => { + const { user } = await parent(); + + return { user }; +}; diff --git a/src/routes/parcels/edit/[id]/+page.svelte b/src/routes/parcels/edit/[id]/+page.svelte new file mode 100644 index 0000000000..f80c9788af --- /dev/null +++ b/src/routes/parcels/edit/[id]/+page.svelte @@ -0,0 +1,20 @@ + + + + + diff --git a/src/routes/parcels/edit/[id]/+page.ts b/src/routes/parcels/edit/[id]/+page.ts new file mode 100644 index 0000000000..4e86caf64c --- /dev/null +++ b/src/routes/parcels/edit/[id]/+page.ts @@ -0,0 +1,31 @@ +import { base } from '$app/paths'; +import { redirect 
} from '@sveltejs/kit'; +import { parcel } from '../../../../stores/sequencing'; +import type { Parcel } from '../../../../types/sequencing'; +import effects from '../../../../utilities/effects'; +import { parseFloatOrNull } from '../../../../utilities/generic'; +import type { PageLoad } from './$types'; + +export const load: PageLoad = async ({ parent, params }) => { + const { user } = await parent(); + + const { id: parcelIdParam } = params; + + if (parcelIdParam !== null && parcelIdParam !== undefined) { + const parcelIdAsNumber = parseFloatOrNull(parcelIdParam); + + if (parcelIdAsNumber !== null) { + const initialParcel: Parcel | null = await effects.getParcel(parcelIdAsNumber, user); + parcel.set(initialParcel); + + if (initialParcel !== null) { + return { + initialParcel, + user, + }; + } + } + } + + redirect(302, `${base}/parcels`); +}; diff --git a/src/routes/parcels/new/+page.svelte b/src/routes/parcels/new/+page.svelte new file mode 100644 index 0000000000..2647f37ca1 --- /dev/null +++ b/src/routes/parcels/new/+page.svelte @@ -0,0 +1,16 @@ + + + + + diff --git a/src/routes/parcels/new/+page.ts b/src/routes/parcels/new/+page.ts new file mode 100644 index 0000000000..35cf52612a --- /dev/null +++ b/src/routes/parcels/new/+page.ts @@ -0,0 +1,7 @@ +import type { PageLoad } from './$types'; + +export const load: PageLoad = async ({ parent }) => { + const { user } = await parent(); + + return { user }; +}; diff --git a/src/routes/sequencing/edit/[id]/+page.svelte b/src/routes/sequencing/edit/[id]/+page.svelte index 7f02961843..8aad1ce7e6 100644 --- a/src/routes/sequencing/edit/[id]/+page.svelte +++ b/src/routes/sequencing/edit/[id]/+page.svelte @@ -8,11 +8,11 @@ > = writable({}); + +export const parsedCommandDictionaries: Writable> = writable({}); + +export const parsedParameterDictionaries: Writable> = writable({}); + +export const parcel: Writable = writable(null); + +export const parcelId: Readable = derived(parcel, $parcel => ($parcel ? 
$parcel.id : -1)); + /* Subscriptions. */ +export const channelDictionaries = gqlSubscribable(gql.SUB_CHANNEL_DICTIONARIES, {}, [], null); + export const commandDictionaries = gqlSubscribable(gql.SUB_COMMAND_DICTIONARIES, {}, [], null); +export const parameterDictionaries = gqlSubscribable( + gql.SUB_PARAMETER_DICTIONARIES, + {}, + [], + null, +); + +export const parcelToParameterDictionaries = gqlSubscribable( + gql.SUB_PARCEL_TO_PARAMETER_DICTIONARIES, + { parcelId }, + [], + null, +); + +export const parcels = gqlSubscribable(gql.SUB_PARCELS, {}, [], null); + +export const parcelBundles: Readable = derived( + [parcels, parcelToParameterDictionaries, commandDictionaries], + ([$parcels, $parcelToParameterDictionaries, $commandDictionaries]) => { + if (!$parcels || !$parcelToParameterDictionaries) { + return []; + } + return $parcels.map(parcel => { + const parameterDictionaryIds = $parcelToParameterDictionaries + .filter(parcelToParameterDictionary => parcelToParameterDictionary.parcel_id === parcel.id) + .map(parcelToParameterDictionary => parcelToParameterDictionary.parameter_dictionary_id); + + const commandDictionary = $commandDictionaries.find( + commandDictionary => commandDictionary.id === parcel.command_dictionary_id, + )?.id; + + return { + channel_dictionary_id: parcel.channel_dictionary_id, + command_dictionary_id: commandDictionary, + created_at: parcel.created_at, + id: parcel.id, + name: parcel.name, + owner: parcel.owner, + parameter_dictionary_ids: parameterDictionaryIds, + sequence_adaptation_id: parcel.sequence_adaptation_id, + }; + }); + }, +); + +export const sequenceAdaptations = gqlSubscribable(gql.SUB_SEQUENCE_ADAPTATIONS, {}, [], null); + +export const userParcelColumns: Writable = writable('2fr 3px 1fr'); + export const userSequences = gqlSubscribable(gql.SUB_USER_SEQUENCES, {}, [], null); /* Writeable. 
*/ @@ -15,4 +94,75 @@ export const userSequencesColumns: Writable = writable('1.5fr 3px 1fr'); export const userSequenceFormColumns: Writable = writable('1fr 3px 2fr'); -export const userSequencesRows: Writable = writable('1fr 3px 1fr'); +export const userSequenceEditorColumns: Writable = writable('3fr 3px'); + +export const userSequenceEditorColumnsWithFormBuilder: Writable = writable('3fr 3px 1fr'); + +/* Helper Functions */ + +export async function getParsedChannelDictionary( + unparsedChannelDictionary: ChannelDictionary, + user: User | null, +): Promise { + const id = generateId(unparsedChannelDictionary.id, unparsedChannelDictionary.updated_at); + let channelDictionary: AmpcsChannelDictionary | null = get(parsedChannelDictionaries)[id]; + + if (channelDictionary === null || channelDictionary === undefined) { + channelDictionary = await effects.getParsedAmpcsChannelDictionary(unparsedChannelDictionary.id, user); + + if (channelDictionary !== null) { + parsedChannelDictionaries.set({ + ...get(parsedChannelDictionaries), + [id]: channelDictionary, + }); + } + } + + return channelDictionary; +} + +export async function getParsedCommandDictionary( + unparsedCommandDictionary: CommandDictionary, + user: User | null, +): Promise { + const id = generateId(unparsedCommandDictionary.id, unparsedCommandDictionary.updated_at); + let commandDictionary: AmpcsCommandDictionary | null = get(parsedCommandDictionaries)[id]; + + if (commandDictionary === null || commandDictionary === undefined) { + commandDictionary = await effects.getParsedAmpcsCommandDictionary(unparsedCommandDictionary.id, user); + + if (commandDictionary !== null) { + parsedCommandDictionaries.set({ + ...get(parsedCommandDictionaries), + [id]: commandDictionary, + }); + } + } + + return commandDictionary; +} + +export async function getParsedParameterDictionary( + unparsedParameterDictionary: ParameterDictionary, + user: User | null, +): Promise { + const id = generateId(unparsedParameterDictionary.id, 
unparsedParameterDictionary.updated_at); + let parameterDictionary: AmpcsParameterDictionary | null = get(parsedParameterDictionaries)[id]; + + if (parameterDictionary === null || parameterDictionary === undefined) { + parameterDictionary = await effects.getParsedAmpcsParameterDictionary(unparsedParameterDictionary.id, user); + + if (parameterDictionary !== null) { + parsedParameterDictionaries.set({ + ...get(parsedParameterDictionaries), + [id]: parameterDictionary, + }); + } + } + + return parameterDictionary; +} + +function generateId(id: number, updatedAt: string): string { + return `${id.toString()}_${updatedAt}`; +} diff --git a/src/tests/mocks/sequencing/dictionaries/command_banananation.xml b/src/tests/mocks/sequencing/dictionaries/command_banananation.xml new file mode 100644 index 0000000000..094b7962ed --- /dev/null +++ b/src/tests/mocks/sequencing/dictionaries/command_banananation.xml @@ -0,0 +1,259 @@ + + +
+ + + +
+ + + + + + + + + + + + + + + + + + + + + + String to echo back + + + + shell_ctl + FSW + + This command will echo back a string + String is echoed back + + + + + + + + + + + + Set the oven temperature + + + + shell_ctl + FSW + + This command will turn on the oven + Oven is preheated + + + + + + + + + + + + The distance you throw the bananan + + + + shell_ctl + FSW + + This command will throw a banana + A single banana was thrown + + + + + + + + + + + + Number of bananas to grow + + + + + + How many seconds will it take to grow + + + + shell_ctl + FSW + + This command will grow bananas + Bananas are grown + + + + + + + + + + + + Number of bananas to grow + + + + + + How many seconds will it take to grow + + + + shell_ctl + FSW + + This command will grow bananas, it's a duplicate to clash with an activity type of the same name + Bananas are grown + + + + + + + + + + + + How much sugar is needed + + + Do you hate flavor + + + + shell_ctl + FSW + + This command make the banana bread dough + The dough mixture is created + + + + + + + + + Which way do you peel the banana + + + + shell_ctl + FSW + + This command peels a single banana + The banana is peeled + + + + + + + + shell_ctl + FSW + + This command bakes a banana bread + Banana bread is done baking + + + + + + + + shell_ctl + FSW + + This command waters the banana tree + Done watering the banana tree + + + + + + + + + Identification number assigned to a particular quantity + + + A repeated set of strings and integer containing the arguments to the lot + + + + Name of the banana bundle + + + + + + How many banana in a bundle + + + + + + Dynamically bundle bananas into lots + The bananas are packaged up + + + + + + + + shell_ctl + FSW + + Pick a banana + You have a single banana + + + + + + + + shell_ctl + FSW + + Eat a banana + You ate a single banana + + + + + + + + UPL + + Dump the blender configuration file. + + +
diff --git a/src/tests/mocks/sequencing/grammar-cases/errors.txt b/src/tests/mocks/sequencing/grammar-cases/errors.txt new file mode 100644 index 0000000000..a09b086e94 --- /dev/null +++ b/src/tests/mocks/sequencing/grammar-cases/errors.txt @@ -0,0 +1,49 @@ +# Bad Input - Invalid stems + +C 2_STEM_NAME +STEM$BAR + +==> + +Sequence(Commands(Command(TimeTag(TimeComplete),⚠(Number),Stem,Args),Command(Stem,⚠),Command(Stem,Args))) + +# Stem with disallowed characters + +FSW_CMD%BAR$BAZ + +==> +Sequence(Commands( + Command(Stem,⚠), + Command(Stem,⚠), + Command(Stem,Args) +)) + +# Stem ending in disallowed character + +FSW_CMD% + +==> +Sequence(Commands( + Command(Stem,⚠,Args) +)) + +# Mismatched brackets + +CMD [[] +CMD2 [ +CMD3 ] + +==> + +Sequence(Commands( + Command(Stem,Args(RepeatArg(⚠))), + Command(Stem,Args(RepeatArg(⚠,Enum))) +)) + +# locals with wrong value types + +@LOCALS "string_not_enum" + +==> + +Sequence(LocalDeclaration(⚠(String))) diff --git a/src/tests/mocks/sequencing/grammar-cases/parse_tree.txt b/src/tests/mocks/sequencing/grammar-cases/parse_tree.txt new file mode 100644 index 0000000000..cf00fba8c9 --- /dev/null +++ b/src/tests/mocks/sequencing/grammar-cases/parse_tree.txt @@ -0,0 +1,329 @@ +# Command with no args + +FSW_CMD + +==> +Sequence( + Commands( + Command(Stem,Args) + ) +) + +# Hardware commands + +@HARDWARE +HARDWARE_COMMAND_1 +HDW_2 + +==> +Sequence( + HardwareCommands( + Command(Stem,Args), + Command(Stem,Args) + ) +) + +# Generic directive + +@WRONG_LOAD_AND_GO + +C CMD_1 + +==> + +Sequence( + GenericDirective, + Commands( + Command(TimeTag(TimeComplete),Stem,Args) + ) +) + + +# Command with two string args + +FSW_CMD "hello" "world" +==> +Sequence( + Commands( + Command(Stem,Args(String,String)) + ) +) + +# Command with mixed args + +FSW_CMD "hello" 10 +==> +Sequence(Commands( + Command(Stem,Args(String,Number)) +)) + +# Command with mixed args and comment + +FSW_CMD "hello" 10# yay comment +==> +Sequence(Commands( + 
Command(Stem,Args(String,Number),LineComment) +)) + +# Command with two sting args wrapped by comments + +# full line comment +FSW_CMD "TRUE" "FALSE" # end of line +# full line comment +==> +Sequence(Commands( + LineComment, + Command(Stem,Args(String,String),LineComment), + LineComment +)) + +# Command with enum args (disallowed in linter) + +FSW_CMD TRUE FALSE +==> + +Sequence(Commands(Command(Stem,Args(Enum,Enum)))) + +# Command with enum args and comments + +# Com +FSW_CMD TRUE FALSE # Com 1 +# Com +==> + +Sequence(Commands( + LineComment, + Command(Stem,Args(Enum,Enum),LineComment), + LineComment +)) + +# Command with repeat args + +C CMD_1 ["asd"] [] ["asdf" 2] [ 4 4] + +==> + +Sequence(Commands( + Command(TimeTag(TimeComplete),Stem,Args( + RepeatArg(String), + RepeatArg, + RepeatArg(String,Number), + RepeatArg(Number,Number) + )) +)) + +# Locals and commands + +# comment before parameter +@INPUT_PARAMS L02INT +# declare my local variables, types are defined in adaptation +# comment before directive +@LOCALS L01STRING L02INT +# before metadata comment +@METADATA "foo" "val foo" +# before commands comment +FSW_CMD 1 2 +FSW_CMD2 +==> + +Sequence( + LineComment, + ParameterDeclaration(Enum), + LineComment, + LineComment, + LocalDeclaration(Enum,Enum), + LineComment, + Metadata( + MetaEntry(Key(String),Value(String)) + ), + Commands( + LineComment, + Command(Stem,Args(Number,Number)), + Command(Stem,Args) + ) +) + + +# Parameters, locals, and commands + +@INPUT_PARAMS L01STRING L02INT +@LOCALS L01STRING L02INT +FSW_CMD 1 2 +FSW_CMD2 +==> + +Sequence( + ParameterDeclaration(Enum,Enum), + LocalDeclaration(Enum,Enum), + Commands( + Command(Stem,Args(Number,Number)), + Command(Stem,Args) + ) +) + +# Parameters, locals, and indented commands + +@INPUT_PARAMS L01STRING L02INT +@LOCALS L01STRING L02INT + FSW_CMD 1 2 + FSW_CMD2 "string val" +==> + +Sequence( + ParameterDeclaration(Enum,Enum), + LocalDeclaration(Enum,Enum), + Commands( + Command(Stem,Args(Number,Number)), 
+ Command(Stem,Args(String)) + ) +) + +# Commands with models and metadata + +@ID "big test" + +@METADATA "foo" "val foo" + + +CMD_1 1 2 3 +@METADATA "foo" "val\" foo2" +@METADATA "bar" "val bar" + +CMD_2 "hello, it's me" +@METADATA "bar" "{ \"foo\": 5}" +@MODEL "a" 5 "c" +@MODEL "d" true "f" +==> + +Sequence( + IdDeclaration(String), + Metadata( + MetaEntry(Key(String),Value(String)) + ), + Commands( + Command( + Stem, + Args(Number,Number,Number), + Metadata( + MetaEntry(Key(String),Value(String)), + MetaEntry(Key(String),Value(String)) + ) + ), + Command( + Stem, + Args(String), + Metadata( + MetaEntry(Key(String),Value(String)) + ), + Models( + Model(Variable(String),Value(Number),Offset(String)), + Model(Variable(String),Value(Boolean),Offset(String)) + ) + ) + ) +) + +# Commands with models and metadata, with mixed indentation + +@ID "big test" + +@METADATA "foo" "val foo" + + # indented 4 spaces + CMD_1 1 2 3 + @METADATA "foo" "val\" foo2" + @METADATA "bar" "val bar" + + + # indented 8 spaces + CMD_2 "hello, it's me" + @METADATA "bar" "val bar2" + @MODEL "a" "b" "c" + @MODEL "d" "e" "f" + +==> + +Sequence( + IdDeclaration(String), + Metadata( + MetaEntry(Key(String),Value(String)) + ), + Commands( + LineComment, + Command( + Stem, + Args(Number,Number,Number), + Metadata( + MetaEntry(Key(String),Value(String)), + MetaEntry(Key(String),Value(String)) + ) + ), + LineComment, + Command( + Stem, + Args(String), + Metadata( + MetaEntry(Key(String),Value(String)) + ), + Models( + Model(Variable(String),Value(String),Offset(String)), + Model(Variable(String),Value(String),Offset(String)) + ) + ) + ) +) + +# Mega Sequence + + @ID "big test" + +# Welcome + + + @INPUT_PARAMS PARM1 + + + # Bingo + + @LOCALS FOO BAR BIZ + + + + + + + +@METADATA "foo" "val foo" + + + +@LOAD_AND_GO + + # indented 4 spaces + CMD_1 1 2 3 + @METADATA "foo" "val\" foo2" + @METADATA "bar" "val bar" + + + # indented 8 spaces + CMD_2 "hello, it's me" + @MODEL "a" "b" "c" + @MODEL "d" "e" "f" + 
+==> + +Sequence( + IdDeclaration(String), + LineComment, + ParameterDeclaration(Enum), + LineComment, + LocalDeclaration(Enum,Enum,Enum), + Metadata(MetaEntry(Key(String),Value(String))), + Commands( + LoadAndGoDirective, + LineComment, + Command(Stem,Args(Number,Number,Number),Metadata(MetaEntry(Key(String),Value(String)),MetaEntry(Key(String),Value(String)))), + LineComment, + Command(Stem,Args(String),Models(Model(Variable(String),Value(String),Offset(String)),Model(Variable(String),Value(String),Offset(String)))) + ) +) diff --git a/src/tests/mocks/sequencing/grammar-cases/time_formats.txt b/src/tests/mocks/sequencing/grammar-cases/time_formats.txt new file mode 100644 index 0000000000..1323363c65 --- /dev/null +++ b/src/tests/mocks/sequencing/grammar-cases/time_formats.txt @@ -0,0 +1,111 @@ +# Different absolute time tags + +C CMD_1 +A2030-001T12:34:56 CMD_2 "hello" "world" +A2030-001T12:34:56.789 CMD_3 "subseconds" +==> +Sequence(Commands( + Command( + TimeTag(TimeComplete), + Stem, + Args + ), + Command( + TimeTag(TimeAbsolute), + Stem, + Args(String,String) + ), + Command( + TimeTag(TimeAbsolute), + Stem, + Args(String) + ), +)) + +# Different relative times + +R010T01:00:00.000 CMD_4 "hello" 10 +R00:10:00.000 CMD_5 +R00:00:01 CMD_6 +R00:00:01.123 CMD_7 +R10 CMD_8 10 +R10.123 CMD_9 10 + +==> + +Sequence(Commands( + Command( + TimeTag(TimeRelative), + Stem, + Args(String,Number) + ), + Command( + TimeTag(TimeRelative), + Stem, + Args + ), + Command( + TimeTag(TimeRelative), + Stem, + Args + ), + Command( + TimeTag(TimeRelative), + Stem, + Args + ), + Command( + TimeTag(TimeRelative), + Stem, + Args(Number) + ), + Command( + TimeTag(TimeRelative), + Stem, + Args(Number) + ) +)) + +# Different epoch times + +E123T12:34:56.789 CMD_3 +E-123T12:34:56.789 CMD_3 +E+123T12:34:56.789 CMD_3 +E12:34:56.789 CMD_3 +E-12:34:56.789 CMD_3 +E+12:34:56.789 CMD_3 +E123T12:34:56 CMD_3 +E-123T12:34:56 CMD_3 +E+123T12:34:56 CMD_3 +E12:34:56 CMD_3 +E-12:34:56 CMD_3 +E+12:34:56 
CMD_3 +E123 CMD_3 +E-123 CMD_3 +E+123 CMD_3 +E123.456 CMD_3 +E-123.456 CMD_3 +E+123.456 CMD_3 + +==> + +Sequence(Commands( + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args), + Command(TimeTag(TimeEpoch),Stem,Args) +)) diff --git a/src/tests/mocks/sequencing/sequences/mega_sequence.txt b/src/tests/mocks/sequencing/sequences/mega_sequence.txt new file mode 100644 index 0000000000..1ca4f978fd --- /dev/null +++ b/src/tests/mocks/sequencing/sequences/mega_sequence.txt @@ -0,0 +1,36 @@ + + + @ID "big test" + +# Welcome + + + @INPUT_PARAMS PARM1 + + + # Bingo + + @LOCALS FOO BAR BIZ + + + + + + + +@METADATA "foo" "val foo" + + + +@LOAD_AND_GO + + # indented 4 spaces + CMD_1 1 2 3 + @METADATA "foo" "val\" foo2" + @METADATA "bar" "val bar" + + + # indented 8 spaces + CMD_2 "hello, it's me" + @MODEL "a" "b" "c" + @MODEL "d" "e" "f" diff --git a/src/tests/mocks/sequencing/sequences/seq_with_empty_linest.txt b/src/tests/mocks/sequencing/sequences/seq_with_empty_linest.txt new file mode 100644 index 0000000000..f2a5881072 --- /dev/null +++ b/src/tests/mocks/sequencing/sequences/seq_with_empty_linest.txt @@ -0,0 +1,12 @@ + +# standalone comment +CMD_1 "strarg" 0 # inline comment + +# standalone comment + + + + +CMD_2 "strarg" 0 # inline comment + +@METADATA "key" "val" diff --git a/src/types/expansion.ts b/src/types/expansion.ts index 
c0f8816224..7f7e99c063 100644 --- a/src/types/expansion.ts +++ b/src/types/expansion.ts @@ -5,7 +5,6 @@ import type { Tag } from './tags'; export type ExpansionRule = { activity_type: string; - authoring_command_dict_id: number; authoring_mission_model_id: number; created_at: string; description: string; @@ -13,6 +12,7 @@ export type ExpansionRule = { id: number; name: string; owner: UserId; + parcel_id: number; tags: { tag: Tag }[]; updated_at: string; updated_by: UserId; @@ -42,7 +42,6 @@ export type ExpansionSequence = { export type ExpansionSequenceInsertInput = Omit; export type ExpansionSet = { - command_dict_id: number; created_at: string; description: string; expansion_rules: ExpansionRuleSlim[]; @@ -50,6 +49,7 @@ export type ExpansionSet = { mission_model_id: number; name: string; owner: UserId; + parcel_id: number; updated_at: string; updated_by: UserId; }; @@ -65,7 +65,6 @@ export type ActivityInstanceJoin = { export type ExpandedSequence = { created_at: string; - edsl_string: string; expanded_sequence: SeqJson; id: number; seq_id: string; diff --git a/src/types/global-type.ts b/src/types/global-type.ts new file mode 100644 index 0000000000..be1f430456 --- /dev/null +++ b/src/types/global-type.ts @@ -0,0 +1,6 @@ +import type { GlobalTypes } from '../enums/globalTypes'; + +export type GlobalType = { + name: string; + type: GlobalTypes; +}; diff --git a/src/types/sequencing.ts b/src/types/sequencing.ts index 202964d47c..e3ef97d466 100644 --- a/src/types/sequencing.ts +++ b/src/types/sequencing.ts @@ -1,13 +1,55 @@ +import type { DictionaryTypes } from '../enums/dictionaryTypes'; import type { UserId } from './app'; +export type ChannelDictionary = { + type: DictionaryTypes.CHANNEL; +} & DictionaryType; + export type CommandDictionary = { - command_types_typescript_path: string; + type: DictionaryTypes.COMMAND; +} & DictionaryType; + +export type ParameterDictionary = { + type: DictionaryTypes.PARAMETER; +} & DictionaryType; + +export type 
SequenceAdaptation = { + adaptation: string; + type: DictionaryTypes.ADAPTATION; +} & DictionaryType; + +export type DictionaryType = { created_at: string; id: number; mission: string; + path: string; + updated_at: string; version: string; }; +export type Parcel = { + channel_dictionary_id: number | null; + command_dictionary_id: number; + created_at: string; + id: number; + name: string; + owner: UserId; + sequence_adaptation_id: number | null; + updated_at: string; +}; + +export type ParcelBundle = { + command_dictionary_id: number | undefined; +} & Omit; + +export type ParcelToParameterDictionary = { + id: number; + parameter_dictionary_id: number; + parcel_id: number; +}; + +export type ParcelInsertInput = Omit; + export type GetSeqJsonResponseError = { location: { column: number; @@ -26,12 +68,13 @@ export type GetSeqJsonResponse = { export type SeqJson = any; // TODO: Strongly type. export type UserSequence = { - authoring_command_dict_id: number; created_at: string; definition: string; id: number; name: string; owner: UserId; + parcel_id: number; + seq_json: SeqJson; updated_at: string; }; diff --git a/src/utilities/codemirror/codemirror-utils.ts b/src/utilities/codemirror/codemirror-utils.ts new file mode 100644 index 0000000000..fab174ada7 --- /dev/null +++ b/src/utilities/codemirror/codemirror-utils.ts @@ -0,0 +1,127 @@ +import type { SyntaxNode } from '@lezer/common'; +import type { + CommandDictionary, + FswCommandArgument, + FswCommandArgumentEnum, + FswCommandArgumentFixedString, + FswCommandArgumentFloat, + FswCommandArgumentInteger, + FswCommandArgumentNumeric, + FswCommandArgumentRepeat, + FswCommandArgumentUnsigned, + FswCommandArgumentVarString, +} from '@nasa-jpl/aerie-ampcs'; +import type { EditorView } from 'codemirror'; +import { fswCommandArgDefault } from '../new-sequence-editor/command-dictionary'; +import { TOKEN_REPEAT_ARG } from '../new-sequence-editor/sequencer-grammar-constants'; + +export function isFswCommandArgumentEnum(arg: 
FswCommandArgument): arg is FswCommandArgumentEnum { + return arg.arg_type === 'enum'; +} + +export function isFswCommandArgumentInteger(arg: FswCommandArgument): arg is FswCommandArgumentInteger { + return arg.arg_type === 'integer'; +} + +export function isFswCommandArgumentFloat(arg: FswCommandArgument): arg is FswCommandArgumentFloat { + return arg.arg_type === 'float'; +} + +export function isFswCommandArgumentNumeric(arg: FswCommandArgument): arg is FswCommandArgumentNumeric { + return arg.arg_type === 'numeric'; +} + +export function isFswCommandArgumentUnsigned(arg: FswCommandArgument): arg is FswCommandArgumentUnsigned { + return arg.arg_type === 'unsigned'; +} + +export function isFswCommandArgumentRepeat(arg: FswCommandArgument): arg is FswCommandArgumentRepeat { + return arg.arg_type === 'repeat'; +} + +export function isFswCommandArgumentVarString(arg: FswCommandArgument): arg is FswCommandArgumentVarString { + return arg.arg_type === 'var_string'; +} + +export function isFswCommandArgumentFixedString(arg: FswCommandArgument): arg is FswCommandArgumentFixedString { + return arg.arg_type === 'fixed_string'; +} + +export function isNumberArg(arg: FswCommandArgument): arg is NumberArg { + return ( + isFswCommandArgumentFloat(arg) || + isFswCommandArgumentInteger(arg) || + isFswCommandArgumentNumeric(arg) || + isFswCommandArgumentUnsigned(arg) + ); +} + +export function isStringArg(arg: FswCommandArgument): arg is StringArg { + return isFswCommandArgumentVarString(arg) || isFswCommandArgumentFixedString(arg); +} + +export type StringArg = FswCommandArgumentVarString | FswCommandArgumentFixedString; + +export type NumberArg = + | FswCommandArgumentFloat + | FswCommandArgumentInteger + | FswCommandArgumentNumeric + | FswCommandArgumentUnsigned; + +export type ArgTextDef = { + argDef?: FswCommandArgument; + children?: ArgTextDef[]; + node?: SyntaxNode; + parentArgDef?: FswCommandArgumentRepeat; + text?: string; +}; + +export function addDefaultArgs( + 
commandDictionary: CommandDictionary, + view: EditorView, + commandNode: SyntaxNode, + argDefs: FswCommandArgument[], +) { + let insertPosition: undefined | number = undefined; + const str = ' ' + argDefs.map(argDef => fswCommandArgDefault(argDef, commandDictionary.enumMap)).join(' '); + const argsNode = commandNode.getChild('Args'); + const stemNode = commandNode.getChild('Stem'); + if (stemNode) { + insertPosition = argsNode?.to ?? stemNode.to; + if (insertPosition !== undefined) { + const transaction = view.state.update({ + changes: { from: insertPosition, insert: str }, + }); + view.dispatch(transaction); + } + } else if (commandNode.name === TOKEN_REPEAT_ARG) { + insertPosition = commandNode.to - 1; + if (insertPosition !== undefined) { + const transaction = view.state.update({ + changes: { from: insertPosition, insert: str }, + }); + view.dispatch(transaction); + } + } +} + +export function getMissingArgDefs(argInfoArray: ArgTextDef[]) { + return argInfoArray + .filter((argInfo): argInfo is { argDef: FswCommandArgument } => !argInfo.node && !!argInfo.argDef) + .map(argInfo => argInfo.argDef); +} + +export function isQuoted(s: string) { + return s.startsWith('"') && s.endsWith('"'); +} + +export function unquoteUnescape(s: string) { + if (isQuoted(s)) { + return s.slice(1, -1).replaceAll('\\"', '"'); + } + return s; +} + +export function quoteEscape(s: string) { + return `"${s.replaceAll('"', '\\"')}"`; +} diff --git a/src/utilities/codemirror/custom-folder.ts b/src/utilities/codemirror/custom-folder.ts new file mode 100644 index 0000000000..750276143b --- /dev/null +++ b/src/utilities/codemirror/custom-folder.ts @@ -0,0 +1,44 @@ +import type { EditorState } from '@codemirror/state'; +import type { SyntaxNode } from '@lezer/common'; + +export function customFoldInside(node: SyntaxNode, state: EditorState): { from: number; to: number } | null { + if (node.name === 'Command') { + return foldCommand(node, state); + } + return null; +} + +function 
foldCommand(node: SyntaxNode, state: EditorState): { from: number; to: number } | null { + const stemNode = node.getChild('Stem'); + const argsNodes = node.getChildren('Args'); + const commentNode = node.getChild('LineComment'); + const metadataNode = node.getChildren('Metadata'); + const modelNodes = node.getChildren('Models'); + + if (stemNode == null) { + return null; + } + + const from = calculateStartAndEnd([stemNode, ...argsNodes, commentNode]).to; + const nonCommandTo = calculateStartAndEnd([...metadataNode, ...modelNodes]).to; + const text = state.sliceDoc(from, nonCommandTo); + // Exclude the last new line so commands remain on different lines + const to = from + text.lastIndexOf('\n'); + return { from, to }; +} + +// FIXME -- this looks like copy paste of getFromAndTo() +function calculateStartAndEnd(nodes: (SyntaxNode | null)[]): { from: number; to: number } { + return nodes.reduce( + (acc, node) => { + if (node === null) { + return acc; + } + return { + from: Math.min(acc.from, node.from), + to: Math.max(acc.to, node.to), + }; + }, + { from: Number.MAX_VALUE, to: Number.MIN_VALUE }, + ); +} diff --git a/src/utilities/codemirror/index.ts b/src/utilities/codemirror/index.ts new file mode 100644 index 0000000000..8bd75b1059 --- /dev/null +++ b/src/utilities/codemirror/index.ts @@ -0,0 +1,58 @@ +import { CompletionContext, type CompletionResult } from '@codemirror/autocomplete'; +import { + LRLanguage, + LanguageSupport, + delimitedIndent, + foldInside, + foldNodeProp, + indentNodeProp, +} from '@codemirror/language'; +import { styleTags, tags as t } from '@lezer/highlight'; +import { customFoldInside } from './custom-folder'; +import { parser } from './sequence.grammar'; + +export const SeqLanguage = LRLanguage.define({ + languageData: { + commentTokens: { line: '#' }, + }, + parser: parser.configure({ + props: [ + indentNodeProp.add({ + Application: delimitedIndent({ align: false, closing: ')' }), + }), + foldNodeProp.add({ + Application: foldInside, + 
Command: customFoldInside, + }), + styleTags({ + Boolean: t.bool, + GenericDirective: t.namespace, + Global: t.namespace, + HardwareCommands: t.namespace, + IdDeclaration: t.namespace, + ImmediateCommands: t.namespace, + LineComment: t.comment, + LoadAndGoDirective: t.namespace, + LocalDeclaration: t.namespace, + MetaEntry: t.namespace, + Model: t.namespace, + ParameterDeclaration: t.namespace, + Stem: t.keyword, + String: t.string, + TimeAbsolute: t.className, + TimeComplete: t.className, + TimeEpoch: t.className, + TimeRelative: t.className, + }), + ], + }), +}); + +export function setupLanguageSupport(autocomplete?: (context: CompletionContext) => CompletionResult | null) { + if (autocomplete) { + const autocompleteExtension = SeqLanguage.data.of({ autocomplete }); + return new LanguageSupport(SeqLanguage, [autocompleteExtension]); + } else { + return new LanguageSupport(SeqLanguage); + } +} diff --git a/src/utilities/codemirror/sequence.grammar b/src/utilities/codemirror/sequence.grammar new file mode 100644 index 0000000000..95402c7a29 --- /dev/null +++ b/src/utilities/codemirror/sequence.grammar @@ -0,0 +1,187 @@ +@top Sequence { + optSpace + ( + commentLine* ~maybeComments + (IdDeclaration | ParameterDeclaration | LocalDeclaration | GenericDirective) + )* + commentLine* ~maybeComments + Metadata? + Commands? + ImmediateCommands? + HardwareCommands? +} + +// Potential Improvements +// maintainability - use @specialize on directives +// expressiveness - add activate, load and ground syntax + +@precedence { + stemStart @cut +} + +GenericDirective { + genericDirective (whiteSpace String)* newLine +} + +IdDeclaration { + idDirective (whiteSpace (String | Enum | Number)?)? 
newLine +} + +ParameterDeclaration { + parameterDirective (whiteSpace Enum)+ newLine +} + +LocalDeclaration { + localsDirective (whiteSpace Enum)+ newLine +} + +commandBlock { + (Command | commentLine ~maybeComments)+ +} + +commentLine { + LineComment newLine +} + +optSpace { + (newLine | whiteSpace)? +} + +Commands { + (LoadAndGoDirective newLine)? + commandBlock +} + +ImmediateCommands { + immediateDirective newLine + commandBlock +} + +HardwareCommands { + hardwareDirective newLine + commandBlock +} + +TimeTag { TimeAbsolute | TimeEpoch | TimeRelative | TimeComplete } + +Args { + (whiteSpace (arg | RepeatArg))* whiteSpace? +} + +RepeatArg { + "[" (whiteSpace? arg)* whiteSpace? "]" +} + +arg[@isGroup=Arguments] { Number | String | Enum } + +Command { + TimeTag? + Stem + Args + LineComment? + newLine + Metadata? + Models? +} + +Metadata { + MetaEntry { + metadataDirective + whiteSpace Key { String } + whiteSpace Value { metaValue } + newLine + }+ +} + +metaValue { + String | Number | Boolean | Null | Array | Object +} + +Object { "{" list? "}" } +Array { "[" list? "]" } + +Property { PropertyName optSpace ":" optSpace metaValue } +PropertyName[isolate] { String } + +list { optSpace item (optSpace "," optSpace item)* optSpace } + +Models { + Model { + modelDirective + whiteSpace Variable { String } + whiteSpace Value { String | Number | Boolean } + whiteSpace Offset { String } + newLine + }+ +} + +Enum { identifier } + +Stem { !stemStart identifier } + +@tokens { + identifier { @asciiLetter (@asciiLetter| @digit | "_" | "-")* } + + timeHhmmss { @digit@digit":"@digit@digit":"@digit@digit("."@digit+)? } + + timeDOY { @digit@digit@digit"T"timeHhmmss } + + timeSecond { $[1-9] @digit* ("."@digit+)? 
} + + TimeAbsolute { 'A'@digit@digit@digit@digit"-"@digit@digit@digit"T"timeHhmmss whiteSpace } + + TimeRelative { 'R'(timeSecond | timeDOY | timeHhmmss) whiteSpace} + + TimeEpoch { 'E'$[+\-]?(timeSecond | timeDOY | timeHhmmss) whiteSpace} + + TimeComplete { 'C' whiteSpace } + + String { '"' (!["\\] | "\\" _)* '"' } + + hex { @digit | $[A-F] } + + Number { + ("+" | "-")? (@digit ("_" | @digit)* ("." ("_" | @digit)*)? | "." @digit ("_" | @digit)*) + (("e" | "E") ("+" | "-")? ("_" | @digit)+)? | + @digit ("_" | @digit)* "n" | + "0x" (hex | "_")+ "n"? + } + + TRUE { "true" } + FALSE { "false" } + Boolean { TRUE | FALSE } + Null { "null" } + + LineComment { "#"![\n\r]* } + + newLine { ($[ \t]* "\n")+ $[ \t]* | (whiteSpace? @eof) } + + whiteSpace { $[ \t]+ } + + idDirective { "@ID" } + LoadAndGoDirective { "@LOAD_AND_GO"} + immediateDirective { "@IMMEDIATE" } + hardwareDirective { "@HARDWARE" } + localsDirective { "@LOCALS" } + parameterDirective { "@INPUT_PARAMS" } + metadataDirective { "@METADATA" } + modelDirective { "@MODEL" } + genericDirective { "@"identifier } + + @precedence { newLine, whiteSpace } + + @precedence{ TimeAbsolute, TimeRelative, TimeEpoch, TimeComplete, identifier } + + @precedence { + idDirective, + metadataDirective, + modelDirective, + immediateDirective, + hardwareDirective, + localsDirective, + parameterDirective, + LoadAndGoDirective, + genericDirective, + identifier + } +} diff --git a/src/utilities/codemirror/sequence.grammar.d.ts b/src/utilities/codemirror/sequence.grammar.d.ts new file mode 100644 index 0000000000..c306a0405f --- /dev/null +++ b/src/utilities/codemirror/sequence.grammar.d.ts @@ -0,0 +1,2 @@ +import { LRParser } from '@lezer/lr'; +export declare const parser: LRParser; diff --git a/src/utilities/effects.ts b/src/utilities/effects.ts index 4fcddbad20..da53c43a22 100644 --- a/src/utilities/effects.ts +++ b/src/utilities/effects.ts @@ -1,8 +1,13 @@ import { goto } from '$app/navigation'; import { base } from '$app/paths'; 
import { env } from '$env/dynamic/public'; -import type { CommandDictionary as AmpcsCommandDictionary } from '@nasa-jpl/aerie-ampcs'; +import { + type ChannelDictionary as AmpcsChannelDictionary, + type CommandDictionary as AmpcsCommandDictionary, + type ParameterDictionary as AmpcsParameterDictionary, +} from '@nasa-jpl/aerie-ampcs'; import { get } from 'svelte/store'; +import { DictionaryHeaders } from '../enums/dictionaryHeaders'; import { SearchParameters } from '../enums/searchParameters'; import { Status } from '../enums/status'; import { activityDirectives, activityDirectivesMap, selectedActivityDirectiveId } from '../stores/activities'; @@ -18,7 +23,12 @@ import { import { createModelError, creatingModel, models } from '../stores/model'; import { createPlanError, creatingPlan, planId } from '../stores/plan'; import { schedulingRequests, selectedSpecId } from '../stores/scheduling'; -import { commandDictionaries } from '../stores/sequencing'; +import { + channelDictionaries, + commandDictionaries, + parameterDictionaries, + sequenceAdaptations, +} from '../stores/sequencing'; import { selectedSpanId, simulationDataset, simulationDatasetId } from '../stores/simulation'; import { createTagError } from '../stores/tags'; import { applyViewUpdate, view, viewUpdateTimeline } from '../stores/views'; @@ -123,12 +133,18 @@ import type { SchedulingResponse, } from '../types/scheduling'; import type { ValueSchema } from '../types/schema'; -import type { - CommandDictionary, - GetSeqJsonResponse, - SeqJson, - UserSequence, - UserSequenceInsertInput, +import { + type ChannelDictionary, + type CommandDictionary, + type GetSeqJsonResponse, + type ParameterDictionary, + type Parcel, + type ParcelInsertInput, + type ParcelToParameterDictionary, + type SeqJson, + type SequenceAdaptation, + type UserSequence, + type UserSequenceInsertInput, } from '../types/sequencing'; import type { PlanDataset, @@ -539,27 +555,6 @@ const effects = { } }, - async 
createCommandDictionary(files: FileList, user: User | null): Promise { - try { - if (!queryPermissions.CREATE_COMMAND_DICTIONARY(user)) { - throwPermissionError('upload a command dictionary'); - } - - const file: File = files[0]; - const dictionary = await file.text(); - const data = await reqHasura(gql.CREATE_COMMAND_DICTIONARY, { dictionary }, user); - const { createCommandDictionary: newCommandDictionary } = data; - if (newCommandDictionary != null) { - return newCommandDictionary; - } else { - throw Error('Unable to upload command dictionary'); - } - } catch (e) { - catchError('Command Dictionary Upload Failed', e as Error); - return null; - } - }, - async createConstraint( name: string, isPublic: boolean, @@ -650,6 +645,31 @@ const effects = { } }, + async createCustomAdaptation( + adaptation: { adaptation: string }, + user: User | null, + ): Promise { + try { + if (!queryPermissions.CREATE_SEQUENCE_ADAPTATION(user)) { + throwPermissionError('upload a custom adaptation'); + } + + if (adaptation?.adaptation) { + const data = await reqHasura(gql.CREATE_SEQUENCE_ADAPTATION, { adaptation }, user); + const { createSequenceAdaptation: newSequenceAdaptation } = data; + if (newSequenceAdaptation != null) { + return newSequenceAdaptation; + } else { + throw Error('Unable to upload sequence adaptation'); + } + } + } catch (e) { + catchError('Sequence Adaptation Upload Failed', e as Error); + } + + return null; + }, + async createExpansionRule(rule: ExpansionRuleInsertInput, user: User | null): Promise { try { createExpansionRuleError.set(null); @@ -731,7 +751,7 @@ const effects = { }, async createExpansionSet( - dictionaryId: number, + parcelId: number, model: ModelSlim, expansionRuleIds: number[], user: User | null, @@ -748,10 +768,10 @@ const effects = { const data = await reqHasura( gql.CREATE_EXPANSION_SET, { - dictionaryId, expansionRuleIds, modelId: model.id, ...(name && { name }), + parcelId, ...(description && { description }), }, user, @@ -834,6 +854,61 @@ 
const effects = { return null; }, + async createParcel(parcel: ParcelInsertInput, user: User | null): Promise { + try { + if (!queryPermissions.CREATE_PARCEL(user)) { + throwPermissionError('create a parcel'); + } + + const data = await reqHasura>(gql.CREATE_PARCEL, { parcel }, user); + const { createParcel } = data; + + if (createParcel === null) { + throw Error(`Unable to create parcel "${parcel.name}"`); + } + + const { id } = createParcel; + showSuccessToast('Parcel Created Successfully'); + return id; + } catch (e) { + catchError('Parcel Create Failed', e as Error); + showFailureToast('Parcel Create Failed'); + return null; + } + }, + + async createParcelToParameterDictionaries( + parcelOwner: UserId, + parcelToParameterDictionariesToAdd: Omit[], + user: User | null, + ): Promise { + try { + if (!queryPermissions.CREATE_PARCEL_TO_PARAMETER_DICTIONARIES(user)) { + throwPermissionError('create parcel to parameter dictionary'); + } + + const data = await reqHasura<{ returning: ParcelToParameterDictionary[] }>( + gql.CREATE_PARCEL_TO_PARAMETER_DICTIONARIES, + { parcelToParameterDictionaries: parcelToParameterDictionariesToAdd }, + user, + ); + const { insert_parcel_to_parameter_dictionary } = data; + + if (insert_parcel_to_parameter_dictionary) { + showSuccessToast('Parcel to parameter dictionaries created successfully'); + } else { + throw Error('Unable to create parcel to parameter dictionaries'); + } + + return insert_parcel_to_parameter_dictionary.returning; + } catch (e) { + catchError('Create parcel to parameter dictionaries failed', e as Error); + showFailureToast('Create parcel to parameter dictionaries failed'); + } + + return null; + }, + async createPlan( end_time_doy: string, model_id: number, @@ -1801,6 +1876,33 @@ const effects = { return false; }, + async deleteChannelDictionary(id: number, user: User | null): Promise { + try { + if (!queryPermissions.DELETE_CHANNEL_DICTIONARY(user)) { + throwPermissionError('delete this channel dictionary'); + } + 
+ const { confirm } = await showConfirmModal( + 'Delete', + `Are you sure you want to delete the dictionary with ID: "${id}"?`, + 'Delete Channel Dictionary', + ); + + if (confirm) { + const data = await reqHasura<{ id: number }>(gql.DELETE_CHANNEL_DICTIONARY, { id }, user); + if (data.deleteChannelDictionary != null) { + showSuccessToast('Channel Dictionary Deleted Successfully'); + channelDictionaries.filterValueById(id); + } else { + throw Error(`Unable to delete channel dictionary with ID: "${id}"`); + } + } + } catch (e) { + catchError('Channel Dictionary Delete Failed', e as Error); + showFailureToast('Channel Dictionary Delete Failed'); + } + }, + async deleteCommandDictionary(id: number, user: User | null): Promise { try { if (!queryPermissions.DELETE_COMMAND_DICTIONARY(user)) { @@ -2063,6 +2165,95 @@ const effects = { } }, + async deleteParameterDictionary(id: number, user: User | null): Promise { + try { + if (!queryPermissions.DELETE_PARAMETER_DICTIONARY(user)) { + throwPermissionError('delete this parameter dictionary'); + } + + const { confirm } = await showConfirmModal( + 'Delete', + `Are you sure you want to delete the dictionary with ID: "${id}"?`, + 'Delete Parameter Dictionary', + ); + + if (confirm) { + const data = await reqHasura<{ id: number }>(gql.DELETE_PARAMETER_DICTIONARY, { id }, user); + if (data.deleteParameterDictionary != null) { + showSuccessToast('Parameter Dictionary Deleted Successfully'); + parameterDictionaries.filterValueById(id); + } else { + throw Error(`Unable to delete parameter dictionary with ID: "${id}"`); + } + } + } catch (e) { + catchError('Parameter Dictionary Delete Failed', e as Error); + showFailureToast('Parameter Dictionary Delete Failed'); + } + }, + + async deleteParcel(parcel: Parcel, user: User | null): Promise { + try { + if (!queryPermissions.DELETE_PARCEL(user, parcel)) { + throwPermissionError('delete this parcel'); + } + + const { confirm } = await showConfirmModal( + 'Delete', + `Are you sure you want 
to delete "${parcel.name}"?`, + 'Delete Parcel', + ); + + if (confirm) { + const data = await reqHasura<{ id: number }>(gql.DELETE_PARCEL, { id: parcel.id }, user); + + if (data.deleteParcel === null) { + throw Error(`Unable to delete parcel "${parcel.name}"`); + } + + showSuccessToast('Parcel Deleted Successfully'); + return true; + } + + return false; + } catch (e) { + catchError('Parcel Delete Failed', e as Error); + showFailureToast('Parcel Delete Failed'); + return false; + } + }, + + async deleteParcelToParameterDictionaries(ids: number[], user: User | null): Promise { + try { + if (!queryPermissions.DELETE_PARCEL_TO_PARAMETER_DICTIONARIES(user)) { + throwPermissionError('delete parcel to parameter dictionaries'); + } + + const data = await reqHasura<{ affected_rows: number }>( + gql.DELETE_PARCEL_TO_PARAMETER_DICTIONARIES, + { ids }, + user, + ); + const { delete_parcel_to_parameter_dictionary } = data; + if (delete_parcel_to_parameter_dictionary != null) { + const { affected_rows } = delete_parcel_to_parameter_dictionary; + + if (affected_rows !== ids.length) { + throw Error('Some parcel to parameter dictionaries were not successfully deleted'); + } + + showSuccessToast('Parcel to parameter dictionaries updated Successfully'); + return affected_rows; + } else { + throw Error('Unable to delete parcel to parameter dictionaries'); + } + } catch (e) { + catchError('Delete parcel to parameter dictionaries failed', e as Error); + showFailureToast('Delete parcel to parameter dictionaries failed'); + return null; + } + }, + async deletePlan(plan: PlanSlim, user: User | null): Promise { try { if (!queryPermissions.DELETE_PLAN(user, plan)) { @@ -2233,6 +2424,33 @@ const effects = { } }, + async deleteSequenceAdaptation(id: number, user: User | null): Promise { + try { + if (!queryPermissions.DELETE_SEQUENCE_ADAPTATION(user)) { + throwPermissionError('delete this sequence adaptation'); + } + + const { confirm } = await showConfirmModal( + 'Delete', + `Are you sure you 
want to delete the sequence adaptation with ID: "${id}"?`, + 'Delete Sequence Adaptation', + ); + + if (confirm) { + const data = await reqHasura<{ id: number }>(gql.DELETE_SEQUENCE_ADAPTATION, { id }, user); + if (data.deleteSequenceAdaptation === null) { + throw Error(`Unable to delete sequence adaptation with ID: "${id}"`); + } + + showSuccessToast('Sequence Adaptation Deleted Successfully'); + sequenceAdaptations.filterValueById(id); + } + } catch (e) { + catchError('Sequence Adaptation Delete Failed', e as Error); + showFailureToast('Sequence Adaptation Delete Failed'); + } + }, + async deleteSimulationTemplate( simulationTemplate: SimulationTemplate, modelName: string, @@ -2794,31 +3012,100 @@ const effects = { } }, + async getParcel(id: number, user: User | null): Promise { + try { + const data = await reqHasura(gql.GET_PARCEL, { id }, user); + const { parcel } = data; + return parcel; + } catch (e) { + catchError(e as Error); + return null; + } + }, + + async getParsedAmpcsChannelDictionary( + channelDictionaryId: number | null | undefined, + user: User | null, + ): Promise { + if (typeof channelDictionaryId !== 'number') { + return null; + } + + try { + const data = await reqHasura<[{ parsed_json: AmpcsChannelDictionary }]>( + gql.GET_PARSED_CHANNEL_DICTIONARY, + { channelDictionaryId }, + user, + ); + const { channel_dictionary } = data; + + if (!Array.isArray(channel_dictionary) || !channel_dictionary.length) { + catchError(`Unable to find channel dictionary with id ${channelDictionaryId}`); + return null; + } else { + const [{ parsed_json }] = channel_dictionary; + return parsed_json; + } + } catch (e) { + catchError(e as Error); + return null; + } + }, + async getParsedAmpcsCommandDictionary( commandDictionaryId: number | null | undefined, user: User | null, ): Promise { - if (commandDictionaryId !== null && commandDictionaryId !== undefined) { - try { - const data = await reqHasura<[{ parsed_json: AmpcsCommandDictionary }]>( - 
gql.GET_PARSED_COMMAND_DICTIONARY, - { commandDictionaryId }, - user, - ); - const { command_dictionary } = data; + if (typeof commandDictionaryId !== 'number') { + return null; + } - if (!Array.isArray(command_dictionary) || !command_dictionary.length) { - catchError(`Unable to find command dictionary with id ${commandDictionaryId}`); - return null; - } else { - const [{ parsed_json }] = command_dictionary; - return parsed_json; - } - } catch (e) { - catchError(e as Error); + try { + const data = await reqHasura<[{ parsed_json: AmpcsCommandDictionary }]>( + gql.GET_PARSED_COMMAND_DICTIONARY, + { commandDictionaryId }, + user, + ); + const { command_dictionary } = data; + + if (!Array.isArray(command_dictionary) || !command_dictionary.length) { + catchError(`Unable to find command dictionary with id ${commandDictionaryId}`); return null; + } else { + const [{ parsed_json }] = command_dictionary; + return parsed_json; } - } else { + } catch (e) { + catchError(e as Error); + return null; + } + }, + + async getParsedAmpcsParameterDictionary( + parameterDictionaryId: number | null | undefined, + user: User | null, + ): Promise { + if (typeof parameterDictionaryId !== 'number') { + return null; + } + + try { + const data = await reqHasura<[{ parsed_json: AmpcsParameterDictionary }]>( + gql.GET_PARSED_PARAMETER_DICTIONARY, + { parameterDictionaryId }, + user, + ); + const { parameter_dictionary } = data; + + if (!Array.isArray(parameter_dictionary) || !parameter_dictionary.length) { + catchError(`Unable to find parameter dictionary with id ${parameterDictionaryId}`); + return null; + } else { + const [{ parsed_json }] = parameter_dictionary; + return parsed_json; + } + } catch (e) { + catchError(e as Error); return null; } }, @@ -3180,6 +3467,25 @@ const effects = { } }, + async getSequenceAdaptation(sequence_adaptation_id: number, user: User | null): Promise { + try { + const data = await reqHasura<[sequence_adaptation: SequenceAdaptation]>( + 
gql.GET_SEQUENCE_ADAPTATION, + { sequence_adaptation_id }, + user, + ); + const { sequence_adaptation } = data; + + if (sequence_adaptation && sequence_adaptation.length > 0) { + return sequence_adaptation[0]; + } + } catch (e) { + catchError(e as Error); + } + + return null; + }, + async getSpans(datasetId: number, user: User | null, signal: AbortSignal | undefined = undefined): Promise { try { const data = await reqHasura(gql.GET_SPANS, { datasetId }, user, signal); @@ -4301,6 +4607,33 @@ const effects = { } catch (e) { catchError('Model Update Failed', e as Error); showFailureToast('Model Update Failed'); + } + return null; + }, + + async updateParcel( + id: number, + parcel: Partial, + parcelOwner: UserId, + user: User | null, + ): Promise { + try { + if (!queryPermissions.UPDATE_PARCEL(user, { owner: parcelOwner })) { + throwPermissionError('update this parcel'); + } + + const data = await reqHasura>(gql.UPDATE_PARCEL, { id, parcel }, user); + const { updateParcel } = data; + + if (updateParcel === null) { + throw Error(`Unable to update parcel with ID: "${id}"`); + } + + showSuccessToast('Parcel Updated Successfully'); + return ''; + } catch (e) { + catchError('Parcel Update Failed', e as Error); + showFailureToast('Parcel Update Failed'); return null; } }, @@ -4856,6 +5189,65 @@ const effects = { } }, + async uploadDictionary( + dictionary: string, + type: 'COMMAND' | 'CHANNEL' | 'PARAMETER', + user: User | null, + ): Promise { + try { + if (!queryPermissions.CREATE_DICTIONARY(user)) { + throwPermissionError('upload a command dictionary'); + } + + const data = await reqHasura( + gql.CREATE_DICTIONARY, + { dictionary, type }, + user, + ); + + const { createDictionary: newDictionary } = data; + if (newDictionary === null) { + throw Error('Unable to upload command dictionary'); + } + + return newDictionary; + } catch (e) { + catchError('Command Dictionary Upload Failed', e as Error); + return null; + } + }, + + async uploadDictionaryOrAdaptation( + files: 
FileList, + user: User | null, + ): Promise { + const file: File = files[0]; + const text = await file.text(); + const splitLineDictionary = text.split('\n'); + + let type: 'COMMAND' | 'CHANNEL' | 'PARAMETER' = 'COMMAND'; + switch (splitLineDictionary[1]) { + case `<${DictionaryHeaders.command_dictionary}>`: { + type = 'COMMAND'; + break; + } + case `<${DictionaryHeaders.telemetry_dictionary}>`: { + type = 'CHANNEL'; + break; + } + case `<${DictionaryHeaders.param_def}>`: { + type = 'PARAMETER'; + break; + } + default: { + const adaptation = await this.createCustomAdaptation({ adaptation: text }, user); + return adaptation; + } + } + const dictionary = await this.uploadDictionary(text, type, user); + return dictionary; + }, + async uploadFile(file: File, user: User | null): Promise { try { const body = new FormData(); diff --git a/src/utilities/gql.ts b/src/utilities/gql.ts index d7602e149e..e1d32f157c 100644 --- a/src/utilities/gql.ts +++ b/src/utilities/gql.ts @@ -9,6 +9,7 @@ export enum Queries { APPLY_PRESET_TO_ACTIVITY = 'apply_preset_to_activity', BEGIN_MERGE = 'begin_merge', CANCEL_MERGE = 'cancel_merge', + CHANNEL_DICTIONARIES = 'channel_dictionary', COMMAND_DICTIONARIES = 'command_dictionary', COMMIT_MERGE = 'commit_merge', CONSTRAINTS_DSL_TYPESCRIPT = 'constraintsDslTypescript', @@ -26,6 +27,7 @@ export enum Queries { DELETE_ACTIVITY_PRESET = 'delete_activity_presets_by_pk', DELETE_ACTIVITY_REANCHOR_PLAN_START_BULK = 'delete_activity_by_pk_reanchor_plan_start_bulk', DELETE_ACTIVITY_REANCHOR_TO_ANCHOR_BULK = 'delete_activity_by_pk_reanchor_to_anchor_bulk', + DELETE_CHANNEL_DICTIONARY = 'delete_channel_dictionary_by_pk', DELETE_COMMAND_DICTIONARY = 'delete_command_dictionary_by_pk', DELETE_CONSTRAINT_DEFINITION_TAGS = 'delete_constraint_definition_tags', DELETE_CONSTRAINT_METADATA = 'delete_constraint_metadata_by_pk', @@ -36,6 +38,9 @@ export enum Queries { DELETE_EXPANSION_RULE_TAGS = 'delete_expansion_rule_tags', DELETE_EXPANSION_SET = 
'delete_expansion_set_by_pk', DELETE_MISSION_MODEL = 'delete_mission_model_by_pk', + DELETE_PARAMETER_DICTIONARY = 'delete_parameter_dictionary_by_pk', + DELETE_PARCEL = 'delete_parcel_by_pk', + DELETE_PARCEL_TO_PARAMETER_DICTIONARY = 'delete_parcel_to_parameter_dictionary', DELETE_PLAN = 'delete_plan_by_pk', DELETE_PLAN_COLLABORATOR = 'delete_plan_collaborators_by_pk', DELETE_PLAN_SNAPSHOT = 'delete_plan_snapshot_by_pk', @@ -53,6 +58,7 @@ export enum Queries { DELETE_SCHEDULING_SPECIFICATION_CONDITIONS = 'delete_scheduling_specification_conditions', DELETE_SCHEDULING_SPECIFICATION_GOALS = 'delete_scheduling_specification_goals', DELETE_SEQUENCE = 'delete_sequence_by_pk', + DELETE_SEQUENCE_ADAPTATION = 'delete_sequence_adaptation_by_pk', DELETE_SEQUENCE_TO_SIMULATED_ACTIVITY = 'delete_sequence_to_simulated_activity_by_pk', DELETE_SIMULATION_TEMPLATE = 'delete_simulation_template_by_pk', DELETE_TAG = 'delete_tags_by_pk', @@ -81,6 +87,8 @@ export enum Queries { INSERT_ACTIVITY_DIRECTIVE = 'insert_activity_directive_one', INSERT_ACTIVITY_DIRECTIVE_TAGS = 'insert_activity_directive_tags', INSERT_ACTIVITY_PRESET = 'insert_activity_presets_one', + INSERT_CHANNEL_DICTIONARY = 'insert_channel_dictionary_one', + INSERT_DICTIONARY = 'insert_dictionary_one', INSERT_CONSTRAINT_DEFINITION = 'insert_constraint_definition_one', INSERT_CONSTRAINT_DEFINITION_TAGS = 'insert_constraint_definition_tags', INSERT_CONSTRAINT_METADATA = 'insert_constraint_metadata_one', @@ -91,6 +99,9 @@ export enum Queries { INSERT_EXPANSION_RULE = 'insert_expansion_rule_one', INSERT_EXPANSION_RULE_TAGS = 'insert_expansion_rule_tags', INSERT_MISSION_MODEL = 'insert_mission_model_one', + INSERT_PARAMETER_DICTIONARY = 'insert_parameter_dictionary_one', + INSERT_PARCEL = 'insert_parcel_one', + INSERT_PARCEL_TO_PARAMETER_DICTIONARY = 'insert_parcel_to_parameter_dictionary', INSERT_PLAN = 'insert_plan_one', INSERT_PLAN_SNAPSHOT_TAGS = 'insert_plan_snapshot_tags', INSERT_PLAN_COLLABORATORS = 
'insert_plan_collaborators', @@ -111,6 +122,7 @@ export enum Queries { INSERT_SCHEDULING_SPECIFICATION_GOAL = 'insert_scheduling_specification_goals_one', INSERT_SCHEDULING_SPECIFICATION_GOALS = 'insert_scheduling_specification_goals', INSERT_SEQUENCE = 'insert_sequence_one', + INSERT_SEQUENCE_ADAPTATION = 'insert_sequence_adaptation_one', INSERT_SEQUENCE_TO_SIMULATED_ACTIVITY = 'insert_sequence_to_simulated_activity_one', INSERT_SIMULATION_TEMPLATE = 'insert_simulation_template_one', INSERT_TAG = 'insert_tags_one', @@ -121,6 +133,10 @@ export enum Queries { MERGE_REQUESTS = 'merge_request', MISSION_MODEL = 'mission_model_by_pk', MISSION_MODELS = 'mission_model', + PARAMETER_DICTIONARIES = 'parameter_dictionary', + PARCEL = 'parcel_by_pk', + PARCELS = 'parcel', + PARCEL_TO_PARAMETER_DICTIONARY = 'parcel_to_parameter_dictionary', PLAN = 'plan_by_pk', PLANS = 'plan', PLAN_DATASETS = 'plan_dataset', @@ -141,6 +157,7 @@ export enum Queries { SCHEDULING_SPECIFICATION_CONDITIONS = 'scheduling_specification_conditions', SCHEDULING_SPECIFICATION_GOALS = 'scheduling_specification_goals', SEQUENCE = 'sequence', + SEQUENCE_ADAPTATION = 'sequence_adaptation', SEQUENCE_TO_SIMULATED_ACTIVITY = 'sequence_to_simulated_activity_by_pk', SET_RESOLUTION = 'set_resolution', SET_RESOLUTIONS = 'set_resolution_bulk', @@ -159,6 +176,7 @@ export enum Queries { UPDATE_CONSTRAINT_MODEL_SPECIFICATION = 'update_constraint_model_specification_by_pk', UPDATE_EXPANSION_RULE = 'update_expansion_rule_by_pk', UPDATE_MISSION_MODEL = 'update_mission_model_by_pk', + UPDATE_PARCEL = 'update_parcel_by_pk', UPDATE_PLAN_SNAPSHOT = 'update_plan_snapshot_by_pk', UPDATE_SCHEDULING_CONDITION_METADATA = 'update_scheduling_condition_metadata_by_pk', UPDATE_SCHEDULING_GOAL_METADATA = 'update_scheduling_goal_metadata_by_pk', @@ -309,13 +327,13 @@ const gql = { } `, - CREATE_COMMAND_DICTIONARY: `#graphql - mutation CreateCommandDictionary($dictionary: String!) 
{ - createCommandDictionary: ${Queries.UPLOAD_DICTIONARY}(dictionary: $dictionary) { - command_types_typescript_path + CREATE_CHANNEL_DICTIONARY: `#graphql + mutation CreateChannelDictionary($channelDictionary: channel_dictionary_insert_input!) { + createChannelDictionary: ${Queries.INSERT_CHANNEL_DICTIONARY}(object: $channelDictionary) { created_at id mission + parsed_json version } } @@ -363,6 +381,20 @@ const gql = { } `, + CREATE_DICTIONARY: `#graphql + mutation CreateDictionary($dictionary: String!, $type: String!) { + createDictionary: ${Queries.UPLOAD_DICTIONARY}(dictionary: $dictionary, type : $type) { + dictionary_path + created_at + id + mission + parsed_json + version + type + } + } + `, + CREATE_EXPANSION_RULE: `#graphql mutation CreateExpansionRule($rule: expansion_rule_insert_input!) { createExpansionRule: ${Queries.INSERT_EXPANSION_RULE}(object: $rule) { @@ -391,13 +423,13 @@ const gql = { `, CREATE_EXPANSION_SET: `#graphql - mutation CreateExpansionSet($dictionaryId: Int!, $modelId: Int!, $expansionRuleIds: [Int!]!, $name: String, $description: String) { + mutation CreateExpansionSet($parcelId: Int!, $modelId: Int!, $expansionRuleIds: [Int!]!, $name: String, $description: String) { ${Queries.CREATE_EXPANSION_SET}( - commandDictionaryId: $dictionaryId, missionModelId: $modelId, expansionIds: $expansionRuleIds, name: $name, description: $description + parcelId : $parcelId ) { id } @@ -414,6 +446,38 @@ const gql = { } `, + CREATE_PARAMETER_DICTIONARY: `#graphql + mutation CreateParameterDictionary($parameterDictionary: parameter_dictionary_insert_input!) { + createParameterDictionary: ${Queries.INSERT_PARAMETER_DICTIONARY}(object: $parameterDictionary) { + created_at + id + mission + parsed_json + version + } + } + `, + + CREATE_PARCEL: `#graphql + mutation CreateParcel($parcel: parcel_insert_input!) 
{ + createParcel: ${Queries.INSERT_PARCEL}(object: $parcel) { + id + } + } + `, + + CREATE_PARCEL_TO_PARAMETER_DICTIONARIES: `#graphql + mutation CreateParcelToParameterDictionaries($parcelToParameterDictionaries : [parcel_to_parameter_dictionary_insert_input!]!) { + ${Queries.INSERT_PARCEL_TO_PARAMETER_DICTIONARY}(objects: $parcelToParameterDictionaries) { + affected_rows + returning { + parcel_id + parameter_dictionary_id + } + } + } + `, + CREATE_PLAN: `#graphql mutation CreatePlan($plan: plan_insert_input!) { createPlan: ${Queries.INSERT_PLAN}(object: $plan) { @@ -571,6 +635,15 @@ const gql = { } `, + CREATE_SEQUENCE_ADAPTATION: `#graphql + mutation CreateCustomAdaptation($adaptation: sequence_adaptation_insert_input!) { + createSequenceAdaptation: ${Queries.INSERT_SEQUENCE_ADAPTATION}(object: $adaptation) { + adaptation + created_at + } + } + `, + CREATE_SIMULATION_TEMPLATE: `#graphql mutation CreateSimulationTemplate($simulationTemplateInsertInput: simulation_template_insert_input!) { ${Queries.INSERT_SIMULATION_TEMPLATE}(object: $simulationTemplateInsertInput) { @@ -687,6 +760,14 @@ const gql = { } `, + DELETE_CHANNEL_DICTIONARY: `#graphql + mutation DeleteChannelDictionary($id: Int!) { + deleteChannelDictionary: ${Queries.DELETE_CHANNEL_DICTIONARY}(id: $id) { + id + } + } + `, + DELETE_COMMAND_DICTIONARY: `#graphql mutation DeleteCommandDictionary($id: Int!) { deleteCommandDictionary: ${Queries.DELETE_COMMAND_DICTIONARY}(id: $id) { @@ -799,6 +880,30 @@ const gql = { } `, + DELETE_PARAMETER_DICTIONARY: `#graphql + mutation DeleteParameterDictionary($id: Int!) { + deleteParameterDictionary: ${Queries.DELETE_PARAMETER_DICTIONARY}(id: $id) { + id + } + } + `, + + DELETE_PARCEL: `#graphql + mutation DeleteParcel($id: Int!) { + deleteParcel: ${Queries.DELETE_PARCEL}(id: $id) { + id + } + } + `, + + DELETE_PARCEL_TO_PARAMETER_DICTIONARIES: `#graphql + mutation deleteParcelToParameterDictionaries($ids: [Int!]!) 
{ + ${Queries.DELETE_PARCEL_TO_PARAMETER_DICTIONARY}(where: { id: { _in: $ids } }) { + affected_rows + } + } + `, + DELETE_PLAN: `#graphql mutation DeletePlan($id: Int!) { deletePlan: ${Queries.DELETE_PLAN}(id: $id) { @@ -918,6 +1023,14 @@ const gql = { affected_rows } } +`, + + DELETE_SEQUENCE_ADAPTATION: `#graphql + mutation DeleteSequenceAdaptation($id: Int!) { + deleteSequenceAdaptation: ${Queries.DELETE_SEQUENCE_ADAPTATION}(id: $id) { + id + } + } `, DELETE_SIMULATION_TEMPLATE: `#graphql @@ -1002,11 +1115,11 @@ const gql = { activity_types: ${Queries.ACTIVITY_TYPES}(where: { model_id: { _eq: $modelId } }) { expansion_rules { activity_type - authoring_command_dict_id authoring_mission_model_id created_at expansion_logic id + parcel_id updated_at } name @@ -1062,7 +1175,6 @@ const gql = { query GetExpansionRule($id: Int!) { expansionRule: ${Queries.EXPANSION_RULE}(id: $id) { activity_type - authoring_command_dict_id authoring_mission_model_id created_at description @@ -1070,6 +1182,7 @@ const gql = { id name owner + parcel_id updated_at updated_by tags { @@ -1088,13 +1201,12 @@ const gql = { expansionRuns: ${Queries.EXPANSION_RUNS}(order_by: { id: desc }) { created_at expansion_set { - command_dict_id created_at id name + parcel_id } expanded_sequences { - edsl_string expanded_sequence id seq_id @@ -1182,6 +1294,29 @@ const gql = { } `, + GET_PARCEL: `#graphql + query GetParcel($id: Int!) { + parcel: ${Queries.PARCEL}(id: $id) { + channel_dictionary_id + command_dictionary_id + created_at + id + name + owner + sequence_adaptation_id + updated_at + } + } + `, + + GET_PARSED_CHANNEL_DICTIONARY: `#graphql + query GetParsedChannelDictionary($channelDictionaryId: Int!) { + ${Queries.CHANNEL_DICTIONARIES}(where: { id: { _eq: $channelDictionaryId } }) { + parsed_json + } + } + `, + GET_PARSED_COMMAND_DICTIONARY: `#graphql query GetParsedCommandDictionary($commandDictionaryId: Int!) 
{ ${Queries.COMMAND_DICTIONARIES}(where: { id: { _eq: $commandDictionaryId } }) { @@ -1190,6 +1325,14 @@ const gql = { } `, + GET_PARSED_PARAMETER_DICTIONARY: `#graphql + query GetParsedParameterDictionary($parameterDictionaryId: Int!) { + ${Queries.PARAMETER_DICTIONARIES}(where: { id: { _eq: $parameterDictionaryId } }) { + parsed_json + } + } + `, + GET_PERMISSIBLE_QUERIES: `#graphql query GetPermissibleQueries { queries: __schema { @@ -1425,6 +1568,14 @@ const gql = { } `, + GET_SEQUENCE_ADAPTATION: `#graphql + query GetSequenceAdaptation($sequence_adaptation_id: Int!) { + ${Queries.SEQUENCE_ADAPTATION}(where: { id: { _eq: $sequence_adaptation_id }}) { + adaptation + } + } + `, + GET_SIMULATION_DATASET_ID: `#graphql query GetSimulationDatasetId($datasetId: Int!) { ${Queries.SIMULATION_DATASETS}(where: {dataset_id: {_eq: $datasetId}}) { @@ -1510,12 +1661,12 @@ const gql = { GET_USER_SEQUENCE: `#graphql query GetUserSequence($id: Int!) { userSequence: ${Queries.USER_SEQUENCE}(id: $id) { - authoring_command_dict_id created_at definition id name owner + parcel_id updated_at } } @@ -1791,10 +1942,22 @@ const gql = { } `, + SUB_CHANNEL_DICTIONARIES: `#graphql + subscription SubChannelDictionaries { + ${Queries.CHANNEL_DICTIONARIES}(order_by: { id: desc }) { + created_at + id + mission + version + created_at + updated_at + } + } + `, + SUB_COMMAND_DICTIONARIES: `#graphql subscription SubCommandDictionaries { ${Queries.COMMAND_DICTIONARIES}(order_by: { id: desc }) { - command_types_typescript_path created_at id mission @@ -1918,7 +2081,6 @@ const gql = { subscription SubExpansionRules { expansionRules: ${Queries.EXPANSION_RULES}(order_by: { id: desc }) { activity_type - authoring_command_dict_id authoring_mission_model_id created_at description @@ -1926,6 +2088,7 @@ const gql = { id name owner + parcel_id updated_at updated_by tags { @@ -1958,21 +2121,21 @@ const gql = { SUB_EXPANSION_SETS: `#graphql subscription SubExpansionSets { expansionSets: 
${Queries.EXPANSION_SETS}(order_by: { id: desc }) { - command_dict_id created_at description expansion_rules { activity_type - authoring_command_dict_id authoring_mission_model_id expansion_logic id owner + parcel_id } id mission_model_id name owner + parcel_id updated_at updated_by } @@ -2057,6 +2220,41 @@ const gql = { } `, + SUB_PARAMETER_DICTIONARIES: `#graphql + subscription SubParameterDictionaries { + ${Queries.PARAMETER_DICTIONARIES}(order_by: { id: desc }) { + created_at + id + mission + updated_at + version + } + } + `, + + SUB_PARCELS: `#graphql + subscription SubParcels { + ${Queries.PARCELS}(order_by: { id: desc }) { + channel_dictionary_id + command_dictionary_id + created_at + id + name + sequence_adaptation_id + updated_at + } + } + `, + + SUB_PARCEL_TO_PARAMETER_DICTIONARIES: `#graphql + subscription SubParcelsToParameterDictionaries($parcelId: Int!) { + ${Queries.PARCEL_TO_PARAMETER_DICTIONARY}(where: {parcel_id: {_eq: $parcelId }}) { + parameter_dictionary_id + parcel_id + } + } + `, + SUB_PLANS_USER_WRITABLE: `#graphql subscription SubPlansUserWritable($userId: String!) { ${Queries.PLANS}(where: {_or: [{owner: {_eq: $userId}}, {collaborators: {collaborator: {_eq: $userId}}}]}, order_by: {id: desc}) { @@ -2540,6 +2738,17 @@ const gql = { } `, + SUB_SEQUENCE_ADAPTATIONS: `#graphql + subscription SubSequenceAdaptations { + ${Queries.SEQUENCE_ADAPTATION}(order_by: { id: desc }) { + adaptation + created_at + id + updated_by + } + } + `, + SUB_SIMULATION: `#graphql subscription SubSimulation($planId: Int!) 
{ ${Queries.SIMULATIONS}(where: { plan_id: { _eq: $planId } }, order_by: { id: desc }, limit: 1) { @@ -2670,12 +2879,12 @@ const gql = { SUB_USER_SEQUENCES: `#graphql subscription SubUserSequences { ${Queries.USER_SEQUENCES}(order_by: { id: desc }) { - authoring_command_dict_id created_at definition id name owner + parcel_id updated_at } } @@ -2876,6 +3085,16 @@ const gql = { } `, + UPDATE_PARCEL: `#graphql + mutation UpdateParcel($id: Int!, $parcel: parcel_set_input!) { + updateParcel: ${Queries.UPDATE_PARCEL}( + pk_columns: { id: $id }, _set: $parcel + ) { + id + } + } + `, + UPDATE_PLAN_SNAPSHOT: `#graphql mutation UpdatePlanSnapshot($snapshot_id: Int!, $planSnapshot: plan_snapshot_set_input!) { updatePlanSnapshot: ${Queries.UPDATE_PLAN_SNAPSHOT}( diff --git a/src/utilities/new-sequence-editor/command-dictionary.ts b/src/utilities/new-sequence-editor/command-dictionary.ts new file mode 100644 index 0000000000..ab9bcc4d51 --- /dev/null +++ b/src/utilities/new-sequence-editor/command-dictionary.ts @@ -0,0 +1,168 @@ +import { + parse, + type CommandDictionary, + type EnumMap, + type FswCommandArgument, + type FswCommandArgumentBoolean, + type FswCommandArgumentEnum, + type FswCommandArgumentFloat, + type FswCommandArgumentInteger, + type FswCommandArgumentNumeric, + type FswCommandArgumentRepeat, + type FswCommandArgumentUnsigned, + type FswCommandArgumentVarString, +} from '@nasa-jpl/aerie-ampcs'; +import { logError } from './logger'; + +/** + * Return a default argument for a given argument definition. 
+ */ +export function fswCommandArgDefault(fswCommandArg: FswCommandArgument, enumMap: EnumMap): string { + const { arg_type } = fswCommandArg; + + if (arg_type === 'boolean') { + const booleanArg = fswCommandArg as FswCommandArgumentBoolean; + const { default_value } = booleanArg; + + if (default_value !== null) { + return default_value.toLowerCase(); + } else { + return 'false'; + } + } else if (arg_type === 'enum') { + const enumArg = fswCommandArg as FswCommandArgumentEnum; + const enumSymbolValue = + enumMap[enumArg.enum_name]?.values[0]?.symbol ?? fswCommandArg.default_value ?? fswCommandArg.name; + return `"${enumSymbolValue}"` ?? 'UNKNOWN_ENUM'; + } else if (arg_type === 'fill') { + return '""'; + } else if (arg_type === 'fixed_string') { + return '""'; + } else if (arg_type === 'float') { + const floatArg = fswCommandArg as FswCommandArgumentFloat; + const { default_value, range } = floatArg; + + if (default_value !== null) { + return `${default_value}`; + } else if (range !== null) { + const { min } = range; + return `${min}`; + } else { + return '0.0'; + } + } else if (arg_type === 'integer') { + const intArg = fswCommandArg as FswCommandArgumentInteger; + const { default_value, range } = intArg; + + if (default_value !== null) { + return `${default_value}`; + } else if (range !== null) { + const { min } = range; + return `${min}`; + } else { + return '0'; + } + } else if (arg_type === 'numeric') { + const numericArg = fswCommandArg as FswCommandArgumentNumeric; + const { default_value, range } = numericArg; + + if (default_value !== null) { + return `${default_value}`; + } else if (range !== null) { + const { min } = range; + return `${min}`; + } else { + return '0.0'; + } + } else if (arg_type === 'repeat') { + const repeatArg = fswCommandArg as FswCommandArgumentRepeat; + const { repeat } = repeatArg; + + let defaultRepeatArg = '['; + let totalRepeatedArgs = 0; + + if (repeat) { + const { min } = repeat; + + do { + let repeatedArg = ''; + + for (let i 
= 0; i < repeat.arguments.length; ++i) { + const arg = repeat.arguments[i]; + const argValue = fswCommandArgDefault(arg, enumMap); + repeatedArg += `${argValue}`; + + if (i !== repeat.arguments.length - 1) { + repeatedArg += ' '; + } + } + + defaultRepeatArg += repeatedArg; + ++totalRepeatedArgs; + + // If we are going to add another repeated arg, make sure to add a comma. + if (min !== null && totalRepeatedArgs < min) { + defaultRepeatArg += ' '; + } + } while (min !== null && totalRepeatedArgs < min); + } + + defaultRepeatArg += ']'; + + return defaultRepeatArg; + } else if (arg_type === 'time') { + return '0'; + } else if (arg_type === 'unsigned') { + const numericArg = fswCommandArg as FswCommandArgumentUnsigned; + const { default_value, range } = numericArg; + + if (default_value !== null) { + return `${default_value}`; + } else if (range !== null) { + const { min } = range; + return `${min}`; + } else { + return '0'; + } + } else if (arg_type === 'var_string') { + const varStringArg = fswCommandArg as FswCommandArgumentVarString; + const { default_value } = varStringArg; + + if (default_value !== null) { + return default_value; + } else { + return '""'; + } + } else { + return ''; + } +} + +/** + * Return a parsed command dictionary from a file. 
+ */ +export async function parseCommandDictionaryFromFile( + files: FileList | null | undefined, +): Promise { + if (files) { + const file = files.item(0); + + if (file) { + try { + const fileText = await file.text(); + const commandDictionary = parse(fileText); + return commandDictionary; + } catch (e) { + const errorMessage = (e as Error).message; + logError(errorMessage); + return null; + } + } else { + logError('No file provided'); + return null; + } + } else { + logError('No file provided'); + return null; + } +} diff --git a/src/utilities/new-sequence-editor/extension-points.ts b/src/utilities/new-sequence-editor/extension-points.ts new file mode 100644 index 0000000000..f1ba233e3a --- /dev/null +++ b/src/utilities/new-sequence-editor/extension-points.ts @@ -0,0 +1,49 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { type ChannelDictionary, type FswCommandArgument, type ParameterDictionary } from '@nasa-jpl/aerie-ampcs'; +import type { SeqJson } from '@nasa-jpl/seq-json-schema/types'; + +// TODO: serialization +// replace parameter names with hex ids +// replace enum parameter values with the numeric value + +// + +export type ArgDelegator = { + [stem: string]: { + [arg: string]: + | undefined + | (( + argDef: FswCommandArgument, + paramDictionaries: ParameterDictionary[], + channelDictionary: ChannelDictionary | null, + precedingArgValues: string[], + ) => FswCommandArgument | undefined); + }; +}; + +export function getCustomArgDef( + stem: string, + dictArg: FswCommandArgument, + precedingArgs: string[], + parameterDictionaries: ParameterDictionary[], + channelDictionary: ChannelDictionary | null, +) { + const delegate = globalThis.ARG_DELEGATOR?.[stem]?.[dictArg.name]; + return delegate?.(dictArg, parameterDictionaries, channelDictionary, precedingArgs) ?? 
dictArg; +} + +export function customizeSeqJson( + seqJson: SeqJson, + parameterDictionaries: ParameterDictionary[], + channelDictionary: ChannelDictionary | null, +) { + return globalThis.TO_SEQ_JSON?.(seqJson, parameterDictionaries, channelDictionary) ?? seqJson; +} + +export function customizeSeqJsonParsing( + seqJson: SeqJson, + parameterDictionaries: ParameterDictionary[], + channelDictionary: ChannelDictionary | null, +) { + return globalThis.FROM_SEQ_JSON?.(seqJson, parameterDictionaries, channelDictionary) ?? seqJson; +} diff --git a/src/utilities/new-sequence-editor/from-seq-json.test.ts b/src/utilities/new-sequence-editor/from-seq-json.test.ts new file mode 100644 index 0000000000..da2e68facd --- /dev/null +++ b/src/utilities/new-sequence-editor/from-seq-json.test.ts @@ -0,0 +1,479 @@ +import type { SeqJson } from '@nasa-jpl/seq-json-schema/types'; +import { describe, expect, it } from 'vitest'; +import { seqJsonToSequence } from './from-seq-json'; + +describe('from-seq-json.ts', () => { + it('converts a seq json id and metadata to sequence', () => { + const seqJson: SeqJson = { + id: 'test', + metadata: { + onboard_name: 'test.mod', + onboard_path: '/eng', + other_arbitrary_metadata: 'test_metadata', + }, + }; + const sequence = seqJsonToSequence(seqJson, [], null); + const expectedSequence = `@ID "test" +@METADATA "onboard_name" "test.mod" +@METADATA "onboard_path" "/eng" +@METADATA "other_arbitrary_metadata" "test_metadata" +`; + expect(sequence).toEqual(expectedSequence); + }); + + it('converts a seq json LGO to sequence', () => { + const seqJson: SeqJson = { + id: 'test', + metadata: { + lgo: true, + onboard_name: 'test.mod', + onboard_path: '/eng', + other_arbitrary_metadata: 'test_metadata', + }, + steps: [ + { + args: [], + stem: 'FSW_CMD_3', + time: { + type: 'COMMAND_COMPLETE', + }, + type: 'command', + }, + ], + }; + const sequence = seqJsonToSequence(seqJson, [], null); + const expectedSequence = `@ID "test" +@METADATA "onboard_name" 
"test.mod" +@METADATA "onboard_path" "/eng" +@METADATA "other_arbitrary_metadata" "test_metadata" + +@LOAD_AND_GO +C FSW_CMD_3 +`; + expect(sequence).toEqual(expectedSequence); + }); + + it('converts a seq json variables to sequence', () => { + const seqJson: SeqJson = { + id: 'testVariable', + + locals: [ + { + name: 'L00INT', + type: 'INT', + }, + { + name: 'L01STR', + type: 'STRING', + }, + { + name: 'L02FLT', + type: 'FLOAT', + }, + { + name: 'L03UINT', + type: 'UINT', + }, + { + name: 'L01ENUM', + type: 'ENUM', + }, + ], + metadata: { + lgo: false, + }, + parameters: [ + { + name: 'L00INT', + type: 'INT', + }, + { + name: 'L01STR', + type: 'STRING', + }, + { + name: 'L02FLT', + type: 'FLOAT', + }, + { + name: 'L03UINT', + type: 'UINT', + }, + { + name: 'L01ENUM', + type: 'ENUM', + }, + ], + }; + const sequence = seqJsonToSequence(seqJson, [], null); + const expectedSequence = `@ID "testVariable" +@INPUT_PARAMS L00INT L01STR L02FLT L03UINT L01ENUM +@LOCALS L00INT L01STR L02FLT L03UINT L01ENUM +`; + expect(sequence).toEqual(expectedSequence); + }); + + it('converts a seq json file to a correct sequence', () => { + const seqJson: SeqJson = { + id: '42', + metadata: {}, + steps: [ + { + args: [ + { + type: 'boolean', + value: true, + }, + { + type: 'hex', + value: '0xFF', + }, + { + type: 'string', + value: 'Hello', + }, + { + type: 'symbol', + value: 'World', + }, + { + type: 'repeat', + value: [ + [ + { + type: 'boolean', + value: false, + }, + { + type: 'hex', + value: '0xAA', + }, + { + type: 'string', + value: 'Foo', + }, + { + type: 'symbol', + value: 'BAR', + }, + ], + [ + { + type: 'boolean', + value: true, + }, + { + type: 'hex', + value: '0xBB', + }, + { + type: 'string', + value: 'Baz', + }, + { + type: 'symbol', + value: 'BAT', + }, + ], + ], + }, + ], + stem: 'FSW_CMD_0', + time: { + tag: '2024-001T00:00:00', + type: 'ABSOLUTE', + }, + type: 'command', + }, + { + args: [{ type: 'number', value: 22 }], + stem: 'FSW_CMD_1', + time: { + tag: '00:01:00', 
+ type: 'COMMAND_RELATIVE', + }, + type: 'command', + }, + { + args: [{ type: 'string', value: 'Fab' }], + stem: 'FSW_CMD_2', + time: { + tag: '15:00:00', + type: 'EPOCH_RELATIVE', + }, + type: 'command', + }, + { + args: [], + stem: 'FSW_CMD_3', + time: { + type: 'COMMAND_COMPLETE', + }, + type: 'command', + }, + ], + }; + const sequence = seqJsonToSequence(seqJson, [], null); + const expectedSequence = `@ID "42" + +A2024-001T00:00:00 FSW_CMD_0 TRUE 0xFF "Hello" "World" [FALSE 0xAA "Foo" "BAR" TRUE 0xBB "Baz" "BAT"] +R00:01:00 FSW_CMD_1 22 +E15:00:00 FSW_CMD_2 "Fab" +C FSW_CMD_3 +`; + expect(sequence).toEqual(expectedSequence); + }); + + it('converts a seq json command model and metadata to sequence', () => { + const seqJson: SeqJson = { + id: 'testCommandModeling', + metadata: { + onboard_name: 'test.mod', + }, + steps: [ + { + args: [ + { + type: 'string', + value: 'test', + }, + ], + metadata: { + Key1: 'Value1', + Key2: 'Value2', + }, + models: [ + { + offset: '00:00:00', + value: 0, + variable: 'temp', + }, + { + offset: '00:00:01', + value: true, + variable: 'temp1', + }, + { + offset: '00:00:02', + value: false, + variable: 'temp2', + }, + { + offset: '00:00:03', + value: 'NULL', + variable: 'temp4', + }, + ], + stem: 'ECHO', + time: { + type: 'COMMAND_COMPLETE', + }, + type: 'command', + }, + ], + }; + const sequence = seqJsonToSequence(seqJson, [], null); + const expectedSequence = `@ID "testCommandModeling" +@METADATA "onboard_name" "test.mod" + +C ECHO "test" +@METADATA "Key1" "Value1" +@METADATA "Key2" "Value2" +@MODEL "temp" 0 "00:00:00" +@MODEL "temp1" TRUE "00:00:01" +@MODEL "temp2" FALSE "00:00:02" +@MODEL "temp4" "NULL" "00:00:03" +`; + expect(sequence).toEqual(expectedSequence); + }); + + it('converts a seq json description to sequence', () => { + const seqJson: SeqJson = { + id: 'testDescription', + metadata: {}, + steps: [ + { + args: [ + { + type: 'string', + value: 'TEST1', + }, + ], + description: 'a description', + stem: 'ECHO', + time: { + 
type: 'COMMAND_COMPLETE', + }, + type: 'command', + }, + { + args: [], + description: 'fsw command description', + models: [ + { + offset: '00:00:00', + value: true, + variable: 'cmd', + }, + ], + stem: 'FSW_CMD', + time: { + tag: '00:00:01', + type: 'COMMAND_RELATIVE', + }, + type: 'command', + }, + { + args: [], + stem: 'FSW_CMD_1', + time: { + type: 'COMMAND_COMPLETE', + }, + type: 'command', + }, + { + args: [ + { + type: 'number', + value: 10, + }, + { + type: 'string', + value: 'ENUM', + }, + ], + description: 'fsw cmd 2 description', + stem: 'FSW_CMD_2', + time: { + type: 'COMMAND_COMPLETE', + }, + type: 'command', + }, + ], + }; + + const sequence = seqJsonToSequence(seqJson, [], null); + const expectedSequence = `@ID "testDescription" + +C ECHO "TEST1" # a description +R00:00:01 FSW_CMD # fsw command description +@MODEL "cmd" TRUE "00:00:00" +C FSW_CMD_1 +C FSW_CMD_2 10 "ENUM" # fsw cmd 2 description +`; + expect(sequence).toEqual(expectedSequence); + }); + + it('converts a seq json immediate commands to sequence', () => { + const seqJson: SeqJson = { + id: 'testImmediate', + immediate_commands: [ + { + args: [ + { type: 'string', value: '1' }, + { type: 'number', value: 2 }, + { type: 'number', value: 3.0 }, + ], + description: 'immediate command', + metadata: {}, + stem: 'IC', + }, + { + args: [], + description: 'noop command, no arguments', + metadata: { processor: 'VC1A' }, + stem: 'NOOP', + }, + ], + metadata: {}, + }; + + const sequence = seqJsonToSequence(seqJson, [], null); + const expectedSequence = `@ID "testImmediate" + +@IMMEDIATE +IC "1" 2 3 # immediate command +NOOP # noop command, no arguments +@METADATA "processor" "VC1A" +`; + expect(sequence).toEqual(expectedSequence); + }); + + it('converts a seq json hardware commands to sequence', () => { + const seqJson: SeqJson = { + hardware_commands: [ + { + description: 'hardware command', + metadata: { + foo: 'bar', + hardware: 'HWC', + }, + stem: 'HWC', + }, + { + stem: 'HWC2', + }, + ], + id: 
'testHardware', + metadata: {}, + }; + + const sequence = seqJsonToSequence(seqJson, [], null); + const expectedSequence = `@ID "testHardware" + +@HARDWARE +HWC # hardware command +@METADATA "foo" "bar" +@METADATA "hardware" "HWC" +HWC2`; + expect(sequence).toEqual(expectedSequence); + }); + + it('converts a seq json time tags to sequence', () => { + const seqJson: SeqJson = { + id: 'testTime', + metadata: {}, + steps: [ + { + args: [], + stem: 'FSA_CMD', + time: { + tag: '2020-173T20:00:00.000', + type: 'ABSOLUTE', + }, + type: 'command', + }, + { + args: [], + stem: 'FSR_CMD', + time: { tag: '00:00:10.000', type: 'COMMAND_RELATIVE' }, + type: 'command', + }, + { + args: [ + { + type: 'number', + value: 10, + }, + { + type: 'string', + value: 'ENUM', + }, + ], + stem: 'FSE_CMD', + time: { tag: '-00:00:01.000', type: 'EPOCH_RELATIVE' }, + type: 'command', + }, + ], + }; + + const sequence = seqJsonToSequence(seqJson, [], null); + const expectedSequence = `@ID "testTime" + +A2020-173T20:00:00.000 FSA_CMD +R00:00:10.000 FSR_CMD +E-00:00:01.000 FSE_CMD 10 "ENUM" +`; + expect(sequence).toEqual(expectedSequence); + }); +}); diff --git a/src/utilities/new-sequence-editor/from-seq-json.ts b/src/utilities/new-sequence-editor/from-seq-json.ts new file mode 100644 index 0000000000..c78443d515 --- /dev/null +++ b/src/utilities/new-sequence-editor/from-seq-json.ts @@ -0,0 +1,242 @@ +import type { ChannelDictionary, ParameterDictionary } from '@nasa-jpl/aerie-ampcs'; +import type { + Args, + BooleanArgument, + Description, + HexArgument, + Metadata, + Model, + NumberArgument, + SeqJson, + StringArgument, + SymbolArgument, + Time, + VariableDeclaration, +} from '@nasa-jpl/seq-json-schema/types'; +import { quoteEscape } from '../../components/sequencing/form/utils'; +import { customizeSeqJsonParsing } from './extension-points'; +import { logError } from './logger'; + +/** + * Transform a sequence JSON time to it's sequence string form. 
+ */ +export function seqJsonTimeToSequence(time: Time): string { + switch (time.type) { + case 'ABSOLUTE': + return `A${time?.tag ?? ''}`; + case 'COMMAND_COMPLETE': + return 'C'; + case 'COMMAND_RELATIVE': + return `R${time?.tag ?? ''}`; + case 'EPOCH_RELATIVE': + return `E${time?.tag ?? ''}`; + default: + return ''; + } +} + +/** + * Transform a base argument (non-repeat) into a string. + */ +export function seqJsonBaseArgToSequence( + arg: StringArgument | NumberArgument | BooleanArgument | SymbolArgument | HexArgument, +): string { + switch (arg.type) { + case 'string': + case 'symbol': + return `"${arg.value}"`; + case 'boolean': + return arg.value ? 'TRUE' : 'FALSE'; + default: + return `${arg.value}`; + } +} + +export function seqJsonArgsToSequence(args: Args): string { + let result = ''; + + for (const arg of args) { + result += ' '; + if (arg.type === 'repeat') { + if (Array.isArray(arg.value) && arg.value.length) { + let repeatResult = ''; + for (const repeatArgSet of arg.value) { + if (Array.isArray(repeatArgSet)) { + for (const repeatArg of repeatArgSet) { + repeatResult += ` ${seqJsonBaseArgToSequence(repeatArg)}`; + } + repeatResult = repeatResult.trim(); + } else { + logError('Repeat arg set value is not an array'); + } + } + result += `[${repeatResult}]`; + } else { + logError('Repeat arg value is not an array'); + } + } else { + result += seqJsonBaseArgToSequence(arg); + } + result = result.trimEnd(); + } + + return result.trim().length > 0 ? 
` ${result.trim()}` : ''; +} + +export function seqJsonModelsToSequence(models: Model[]): string { + // MODEL directives are one per line, the last new line is to start the next token + const modelString = models + .map(model => { + let formattedValue: Model['value'] = model.value; + if (typeof model.value === 'string') { + formattedValue = quoteEscape(model.value); + } else if (typeof model.value === 'boolean') { + formattedValue = model.value.toString().toUpperCase(); + } + return `@MODEL ${typeof model.variable === 'string' ? quoteEscape(String(model.variable)) : `"${model.variable}"`} ${formattedValue} ${quoteEscape(model.offset)}`; + }) + .join('\n'); + + return modelString.length > 0 ? `${modelString}\n` : ''; +} + +export function seqJsonMetadataToSequence(metadata: Metadata): string { + // METADATA directives are one per line, the last new line is to start the next token + const metaDataString = Object.entries(metadata) + .map( + ([key, value]: [key: string, value: unknown]) => + `@METADATA ${quoteEscape(key)} ${JSON.stringify(value, null, 2)}`, + ) + .join('\n'); + return metaDataString.length > 0 ? `${metaDataString}\n` : ''; +} + +function seqJsonVariableToSequence( + variables: [VariableDeclaration, ...VariableDeclaration[]], + type: 'INPUT_PARAMS' | 'LOCALS', +): string { + return `@${type} ${variables.map(variable => variable.name).join(' ')}\n`; +} + +function seqJsonDescriptionToSequence(description: Description): string { + return ` # ${description}\n`; +} + +/** + * Transforms a sequence JSON to a sequence string. 
+ */ +export function seqJsonToSequence( + seqJson: SeqJson | null, + parameterDictionaries: ParameterDictionary[], + channelDictionary: ChannelDictionary | null, +): string { + const sequence: string[] = []; + + if (seqJson) { + customizeSeqJsonParsing(seqJson, parameterDictionaries, channelDictionary); + + // ID + sequence.push(`@ID "${seqJson.id}"\n`); + + //input params + if (seqJson.parameters) { + sequence.push(seqJsonVariableToSequence(seqJson.parameters, 'INPUT_PARAMS')); + } + + //locals + if (seqJson.locals) { + sequence.push(seqJsonVariableToSequence(seqJson.locals, 'LOCALS')); + } + + if (seqJson.metadata) { + // remove lgo from metadata if it exists + sequence.push( + seqJsonMetadataToSequence( + Object.entries(seqJson.metadata) + .filter(([key]) => key !== 'lgo') + .reduce((obj, [key, value]) => ({ ...obj, [key]: value }), {}) as Metadata, + ), + ); + } + + // Load and Go + if (seqJson.metadata.lgo) { + sequence.push(`\n@LOAD_AND_GO`); + } + + // FSW Commands + if (seqJson.steps) { + sequence.push(`\n`); + for (const step of seqJson.steps) { + if (step.type === 'command') { + const time = seqJsonTimeToSequence(step.time); + const args = seqJsonArgsToSequence(step.args); + const metadata = step.metadata ? seqJsonMetadataToSequence(step.metadata) : ''; + const models = step.models ? seqJsonModelsToSequence(step.models) : ''; + const description = step.description ? 
seqJsonDescriptionToSequence(step.description) : ''; + + let commandString = `${time} ${step.stem}${args}${description}`; + // add a new line if on doesn't exit at the end of the commandString + if (!commandString.endsWith('\n')) { + commandString += '\n'; + } + // Add modeling data if it exists + commandString += `${metadata}${models}`; + sequence.push(commandString); + } + } + } + + // Immediate Commands + if (seqJson.immediate_commands) { + sequence.push(`\n`); + sequence.push(`@IMMEDIATE\n`); + for (const icmd of seqJson.immediate_commands) { + const args = seqJsonArgsToSequence(icmd.args); + const description = icmd.description ? seqJsonDescriptionToSequence(icmd.description) : ''; + const metadata = icmd.metadata ? seqJsonMetadataToSequence(icmd.metadata) : ''; + sequence.push(`${icmd.stem}${args}${description}${metadata}`); + } + } + + // hardware commands + if (seqJson.hardware_commands) { + sequence.push(`\n`); + sequence.push(`@HARDWARE\n`); + for (const hdw of seqJson.hardware_commands) { + const description = hdw.description ? seqJsonDescriptionToSequence(hdw.description) : ''; + const metadata = hdw.metadata ? seqJsonMetadataToSequence(hdw.metadata) : ''; + sequence.push(`${hdw.stem}${description}${metadata}`); + } + } + } + + return sequence.join(''); +} + +/** + * Return a parsed sequence JSON from a file. 
+ */ +export async function parseSeqJsonFromFile(files: FileList | null | undefined): Promise { + if (files) { + const file = files.item(0); + + if (file) { + try { + const fileText = await file.text(); + const seqJson = JSON.parse(fileText); + return seqJson; + } catch (e) { + const errorMessage = (e as Error).message; + logError(errorMessage); + return null; + } + } else { + logError('No file provided'); + return null; + } + } else { + logError('No file provided'); + return null; + } +} diff --git a/src/utilities/new-sequence-editor/grammar.test.ts b/src/utilities/new-sequence-editor/grammar.test.ts new file mode 100644 index 0000000000..40d372a1d1 --- /dev/null +++ b/src/utilities/new-sequence-editor/grammar.test.ts @@ -0,0 +1,64 @@ +import { testTree } from '@lezer/generator/dist/test'; +import { readFileSync, readdirSync } from 'fs'; +import * as path from 'path'; +import { fileURLToPath } from 'url'; +import { describe, it } from 'vitest'; +import { SeqLanguage } from '../codemirror'; + +const caseDir = path.dirname(fileURLToPath(import.meta.url)) + '/../../tests/mocks/sequencing/grammar-cases'; + +for (const file of readdirSync(caseDir)) { + if (!/\.txt$/.test(file)) { + continue; + } + + const name = /^[^.]*/.exec(file)![0]; + describe(name, () => { + for (const { name, run } of fileTests(readFileSync(path.join(caseDir, file), 'utf8'), file)) { + it(name, () => run(SeqLanguage.parser)); + } + }); +} + +// modified version of function in @lezer/generator/dist/test +// lezer's fileTests strips whitespace off test strings which is bad for a whitespace delimited grammar +function fileTests(file: string, fileName: string, mayIgnore?: any) { + const caseExpr = /\s*#[ \t]*(.*)(?:\r\n|\r|\n)([^]*?)==+>([^]*?)(?:$|(?:\r\n|\r|\n)+(?=#))/gy; + const tests: any[] = []; + let lastIndex = 0; + const _loop_1 = function () { + const m = caseExpr.exec(file); + if (!m) { + throw new Error('Unexpected file format in '.concat(fileName, ' around\n\n').concat(`${fileName}: 
${lastIndex}`)); + } + const text = m[2].trimStart(), + expected = m[3].trim(); + const _a = /(.*?)(\{.*?\})?$/.exec(m[1]), + name_2 = _a?.[1], + configStr = _a?.[2]; + const config = configStr ? JSON.parse(configStr) : null; + const strict = !/⚠|\.\.\./.test(expected); + tests.push({ + config: config, + configStr: configStr, + expected: expected, + name: name_2, + run: function (parser: any) { + testTree(parser.parse(text), expected, mayIgnore); + }, + strict: strict, + text: text, + }); + lastIndex = m.index + m[0].length; + if (lastIndex === file.length) { + return 'break'; + } + }; + for (;;) { + const state_1 = _loop_1(); + if (state_1 === 'break') { + break; + } + } + return tests; +} diff --git a/src/utilities/new-sequence-editor/logger.ts b/src/utilities/new-sequence-editor/logger.ts new file mode 100644 index 0000000000..b72316c214 --- /dev/null +++ b/src/utilities/new-sequence-editor/logger.ts @@ -0,0 +1,7 @@ +export function logError(error: string): void { + console.trace(error); +} + +export function logInfo(message: string): void { + console.info(message); +} diff --git a/src/utilities/new-sequence-editor/seq-json-linter.ts b/src/utilities/new-sequence-editor/seq-json-linter.ts new file mode 100644 index 0000000000..ec7c5efe28 --- /dev/null +++ b/src/utilities/new-sequence-editor/seq-json-linter.ts @@ -0,0 +1,89 @@ +import { linter, type Diagnostic } from '@codemirror/lint'; +import type { Extension, Text } from '@codemirror/state'; +import type { CommandDictionary } from '@nasa-jpl/aerie-ampcs'; +// @ts-expect-error library does not include type declarations +import { parse as jsonSourceMapParse } from 'json-source-map'; + +type JsonSourceMapPointerPosition = { + column: number; + line: number; + pos: number; +}; + +type JsonSourceMapPointer = { + key: JsonSourceMapPointerPosition; + keyEnd: JsonSourceMapPointerPosition; + value: JsonSourceMapPointerPosition; + valueEnd: JsonSourceMapPointerPosition; +}; + +/** + * Helper for getting an error position 
of JSON.prase throws a SyntaxError. + * @see https://github.com/codemirror/lang-json/blob/main/src/lint.ts + */ +function getErrorPosition(error: SyntaxError, doc: Text): number { + let m; + + if ((m = error.message.match(/at position (\d+)/))) { + return Math.min(+m[1], doc.length); + } + + if ((m = error.message.match(/at line (\d+) column (\d+)/))) { + return Math.min(doc.line(+m[1]).from + +m[2] - 1, doc.length); + } + + return 0; +} + +/** + * Linter function that returns a Code Mirror extension function. + * Can be optionally called with a command dictionary so it's available during linting. + */ +export function seqJsonLinter(commandDictionary: CommandDictionary | null = null): Extension { + return linter(view => { + const diagnostics: Diagnostic[] = []; + + try { + const text = view.state.doc.toString(); + const sourceMap = jsonSourceMapParse(text); + + if (commandDictionary) { + for (const [key, pointer] of Object.entries(sourceMap.pointers)) { + const stemMatch = key.match(/\/steps\/\d+\/stem/); + + if (stemMatch) { + const stemValue = view.state.doc.sliceString(pointer.value.pos, pointer.valueEnd.pos); + const stemValueNoQuotes = stemValue.replaceAll('"', ''); + const hasFswCommand = commandDictionary.fswCommandMap[stemValueNoQuotes] ?? false; + const hasHwCommand = commandDictionary.hwCommandMap[stemValueNoQuotes] ?? 
false; + const hasCommand = hasFswCommand || hasHwCommand; + + if (!hasCommand) { + diagnostics.push({ + actions: [], + from: pointer.value.pos, + message: 'Command not found', + severity: 'error', + to: pointer.valueEnd.pos, + }); + } + } + } + } + } catch (e) { + if (!(e instanceof SyntaxError)) { + throw e; + } + const pos = getErrorPosition(e, view.state.doc); + + diagnostics.push({ + from: pos, + message: e.message, + severity: 'error', + to: pos, + }); + } + + return diagnostics; + }); +} diff --git a/src/utilities/new-sequence-editor/sequence-completion.ts b/src/utilities/new-sequence-editor/sequence-completion.ts new file mode 100644 index 0000000000..aca98d652c --- /dev/null +++ b/src/utilities/new-sequence-editor/sequence-completion.ts @@ -0,0 +1,299 @@ +import type { Completion, CompletionContext, CompletionResult } from '@codemirror/autocomplete'; +import { syntaxTree } from '@codemirror/language'; +import type { ChannelDictionary, CommandDictionary, ParameterDictionary } from '@nasa-jpl/aerie-ampcs'; +import { fswCommandArgDefault } from './command-dictionary'; +import { getCustomArgDef } from './extension-points'; + +type CursorInfo = { + isAtLineComment: boolean; + isAtSymbolBefore: boolean; + isBeforeHDWCommands: boolean; + isBeforeImmedOrHDWCommands: boolean; + isTimeTagBefore: boolean; + position: number; +}; + +/** + * Completion function that returns a Code Mirror extension function. + * Can be optionally called with a command dictionary so it's available for completion. 
+ */ +export function sequenceCompletion( + channelDictionary: ChannelDictionary | null = null, + commandDictionary: CommandDictionary | null = null, + parameterDictionaries: ParameterDictionary[], +) { + return (context: CompletionContext): CompletionResult | null => { + const nodeBefore = syntaxTree(context.state).resolveInner(context.pos, -1); + const nodeCurrent = syntaxTree(context.state).resolveInner(context.pos, 0); + const baseNode = syntaxTree(context.state).topNode; + + if (nodeBefore?.parent?.name === 'Args') { + // TODO: Handle argument completions. + return null; + } + + const word = context.matchBefore(/\w*/); + + if (word) { + if (word.from === word.to && !context.explicit) { + return null; + } + + const timeTagCompletions: Completion[] = []; + const enumerationCompletions: Completion[] = []; + const fswCommandsCompletions: Completion[] = []; + const hwCommandsCompletions: Completion[] = []; + const directivesCompletions: Completion[] = []; + + const cursor: CursorInfo = { + isAtLineComment: nodeCurrent.name === 'LineComment' || nodeBefore.name === 'LineComment', + isAtSymbolBefore: isAtTyped(context.state.doc.toString(), word), + isBeforeHDWCommands: context.pos < (baseNode.getChild('HardwareCommands')?.from ?? Infinity), + isBeforeImmedOrHDWCommands: + context.pos < + (baseNode.getChild('ImmediateCommands')?.from ?? baseNode.getChild('HardwareCommands')?.from ?? Infinity), + isTimeTagBefore: nodeBefore.parent?.getChild('TimeTag') ? true : false, + position: context.pos, + }; + + if (cursor.isBeforeImmedOrHDWCommands) { + directivesCompletions.push( + { + apply: `${cursor.isAtSymbolBefore ? '' : '@'}ID ""`, + info: 'Sequence ID', + label: '@ID', + section: 'Directives', + type: 'keyword', + }, + { + apply: `${cursor.isAtSymbolBefore ? '' : '@'}LOAD_AND_GO`, + info: 'Set Sequence as a Load and Go Sequence', + label: '@LOAD_AND_GO', + section: 'Directives', + type: 'keyword', + }, + { + apply: `${cursor.isAtSymbolBefore ? 
'' : '@'}INPUT_PARAMS VALUE`, + info: 'List of Input Parameters', + label: '@INPUT_PARAMS', + section: 'Directives', + type: 'keyword', + }, + { + apply: `${cursor.isAtSymbolBefore ? '' : '@'}LOCALS VALUE`, + info: 'List of Local Variables', + label: '@LOCALS', + section: 'Directives', + type: 'keyword', + }, + { + apply: `${cursor.isAtSymbolBefore ? '' : '@'}HARDWARE`, + info: 'A HARDWARE Directive', + label: '@HARDWARE', + section: 'Directives', + type: 'keyword', + }, + { + apply: `${cursor.isAtSymbolBefore ? '' : '@'}IMMEDIATE`, + info: 'A IMMEDIATE Directive', + label: '@IMMEDIATE', + section: 'Directives', + type: 'keyword', + }, + ); + + if (!cursor.isTimeTagBefore) { + timeTagCompletions.push( + { + apply: 'A0000-000T00:00:00 ', + info: 'Execute command at an absolute time', + label: `A (absolute)`, + section: 'Time Tags', + type: 'keyword', + }, + { + apply: 'C ', + info: 'Execute command after the previous command completes', + label: 'C (command complete)', + section: 'Time Tags', + type: 'keyword', + }, + { + apply: 'E+00:00:00 ', + info: 'Execute command at an offset from an epoch', + label: 'E (epoch)', + section: 'Time Tags', + type: 'keyword', + }, + { + apply: 'R1 ', + info: 'Execute command at an offset from the previous command', + label: 'R (relative)', + section: 'Time Tags', + type: 'keyword', + }, + ); + } + } + + // Directives. + directivesCompletions.push( + { + apply: `${isAtTyped(context.state.doc.toString(), word) ? '' : '@'}METADATA "Key" "Value"`, + info: 'Any key-value pairs', + label: `@METADATA`, + section: 'Directives', + type: 'keyword', + }, + { + apply: `${isAtTyped(context.state.doc.toString(), word) ? 
'' : '@'}MODEL "Variable" 0 "Offset"`, + info: 'List of Local Variables', + label: '@MODEL', + section: 'Directives', + type: 'keyword', + }, + ); + + // If TimeTag has not been entered by the user wait for 2 characters before showing the command completions list + // If TimeTag has been entered show the completion list when 1 character has been entered + if (word.text.length > (cursor.isTimeTagBefore || cursor.isBeforeImmedOrHDWCommands === false ? 0 : 1)) { + fswCommandsCompletions.push( + ...generateCommandCompletions(channelDictionary, commandDictionary, cursor, parameterDictionaries), + ); + } + + // TODO: Move to a function like generateCommandCompletions + hwCommandsCompletions.push(...generateHardwareCompletions(commandDictionary, cursor)); + + // + // Enumerations. + // TODO: Make context aware. + // for (const enumeration of commandDictionary.enums) { + // for (const enumValue of enumeration.values) { + // const { symbol } = enumValue; + + // enumerationCompletions.push({ + // label: symbol, + // section: 'Enumerations', + // type: 'labelName', + // }); + // } + // } + + const globals = globalThis.GLOBALS; + const globalCompletions: Completion[] = []; + + if (globals) { + for (const global of globals) { + globalCompletions.push({ + apply: global.name, + info: global.type, + label: global.name, + section: 'Globals', + type: 'keyword', + }); + } + } + + return { + from: word.from, + options: [ + ...directivesCompletions, + ...timeTagCompletions, + ...enumerationCompletions, + ...fswCommandsCompletions, + ...hwCommandsCompletions, + ...globalCompletions, + ], + }; + } + + return null; + }; +} + +function generateCommandCompletions( + channelDictionary: ChannelDictionary | null, + commandDictionary: CommandDictionary | null, + cursor: CursorInfo, + parameterDictionaries: ParameterDictionary[], +): Completion[] { + if (commandDictionary === null) { + return []; + } + + // if cursor is at the LineComment/Description don't show the command completions list + if 
(cursor.isAtLineComment || !cursor.isBeforeHDWCommands) { + return []; + } + + const fswCommandsCompletions: Completion[] = []; + for (const fswCommand of commandDictionary.fswCommands) { + const { description, stem, arguments: args } = fswCommand; + let apply = stem; + + if (args.length) { + const argDefaults: string[] = []; + args.forEach(arg => { + argDefaults.push( + fswCommandArgDefault( + getCustomArgDef(stem, arg, argDefaults.slice(), parameterDictionaries, channelDictionary), + commandDictionary.enumMap, + ), + ); + }); + const argsStr = argDefaults.join(' '); + apply = `${stem} ${argsStr} `; + } + + if (!cursor.isTimeTagBefore && cursor.isBeforeImmedOrHDWCommands) { + apply = 'C ' + apply; + } + + fswCommandsCompletions.push({ + apply, + info: description, + label: stem, + section: 'Flight Software Commands', + type: 'function', + }); + } + return fswCommandsCompletions; +} + +function generateHardwareCompletions(commandDictionary: CommandDictionary | null, cursor: CursorInfo): Completion[] { + if (commandDictionary === null) { + return []; + } + + // if cursor is at the LineComment/Description or before HDWCommands don't show the completions list + if (cursor.isAtLineComment || cursor.isBeforeHDWCommands) { + return []; + } + + const hwCommandsCompletions: Completion[] = []; + for (const hwCommand of commandDictionary.hwCommands) { + const { description, stem } = hwCommand; + + hwCommandsCompletions.push({ + apply: stem, + info: description, + label: stem, + section: 'Hardware Commands', + type: 'function', + }); + } + return hwCommandsCompletions; +} + +function isAtTyped(docString: string, word: { from: number; text: string; to: number }): boolean { + if (!word || word.from === undefined || word.text === undefined) { + return false; + } + const from = word.from - 1; + const docStringSlice = docString.slice(from, from + 1); + if (docStringSlice === undefined) { + return false; + } + return docStringSlice === '@'; +} diff --git 
a/src/utilities/new-sequence-editor/sequence-linter.ts b/src/utilities/new-sequence-editor/sequence-linter.ts new file mode 100644 index 0000000000..757c2a120d --- /dev/null +++ b/src/utilities/new-sequence-editor/sequence-linter.ts @@ -0,0 +1,1423 @@ +import { syntaxTree } from '@codemirror/language'; +import { linter, type Diagnostic } from '@codemirror/lint'; +import type { Extension } from '@codemirror/state'; +import type { SyntaxNode, Tree } from '@lezer/common'; +import type { + ChannelDictionary, + CommandDictionary, + EnumMap, + FswCommand, + FswCommandArgument, + HwCommand, + ParameterDictionary, +} from '@nasa-jpl/aerie-ampcs'; +import { closest, distance } from 'fastest-levenshtein'; +import { addDefaultArgs, quoteEscape } from '../../components/sequencing/form/utils'; + +import type { VariableDeclaration } from '@nasa-jpl/seq-json-schema/types'; +import type { EditorView } from 'codemirror'; +import { getCustomArgDef } from './extension-points'; +import { TOKEN_COMMAND, TOKEN_ERROR, TOKEN_REPEAT_ARG } from './sequencer-grammar-constants'; +import { + ABSOLUTE_TIME, + EPOCH_SIMPLE, + EPOCH_TIME, + RELATIVE_SIMPLE, + RELATIVE_TIME, + isTimeBalanced, + testTime, +} from './time-utils'; +import { getChildrenNode, getDeepestNode, getFromAndTo } from './tree-utils'; + +const KNOWN_DIRECTIVES = [ + 'LOAD_AND_GO', + 'ID', + 'IMMEDIATE', + 'HARDWARE', + 'LOCALS', + 'INPUT_PARAMS', + 'MODEL', + 'METADATA', +].map(name => `@${name}`); + +const MAX_ENUMS_TO_SHOW = 20; + +export function getAllEnumSymbols(enumMap: EnumMap, enumName: string): undefined | string[] { + return enumMap[enumName]?.values.map(({ symbol }) => symbol); +} + +function closestStrings(value: string, potentialMatches: string[], n: number) { + const distances = potentialMatches.map(s => ({ distance: distance(s, value), s })); + distances.sort((a, b) => a.distance - b.distance); + return distances.slice(0, n).map(pair => pair.s); +} + +type WhileOpener = { + command: SyntaxNode; + from: number; + 
stemToClose: string; + to: number; + word: string; +}; + +type IfOpener = WhileOpener & { + hasElse: boolean; +}; + +type VariableMap = { + [name: string]: VariableDeclaration; +}; + +/** + * Linter function that returns a Code Mirror extension function. + * Can be optionally called with a command dictionary so it's available during linting. + */ +export function sequenceLinter( + channelDictionary: ChannelDictionary | null = null, + commandDictionary: CommandDictionary | null = null, + parameterDictionaries: ParameterDictionary[] = [], +): Extension { + return linter(view => { + const tree = syntaxTree(view.state); + const treeNode = tree.topNode; + const docText = view.state.doc.toString(); + let diagnostics: Diagnostic[] = []; + + diagnostics.push(...validateParserErrors(tree)); + + // TODO: Get identify type mapping to use + const variables: VariableDeclaration[] = [ + ...(globalThis.GLOBALS?.map(g => ({ name: g.name, type: 'STRING' }) as const) ?? []), + ]; + + // Validate top level metadata + diagnostics.push(...validateMetadata(treeNode)); + + diagnostics.push(...validateId(treeNode, docText)); + + const localsValidation = validateLocals(treeNode.getChildren('LocalDeclaration'), docText); + variables.push(...localsValidation.variables); + diagnostics.push(...localsValidation.diagnostics); + + const parameterValidation = validateParameters(treeNode.getChildren('ParameterDeclaration'), docText); + variables.push(...parameterValidation.variables); + diagnostics.push(...parameterValidation.diagnostics); + + const variableMap = variables.reduce( + (vMap: VariableMap, variable: VariableDeclaration) => ({ + ...vMap, + [variable.name]: variable, + }), + {}, + ); + + // Validate command type mixing + diagnostics.push(...validateCommandTypeMixing(treeNode)); + + diagnostics.push(...validateCustomDirectives(treeNode, docText)); + + diagnostics.push( + ...commandLinter(treeNode.getChild('Commands')?.getChildren(TOKEN_COMMAND) || [], docText, variableMap), + ); + + 
diagnostics.push( + ...immediateCommandLinter( + treeNode.getChild('ImmediateCommands')?.getChildren(TOKEN_COMMAND) || [], + docText, + variableMap, + ), + ); + + diagnostics.push( + ...hardwareCommandLinter(treeNode.getChild('HardwareCommands')?.getChildren(TOKEN_COMMAND) || [], docText), + ); + + diagnostics.push( + ...conditionalAndLoopKeywordsLinter(treeNode.getChild('Commands')?.getChildren(TOKEN_COMMAND) || [], docText), + ); + + if (globalThis.LINT !== undefined && globalThis.LINT(commandDictionary, view, treeNode) !== undefined) { + diagnostics = [...diagnostics, ...globalThis.LINT(commandDictionary, view, treeNode)]; + } + + return diagnostics; + }); + + /** + * Checks for unexpected tokens. + * + * @param tree + * @returns + */ + function validateParserErrors(tree: Tree) { + const diagnostics: Diagnostic[] = []; + const MAX_PARSER_ERRORS = 100; + tree.iterate({ + enter: node => { + if (node.name === TOKEN_ERROR && diagnostics.length < MAX_PARSER_ERRORS) { + const { from, to } = node; + diagnostics.push({ + from, + message: `Unexpected token`, + severity: 'error', + to, + }); + } + }, + }); + return diagnostics; + } + + function conditionalAndLoopKeywordsLinter(commandNodes: SyntaxNode[], text: string): Diagnostic[] { + const diagnostics: Diagnostic[] = []; + const conditionalStack: IfOpener[] = []; + const loopStack: WhileOpener[] = []; + const conditionalKeywords = []; + const loopKeywords = []; + const conditionalStartingKeywords = globalThis.CONDITIONAL_KEYWORDS?.IF ?? ['CMD_IF']; + const conditionalElseKeyword = globalThis.CONDITIONAL_KEYWORDS?.ELSE ?? 'CMD_ELSE'; + const conditionalElseIfKeywords = globalThis.CONDITIONAL_KEYWORDS?.ELSE_IF ?? ['CMD_ELSE_IF']; + const conditionalEndingKeyword = globalThis.CONDITIONAL_KEYWORDS?.END_IF ?? 'CMD_END_IF'; + const loopStartingKeywords = globalThis.LOOP_KEYWORDS?.WHILE_LOOP ?? ['CMD_WHILE_LOOP', 'CMD_WHILE_LOOP_OR']; + const loopEndingKeyword = globalThis.LOOP_KEYWORDS?.END_WHILE_LOOP ?? 
'CMD_END_WHILE_LOOP'; + + conditionalKeywords.push(conditionalElseKeyword, ...conditionalElseIfKeywords, conditionalEndingKeyword); + loopKeywords.push( + globalThis.LOOP_KEYWORDS?.BREAK ?? 'CMD_BREAK', + globalThis.LOOP_KEYWORDS?.CONTINUE ?? 'CMD_CONTINUE', + loopEndingKeyword, + ); + + for (const command of commandNodes) { + const stem = command.getChild('Stem'); + if (stem) { + const word = text.slice(stem.from, stem.to); + + if (conditionalStartingKeywords.includes(word)) { + conditionalStack.push({ + command, + from: stem.from, + hasElse: false, + stemToClose: conditionalEndingKeyword, + to: stem.to, + word, + }); + } + + if (conditionalKeywords.includes(word)) { + if (conditionalStack.length === 0) { + diagnostics.push({ + from: stem.from, + message: `${word} doesn't match a preceding ${conditionalStartingKeywords.join(', ')}.`, + severity: 'error', + to: stem.to, + }); + } else if (word === conditionalElseKeyword) { + if (!conditionalStack[conditionalStack.length - 1].hasElse) { + conditionalStack[conditionalStack.length - 1].hasElse = true; + } else { + diagnostics.push({ + from: stem.from, + message: `${word} doesn't match a preceding ${conditionalStartingKeywords.join(', ')}.`, + severity: 'error', + to: stem.to, + }); + } + } else if (word === conditionalEndingKeyword) { + conditionalStack.pop(); + } + } + + if (loopStartingKeywords.includes(word)) { + loopStack.push({ + command, + from: stem.from, + stemToClose: loopEndingKeyword, + to: stem.to, + word, + }); + } + + if (loopKeywords.includes(word)) { + if (loopStack.length === 0) { + diagnostics.push({ + from: stem.from, + message: `${word} doesn't match a preceding ${loopStartingKeywords.join(', ')}.`, + severity: 'error', + to: stem.to, + }); + } + + if (word === loopEndingKeyword) { + loopStack.pop(); + } + } + } + } + + // Anything left on the stack is unclosed + diagnostics.push( + ...[...loopStack, ...conditionalStack].map(block => { + return { + actions: [ + { + apply(view: EditorView, _from: 
number, _to: number) { + view.dispatch({ + changes: { + from: block.command.to, + insert: `\nC ${block.stemToClose}\n`, + }, + }); + }, + name: `Insert ${block.stemToClose}`, + }, + ], + from: block.from, + message: `Unclosed ${block.word}`, + severity: 'error', + to: block.to, + } as const; + }), + ); + + return diagnostics; + } + + /** + * Validates that a syntax node does not mix different command types. + * + * @param {SyntaxNode} node - The syntax node to validate. + * @return {Diagnostic[]} An array of diagnostics. + */ + function validateCommandTypeMixing(node: SyntaxNode): Diagnostic[] { + // Get the child nodes for Commands, ImmediateCommands, and HardwareCommands. + const commands = node.getChild('Commands'); + const immediateCommands = node.getChild('ImmediateCommands'); + const hardwareCommands = node.getChild('HardwareCommands'); + const lgo = commands?.getChild('LoadAndGoDirective') ?? null; + + // Check if each command type exists and has at least one child node. + const hasCommands = commands !== null && (commands?.getChildren(TOKEN_COMMAND).length > 0 || lgo !== null); + const hasImmediateCommands = immediateCommands !== null; + const hasHardwareCommands = hardwareCommands !== null; + + const diagnostics: Diagnostic[] = []; + + // Get the start. + const { from, to } = getFromAndTo([commands, immediateCommands, hardwareCommands]); + + // If there is a mix of command types, push a diagnostic. 
+ if ( + (hasCommands && (hasImmediateCommands || hasHardwareCommands)) || + (hasImmediateCommands && hasHardwareCommands) + ) { + if (lgo) { + diagnostics.push({ + from, + message: `Directive 'LOAD_AND_GO' cannot be used with 'Immediate Commands' or 'Hardware Commands'.`, + severity: 'error', + to, + }); + } + diagnostics.push({ + from, + message: 'Cannot mix different command types in one Sequence.', + severity: 'error', + to, + }); + } + return diagnostics; + } + + function validateLocals(locals: SyntaxNode[], text: string) { + const variables: VariableDeclaration[] = []; + const diagnostics: Diagnostic[] = []; + diagnostics.push( + ...locals.slice(1).map( + local => + ({ + ...getFromAndTo([local]), + message: 'There is a maximum of one @LOCALS directive per sequence', + severity: 'error', + }) as const, + ), + ); + locals.forEach(local => { + let child = local.firstChild; + while (child) { + if (child.name !== 'Enum') { + diagnostics.push({ + from: child.from, + message: `@LOCALS values are required to be Enums`, + severity: 'error', + to: child.to, + }); + } else { + variables.push({ + name: text.slice(child.from, child.to), + // TODO - hook to check mission specific nomenclature + type: 'STRING', + }); + } + child = child.nextSibling; + } + }); + return { + diagnostics, + variables, + }; + } + + function validateParameters(inputParams: SyntaxNode[], text: string) { + const variables: VariableDeclaration[] = []; + const diagnostics: Diagnostic[] = []; + diagnostics.push( + ...inputParams.slice(1).map( + inputParam => + ({ + ...getFromAndTo([inputParam]), + message: 'There is a maximum of @INPUT_PARAMS directive per sequence', + severity: 'error', + }) as const, + ), + ); + inputParams.forEach(inputParam => { + let child = inputParam.firstChild; + while (child) { + if (child.name !== 'Enum') { + diagnostics.push({ + from: child.from, + message: `@INPUT_PARAMS values are required to be Enums`, + severity: 'error', + to: child.to, + }); + } else { + 
variables.push({ + name: text.slice(child.from, child.to), + // TODO - hook to check mission specific nomenclature + type: 'STRING', + }); + } + child = child.nextSibling; + } + }); + return { + diagnostics, + variables, + }; + } + + function validateCustomDirectives(node: SyntaxNode, text: string): Diagnostic[] { + const diagnostics: Diagnostic[] = []; + node.getChildren('GenericDirective').forEach(directiveNode => { + const child = directiveNode.firstChild; + // use first token as directive, preserve remainder of line + const { from, to } = { ...getFromAndTo([directiveNode]), ...(child ? { to: child.from } : {}) }; + const custom = text.slice(from, to).trim(); + const guess = closest(custom, KNOWN_DIRECTIVES); + const insert = guess + (child ? ' ' : '\n'); + diagnostics.push({ + actions: [ + { + apply(view, from, to) { + view.dispatch({ changes: { from, insert, to } }); + }, + name: `Change to ${guess}`, + }, + ], + from, + message: `Unknown Directive ${custom}, did you mean ${guess}`, + severity: 'error', + to, + }); + }); + return diagnostics; + } + + function insertAction(name: string, insert: string) { + return { + apply(view: EditorView, from: number, _to: number) { + view.dispatch({ changes: { from, insert } }); + }, + name, + }; + } + + /** + * Function to generate diagnostics based on Commands section in the parse tree. 
+ * + * @param {SyntaxNode[] | undefined} commandNodes - nodes representing commands + * @param {string} text - the text to validate against + * @return {Diagnostic[]} an array of diagnostics + */ + function commandLinter(commandNodes: SyntaxNode[] | undefined, text: string, variables: VariableMap): Diagnostic[] { + // If there are no command nodes, return an empty array of diagnostics + if (!commandNodes) { + return []; + } + + // Initialize an empty array to hold diagnostics + const diagnostics: Diagnostic[] = []; + + // Iterate over each command node + for (const command of commandNodes) { + // Get the TimeTag node for the current command + const timeTagNode = command.getChild('TimeTag'); + + // If the TimeTag node is missing, create a diagnostic + if (!timeTagNode) { + diagnostics.push({ + actions: [ + insertAction(`Insert 'C' (command complete)`, 'C '), + insertAction(`Insert 'R1' (relative 1)`, 'R '), + ], + from: command.from, + message: "Missing 'Time Tag' for command", + severity: 'error', + to: command.to, + }); + } else { + const timeTagAbsoluteNode = timeTagNode.getChild('TimeAbsolute'); + const timeTagEpochNode = timeTagNode.getChild('TimeEpoch'); + const timeTagRelativeNode = timeTagNode.getChild('TimeRelative'); + + if (timeTagAbsoluteNode) { + const absoluteText = text.slice(timeTagAbsoluteNode.from + 1, timeTagAbsoluteNode.to).trim(); + + if (!testTime(absoluteText, ABSOLUTE_TIME)) { + diagnostics.push({ + actions: [], + from: timeTagAbsoluteNode.from, + message: `Time Error: Incorrectly formatted 'Absolute' time. + Received : Malformed Absolute time. 
+ Expected: YYYY-DOYThh:mm:ss[.sss]`, + severity: 'error', + to: timeTagAbsoluteNode.to, + }); + } else { + const result = isTimeBalanced(absoluteText, ABSOLUTE_TIME); + if (result.error) { + diagnostics.push({ + actions: [], + from: timeTagAbsoluteNode.from, + message: result.error, + severity: 'error', + to: timeTagAbsoluteNode.to, + }); + } else if (result.warning) { + diagnostics.push({ + actions: [], + from: timeTagAbsoluteNode.from, + message: result.warning, + severity: 'warning', + to: timeTagAbsoluteNode.to, + }); + } + } + } else if (timeTagEpochNode) { + const epochText = text.slice(timeTagEpochNode.from + 1, timeTagEpochNode.to).trim(); + if (!testTime(epochText, EPOCH_TIME) && !testTime(epochText, EPOCH_SIMPLE)) { + diagnostics.push({ + actions: [], + from: timeTagEpochNode.from, + message: `Time Error: Incorrectly formatted 'Epoch' time. + Received : Malformed Epoch time. + Expected: YYYY-DOYThh:mm:ss[.sss] or [+/-]ss`, + severity: 'error', + to: timeTagEpochNode.to, + }); + } else { + if (testTime(epochText, EPOCH_TIME)) { + const result = isTimeBalanced(epochText, EPOCH_TIME); + if (result.error) { + diagnostics.push({ + actions: [], + from: timeTagEpochNode.from, + message: result.error, + severity: 'error', + to: timeTagEpochNode.to, + }); + } else if (result.warning) { + diagnostics.push({ + actions: [], + from: timeTagEpochNode.from, + message: result.warning, + severity: 'warning', + to: timeTagEpochNode.to, + }); + } + } + } + } else if (timeTagRelativeNode) { + const relativeText = text.slice(timeTagRelativeNode.from + 1, timeTagRelativeNode.to).trim(); + if (!testTime(relativeText, RELATIVE_TIME) && !testTime(relativeText, RELATIVE_SIMPLE)) { + diagnostics.push({ + actions: [], + from: timeTagRelativeNode.from, + message: `Time Error: Incorrectly formatted 'Relative' time. + Received : Malformed Relative time. 
+ Expected: [+/-]hh:mm:ss[.sss]`, + severity: 'error', + to: timeTagRelativeNode.to, + }); + } else { + if (testTime(relativeText, RELATIVE_TIME)) { + const result = isTimeBalanced(relativeText, RELATIVE_TIME); + if (result.error) { + diagnostics.push({ + actions: [], + from: timeTagRelativeNode.from, + message: result.error, + severity: 'error', + to: timeTagRelativeNode.to, + }); + } else if (result.warning) { + diagnostics.push({ + actions: [], + from: timeTagRelativeNode.from, + message: result.warning, + severity: 'warning', + to: timeTagRelativeNode.to, + }); + } + } + } + } + } + + // Validate the command and push the generated diagnostics to the array + diagnostics.push(...validateCommand(command, text, 'command', variables)); + + // Lint the metadata and models + diagnostics.push(...validateMetadata(command)); + diagnostics.push(...validateModel(command)); + } + + // Return the array of diagnostics + return diagnostics; + } + + /** + * Function to generate diagnostics for immediate commands in the parse tree. + * + * @param {SyntaxNode[] | undefined} commandNodes - Array of command nodes or undefined. + * @param {string} text - Text of the sequence. + * @return {Diagnostic[]} Array of diagnostics. 
+ */ + function immediateCommandLinter( + commandNodes: SyntaxNode[] | undefined, + text: string, + variables: VariableMap, + ): Diagnostic[] { + // If there are no command nodes, return the empty array + if (!commandNodes) { + return []; + } + // Initialize an array to hold diagnostics + + const diagnostics: Diagnostic[] = []; + + // Iterate over each command node + for (const command of commandNodes) { + // Get the TimeTag node for the current command + const timeTagNode = command.getChild('TimeTag'); + + // If the TimeTag node exists, create a diagnostic + if (timeTagNode) { + diagnostics.push({ + actions: [], + from: command.from, + message: "Immediate commands can't have a time tag", + severity: 'error', + to: command.to, + }); + } + + // Validate the command and push the generated diagnostics to the array + diagnostics.push(...validateCommand(command, text, 'immediate', variables)); + + // Lint the metadata + diagnostics.push(...validateMetadata(command)); + + // immediate commands don't have models + const modelsNode = command.getChild('Models'); + if (modelsNode) { + diagnostics.push({ + from: modelsNode.from, + message: "Immediate commands can't have models", + severity: 'error', + to: modelsNode.to, + }); + } + } + + // Return the array of diagnostics + return diagnostics; + } + + /** + * Function to generate diagnostics based on HardwareCommands section in the parse tree. 
+ * + * @param {SyntaxNode[] | undefined} commands - nodes representing hardware commands + * @param {string} text - the text to validate against + * @return {Diagnostic[]} an array of diagnostics + */ + function hardwareCommandLinter(commands: SyntaxNode[] | undefined, text: string): Diagnostic[] { + // Initialize an empty array to hold diagnostics + const diagnostics: Diagnostic[] = []; + + // If there are no command nodes, return an empty array of diagnostics + if (!commands) { + return diagnostics; + } + + // Iterate over each command node + for (const command of commands) { + // Get the TimeTag node for the current command + const timeTag = command.getChild('TimeTag'); + + // If the TimeTag node exists, create a diagnostic + if (timeTag) { + // Push a diagnostic to the array indicating that time tags are not allowed for hardware commands + diagnostics.push({ + actions: [], + from: command.from, + message: 'Time tag is not allowed for hardware commands', + severity: 'error', + to: command.to, + }); + } + + // Validate the command and push the generated diagnostics to the array + diagnostics.push(...validateCommand(command, text, 'hardware', {})); + + // Lint the metadata + diagnostics.push(...validateMetadata(command)); + + // hardware commands don't have models + const modelsNode = command.getChild('Models'); + if (modelsNode) { + diagnostics.push({ + actions: [], + from: modelsNode.from, + message: "Immediate commands can't have models", + severity: 'error', + to: modelsNode.to, + }); + } + } + + // Return the array of diagnostics + return diagnostics; + } + + /** + * Validates a command by validating its stem and arguments. + * + * @param command - The SyntaxNode representing the command. + * @param text - The text of the whole command. + * @returns An array of Diagnostic objects representing the validation errors. 
 */
function validateCommand(
  command: SyntaxNode,
  text: string,
  type: 'command' | 'immediate' | 'hardware' = 'command',
  variables: VariableMap,
): Diagnostic[] {
  // If the command dictionary is not initialized, return an empty array of diagnostics.
  if (!commandDictionary) {
    return [];
  }

  // Get the stem node of the command.
  const stem = command.getChild('Stem');
  // If the stem node is null, return an empty array of diagnostics.
  if (stem === null) {
    return [];
  }

  const stemText = text.slice(stem.from, stem.to);

  // Initialize an array to store the diagnostic errors.
  const diagnostics: Diagnostic[] = [];

  // Validate the stem of the command.
  const result = validateStem(stem, stemText, type);
  // No command dictionary return [].
  if (result === null) {
    return [];
  }

  // Stem was invalid. A Diagnostic carries a 'message' property; dictionary
  // command objects (FswCommand/HwCommand) do not, so this discriminates the
  // union returned by validateStem.
  else if (typeof result === 'object' && 'message' in result) {
    diagnostics.push(result);
    return diagnostics;
  }

  // Only FSW commands define arguments; HwCommand has none, so default to [].
  const argNode = command.getChild('Args');
  const dictArgs = (result as FswCommand).arguments ?? [];

  // Lint the arguments of the command.
  diagnostics.push(
    ...validateAndLintArguments(
      dictArgs,
      argNode ? getChildrenNode(argNode) : null,
      command,
      text,
      stemText,
      variables,
    ),
  );

  // Return the array of diagnostics.
  return diagnostics;
}

/**
 * Validates the stem of a command.
 * @param stem - The SyntaxNode representing the stem of the command.
 * @param stemText - The command name
 * @param type - The type of command (default: 'command').
 * @returns A Diagnostic if the stem is invalid, a FswCommand if the stem is valid, or null if the command dictionary is not initialized.
 */
function validateStem(
  stem: SyntaxNode,
  stemText: string,
  type: 'command' | 'immediate' | 'hardware' = 'command',
): Diagnostic | FswCommand | HwCommand | null {
  if (commandDictionary === null) {
    return null;
  }
  const { fswCommandMap, fswCommands, hwCommandMap, hwCommands } = commandDictionary;

  // Look the stem up in the FSW map first, then the hardware map.
  const dictionaryCommand: FswCommand | HwCommand | null = fswCommandMap[stemText]
    ? fswCommandMap[stemText]
    : hwCommandMap[stemText]
      ? hwCommandMap[stemText]
      : null;

  if (!dictionaryCommand) {
    // Unknown stem: offer the closest dictionary stems (up to 3) as quick fixes.
    const ALL_STEMS = [...fswCommands.map(cmd => cmd.stem), ...hwCommands.map(cmd => cmd.stem)];
    return {
      actions: closestStrings(stemText.toUpperCase(), ALL_STEMS, 3).map(guess => ({
        apply(view, from, to) {
          view.dispatch({ changes: { from, insert: guess, to } });
        },
        name: `Change to ${guess}`,
      })),
      from: stem.from,
      message: `Command '${stemText}' not found`,
      severity: 'error',
      to: stem.to,
    };
  }

  // The stem exists, but it must belong to the section it appears in:
  // sequenced/immediate commands must be FSW, hardware commands must be HW.
  switch (type) {
    case 'command':
    case 'immediate':
      if (!fswCommandMap[stemText]) {
        return {
          from: stem.from,
          message: 'Command must be a fsw command',
          severity: 'error',
          to: stem.to,
        };
      }
      break;
    case 'hardware':
      if (!hwCommandMap[stemText]) {
        return {
          from: stem.from,
          message: 'Command must be a hardware command',
          severity: 'error',
          to: stem.to,
        };
      }
      break;
  }

  return dictionaryCommand;
}

/**
 * Validates and lints the command arguments based on the dictionary of command arguments.
 * @param dictArgs - The dictionary of command arguments.
 * @param argNode - The SyntaxNode representing the arguments of the command.
 * @param command - The SyntaxNode representing the command.
 * @param text - The text of the document.
 * @returns An array of Diagnostic objects representing the validation errors.
+ */ + function validateAndLintArguments( + dictArgs: FswCommandArgument[], + argNode: SyntaxNode[] | null, + command: SyntaxNode, + text: string, + stem: string, + variables: VariableMap, + ): Diagnostic[] { + // Initialize an array to store the validation errors + let diagnostics: Diagnostic[] = []; + + // Validate argument presence based on dictionary definition + if (dictArgs.length > 0) { + if (!argNode || argNode.length === 0) { + diagnostics.push({ + actions: [], + from: command.from, + message: 'The command is missing arguments.', + severity: 'error', + to: command.to, + }); + return diagnostics; + } + + if (argNode.length > dictArgs.length) { + const extraArgs = argNode.slice(dictArgs.length); + const { from, to } = getFromAndTo(extraArgs); + diagnostics.push({ + actions: [ + { + apply(view, from, to) { + view.dispatch({ changes: { from, to } }); + }, + name: `Remove ${extraArgs.length} extra argument${extraArgs.length > 1 ? 's' : ''}`, + }, + ], + from, + message: `Extra arguments, definition has ${dictArgs.length}, but ${argNode.length} are present`, + severity: 'error', + to, + }); + return diagnostics; + } else if (argNode.length < dictArgs.length) { + const { from, to } = getFromAndTo(argNode); + const pluralS = dictArgs.length > argNode.length + 1 ? 's' : ''; + diagnostics.push({ + actions: [ + { + apply(view, _from, _to) { + if (commandDictionary) { + addDefaultArgs(commandDictionary, view, command, dictArgs.slice(argNode.length)); + } + }, + name: `Add default missing argument${pluralS}`, + }, + ], + from, + message: `Missing argument${pluralS}, definition has ${argNode.length}, but ${dictArgs.length} are present`, + severity: 'error', + to, + }); + return diagnostics; + } + } else if (argNode && argNode.length > 0) { + const { from, to } = getFromAndTo(argNode); + diagnostics.push({ + actions: [ + { + apply(view, from, to) { + view.dispatch({ changes: { from, to } }); + }, + name: `Remove argument${argNode.length > 1 ? 
's' : ''}`, + }, + ], + from: from, + message: 'The command should not have arguments', + severity: 'error', + to: to, + }); + return diagnostics; + } + + // don't check any further as there are no arguments in the command dictionary + if (dictArgs.length === 0) { + return diagnostics; + } + + const argValues = argNode?.map(arg => text.slice(arg.from, arg.to)) ?? []; + + // grab the first argument node + // let node = argNode?.firstChild ?? null; + + // Iterate through the dictionary of command arguments + for (let i = 0; i < dictArgs.length; i++) { + const dictArg = dictArgs[i]; // Get the current dictionary argument + const arg = argNode?.[i]; // Get the current argument node + // Check if there are no more argument nodes + if (!arg) { + // Push a diagnostic error for missing argument + diagnostics.push({ + actions: [], + from: command.from, + message: `Missing argument #${i + 1}, '${dictArg.name}' of type '${dictArg.arg_type}'`, + severity: 'error', + to: command.to, + }); + break; + } + + // Validate and lint the current argument node + diagnostics = diagnostics.concat( + ...validateArgument(dictArg, arg, command, text, stem, argValues.slice(0, i), variables), + ); + } + + // Return the array of validation errors + return diagnostics; + } + + /** ++ * Validates the given FSW command argument against the provided syntax node, ++ * and generates diagnostics if the validation fails. ++ * ++ * @param dictArg The FSW command argument to validate. ++ * @param argNode The syntax node to validate against. ++ * @param command The command node containing the argument node. ++ * @param text The full text of the document. ++ * @returns An array of diagnostics generated during the validation. 
++ */ + function validateArgument( + dictArg: FswCommandArgument, + argNode: SyntaxNode, + command: SyntaxNode, + text: string, + stemText: string, + precedingArgValues: string[], + variables: VariableMap, + ): Diagnostic[] { + dictArg = getCustomArgDef(stemText, dictArg, precedingArgValues, parameterDictionaries, channelDictionary); + + const diagnostics: Diagnostic[] = []; + + const dictArgType = dictArg.arg_type; + const argType = argNode.name; + const argText = text.slice(argNode.from, argNode.to); + + switch (dictArgType) { + case 'enum': + if (argType !== 'String' && argType !== 'Enum') { + diagnostics.push({ + actions: [], + from: argNode.from, + message: `Incorrect type - expected 'enum' but got ${argType}`, + severity: 'error', + to: argNode.to, + }); + } else { + if (commandDictionary) { + const symbols = getAllEnumSymbols(commandDictionary?.enumMap, dictArg.enum_name) ?? dictArg.range ?? []; + const unquotedArgText = argText.replace(/^"|"$/g, ''); + if (!symbols.includes(unquotedArgText)) { + const guess = closest(unquotedArgText.toUpperCase(), symbols); + diagnostics.push({ + actions: [ + { + apply(view, from, to) { + view.dispatch({ changes: { from, insert: `"${guess}"`, to } }); + }, + name: `Change to ${guess}`, + }, + ], + from: argNode.from, + message: `Enum should be "${symbols.slice(0, MAX_ENUMS_TO_SHOW).join(' | ')}${symbols.length > MAX_ENUMS_TO_SHOW ? ' ...' 
: ''}"\n`, + severity: 'error', + to: argNode.to, + }); + break; + } + } + if (argType === 'Enum') { + diagnostics.push({ + actions: [ + { + apply(view, from, to) { + view.dispatch({ changes: { from, insert: `"${argText}"`, to } }); + }, + name: `Add quotes around ${argText}`, + }, + ], + from: argNode.from, + message: `Enum should be a "string"`, + severity: 'error', + to: argNode.to, + }); + } + } + break; + case 'float': + case 'integer': + case 'numeric': + case 'unsigned': + if (argType === 'Number') { + if (dictArg.range === null) { + break; + } + const { max, min } = dictArg.range; + const nodeTextAsNumber = parseFloat(argText); + + if (nodeTextAsNumber < min || nodeTextAsNumber > max) { + const message = + max !== min + ? `Number out of range. Range is between ${min} and ${max} inclusive.` + : `Number out of range. Range is ${min}.`; + diagnostics.push({ + actions: + max === min + ? [ + { + apply(view, from, to) { + view.dispatch({ changes: { from, insert: `${min}`, to } }); + }, + name: `Change to ${min}`, + }, + ] + : [], + from: argNode.from, + message, + severity: 'error', + to: argNode.to, + }); + } + } else if (argType === 'Enum' && !variables[argText]) { + diagnostics.push({ + from: argNode.from, + message: `Unrecognized variable name ${argType}`, + severity: 'error', + to: argNode.to, + }); + } else { + diagnostics.push({ + from: argNode.from, + message: `Incorrect type - expected 'Number' but got ${argType}`, + severity: 'error', + to: argNode.to, + }); + } + break; + case 'fixed_string': + case 'var_string': + if (argType === 'Enum') { + if (!variables[argText]) { + const insert = closest(argText, Object.keys(variables)); + diagnostics.push({ + actions: [ + { + apply(view, from, to) { + view.dispatch({ changes: { from, insert, to } }); + }, + name: `Change to ${insert}`, + }, + ], + from: argNode.from, + message: `Unrecognized variable name ${argText}`, + severity: 'error', + to: argNode.to, + }); + } + } else if (argType !== 'String') { + 
diagnostics.push({ + from: argNode.from, + message: `Incorrect type - expected 'String' but got ${argType}`, + severity: 'error', + to: argNode.to, + }); + } + break; + case 'repeat': + if (argType !== TOKEN_REPEAT_ARG) { + diagnostics.push({ + from: argNode.from, + message: `Incorrect type - expected '${TOKEN_REPEAT_ARG}' but got ${argType}`, + severity: 'error', + to: argNode.to, + }); + } else { + const repeatNodes = argNode.getChildren('Arguments'); + const repeatDef = dictArg.repeat; + if (repeatDef) { + const repeatLength = repeatDef.arguments.length; + const minSets = repeatDef.min ?? 0; + const maxSets = repeatDef.max ?? Infinity; + const minCount = repeatLength * minSets; + const maxCount = repeatLength * maxSets; + if (minCount > repeatNodes.length) { + diagnostics.push({ + actions: [], + from: argNode.from, + message: `Repeat argument should have at least ${minCount} value${minCount !== 0 ? 's' : ''} but has ${ + repeatNodes.length + }`, + severity: 'error', + to: argNode.to, + }); + } else if (maxCount < repeatNodes.length) { + diagnostics.push({ + actions: [], + from: argNode.from, + message: `Repeat argument should have at most ${maxCount} value${maxCount !== 0 ? 
's' : ''} but has ${ + repeatNodes.length + }`, + severity: 'error', + to: argNode.to, + }); + } else if (repeatNodes.length % repeatLength !== 0) { + const allowedValues: number[] = []; + for (let i = minSets; i <= Math.min(maxSets, minSets + 2); i++) { + allowedValues.push(i * repeatLength); + } + let showEllipses = false; + if (allowedValues.length) { + const lastVal = allowedValues[allowedValues.length - 1]; + if (maxCount > lastVal) { + if (maxCount > lastVal + repeatLength) { + showEllipses = true; + } + allowedValues.push(maxCount); + } + } + const valStrings = allowedValues.map(i => i.toString()); + if (showEllipses) { + valStrings.splice(allowedValues.length - 1, 0, '...'); + } + + diagnostics.push({ + actions: [], + from: argNode.from, + message: `Repeat argument should have [${valStrings.join(', ')}] values`, + severity: 'error', + to: argNode.to, + }); + } else { + repeatNodes + .reduce((acc, node, i) => { + const chunkIndex = Math.floor(i / repeatLength); + if (!acc[chunkIndex]) { + acc[chunkIndex] = []; + } + acc[chunkIndex].push(node); + return acc; + }, []) + .forEach((repeat: SyntaxNode[]) => { + // check individual args + diagnostics.push( + ...validateAndLintArguments(repeatDef.arguments ?? [], repeat, command, text, stemText, variables), + ); + }); + } + } + } + + break; + } + return diagnostics; + } + + function validateId(commandNode: SyntaxNode, text: string): Diagnostic[] { + const diagnostics: Diagnostic[] = []; + const idNodes = commandNode.getChildren('IdDeclaration'); + if (idNodes.length) { + const idNode = idNodes[0]; + const idValNode = idNode.firstChild; + if (idValNode?.name === 'Enum' || idValNode?.name === 'Number') { + const { from, to } = getFromAndTo([idValNode]); + const idVal = text.slice(from, to); + diagnostics.push({ + actions: idValNode + ? 
[
              {
                apply(view, from, to) {
                  view.dispatch({ changes: { from, insert: quoteEscape(idVal), to } });
                },
                name: `Quote ${idVal}`,
              },
            ]
          : [],
        from,
        message: `@ID directives must include double quoted string e.g. '@ID "sequence.name"'`,
        severity: 'error',
        to,
      });
    } else if (!idValNode) {
      diagnostics.push({
        ...getFromAndTo([idNode]),
        // Typo fix: the example previously lacked the closing single quote
        // (compare the message in the branch above).
        message: `@ID directives must include a double quoted string e.g. '@ID "sequence.name"'`,
        severity: 'error',
      });
    }
  }
  // Any @ID directive after the first is an error.
  diagnostics.push(
    ...idNodes.slice(1).map(
      idNode =>
        ({
          ...getFromAndTo([idNode]),
          message: 'Only one @ID directive is allowed per sequence',
          severity: 'error',
        }) as const,
    ),
  );
  return diagnostics;
}

/**
 * Validates the metadata of a command node and returns an array of diagnostics.
 * @param commandNode - The command node to validate.
 * @returns An array of diagnostic objects.
 */
function validateMetadata(commandNode: SyntaxNode): Diagnostic[] {
  // Get the metadata node of the command node
  const metadataNode = commandNode.getChild('Metadata');
  // If there is no metadata node, return an empty array
  if (!metadataNode) {
    return [];
  }
  // Get the metadata entry nodes of the metadata node
  const metadataEntry = metadataNode.getChildren('MetaEntry');
  // If there are no metadata entry nodes, return an empty array
  if (!metadataEntry) {
    return [];
  }

  const diagnostics: Diagnostic[] = [];

  // Iterate over each metadata entry node
  metadataEntry.forEach(entry => {
    // Get the children nodes of the metadata entry node
    const metadataNodeChildren = getChildrenNode(entry);

    if (metadataNodeChildren.length > 2) {
      diagnostics.push({
        actions: [],
        from: entry.from,
        message: `Should only have a 'key' and a 'value'`,
        severity: 'error',
        to: entry.to,
      });
    } else {
      // Define the template for metadata nodes
      const metadataTemplate = ['Key', 'Value'];
      // Iterate over each template node
      for (let i = 0; i <
metadataTemplate.length; i++) { + // Get the name of the template node + const templateName = metadataTemplate[i]; + // Get the metadata node of the current template node + const metadataNode = metadataNodeChildren[i]; + + // If there is no metadata node, add a diagnostic + if (!metadataNode) { + diagnostics.push({ + actions: [], + from: entry.from, + message: `Missing ${templateName}`, + severity: 'error', + to: entry.to, + }); + break; + } + + // If the name of the metadata node is not the template node name + if (metadataNode.name !== templateName) { + // Get the name of the deepest node of the metadata node + const deepestNodeName = getDeepestNode(metadataNode).name; + // Add a diagnostic based on the name of the deepest node + switch (deepestNodeName) { + case 'String': + break; // do nothing as it is a string + case 'Number': + case 'Enum': + case 'Boolean': + diagnostics.push({ + from: metadataNode.from, + message: `Incorrect type - expected 'String' but got ${deepestNodeName}`, + severity: 'error', + to: metadataNode.to, + }); + break; + default: + diagnostics.push({ + from: entry.from, + message: `Missing ${templateName}`, + severity: 'error', + to: entry.to, + }); + } + } + } + } + }); + + return diagnostics; + } + + function validateModel(commandNode: SyntaxNode): Diagnostic[] { + const models = commandNode.getChild('Models')?.getChildren('Model'); + if (!models) { + return []; + } + + const diagnostics: Diagnostic[] = []; + + models.forEach(model => { + const modelChildren = getChildrenNode(model); + if (modelChildren.length > 3) { + diagnostics.push({ + from: model.from, + message: `Should only have 'Variable', 'value', and 'Offset'`, + severity: 'error', + to: model.to, + }); + } else { + const modelTemplate = ['Variable', 'Value', 'Offset']; + for (let i = 0; i < modelTemplate.length; i++) { + const templateName = modelTemplate[i]; + const modelNode = modelChildren[i]; + if (!modelNode) { + diagnostics.push({ + from: model.from, + message: `Missing 
${templateName}`, + severity: 'error', + to: model.to, + }); + } + + if (modelNode.name !== templateName) { + const deepestNodeName = getDeepestNode(modelNode).name; + if (deepestNodeName === TOKEN_ERROR) { + diagnostics.push({ + from: model.from, + message: `Missing ${templateName}`, + severity: 'error', + to: model.to, + }); + break; + } else { + if (templateName === 'Variable' || templateName === 'Offset') { + if (deepestNodeName !== 'String') { + diagnostics.push({ + from: modelNode.from, + message: `Incorrect type - expected 'String' but got ${deepestNodeName}`, + severity: 'error', + to: modelNode.to, + }); + break; + } + } else { + // Value + if (deepestNodeName !== 'Number' && deepestNodeName !== 'String' && deepestNodeName !== 'Boolean') { + diagnostics.push({ + from: modelNode.from, + message: `Incorrect type - expected 'Number', 'String', or 'Boolean' but got ${deepestNodeName}`, + severity: 'error', + to: modelNode.to, + }); + break; + } + } + } + } + } + } + }); + + return diagnostics; + } +} diff --git a/src/utilities/new-sequence-editor/sequence-tooltip.ts b/src/utilities/new-sequence-editor/sequence-tooltip.ts new file mode 100644 index 0000000000..af7ba1b3d7 --- /dev/null +++ b/src/utilities/new-sequence-editor/sequence-tooltip.ts @@ -0,0 +1,154 @@ +import { syntaxTree } from '@codemirror/language'; +import type { Extension } from '@codemirror/state'; +import { hoverTooltip, type EditorView, type Tooltip } from '@codemirror/view'; +import type { SyntaxNode } from '@lezer/common'; +import type { + ChannelDictionary, + CommandDictionary, + FswCommand, + HwCommand, + ParameterDictionary, +} from '@nasa-jpl/aerie-ampcs'; +import ArgumentTooltip from '../../components/sequencing/ArgumentTooltip.svelte'; +import CommandTooltip from '../../components/sequencing/CommandTooltip.svelte'; +import { getCustomArgDef } from './extension-points'; + +/** + * Searches up through a node's ancestors to find a node by the given name. 
+ */ +function getParentNodeByName(view: EditorView, pos: number, name: string): SyntaxNode | undefined { + let node: SyntaxNode | undefined = syntaxTree(view.state).resolveInner(pos, -1); + + // TODO - replace with getAncestorNode + while (node && node.name !== name) { + node = node.parent?.node; + } + + return node; +} + +/** + * Returns a text token range for a line in the view at a given position. + * @see https://codemirror.net/examples/tooltip/#hover-tooltips + */ +function getTokenPositionInLine(view: EditorView, pos: number) { + const { from, to, text } = view.state.doc.lineAt(pos); + const tokenRegex = /[a-zA-Z0-9_".-]/; + + let start = pos; + let end = pos; + + while (start > from && tokenRegex.test(text[start - from - 1])) { + --start; + } + + while (end < to && tokenRegex.test(text[end - from])) { + ++end; + } + + return { from: start, to: end }; +} + +/** + * Tooltip function that returns a Code Mirror extension function. + * Can be optionally called with a command dictionary so it's available during tooltip generation. + */ +export function sequenceTooltip( + channelDictionary: ChannelDictionary | null = null, + commandDictionary: CommandDictionary | null = null, + parameterDictionaries: ParameterDictionary[] = [], +): Extension { + return hoverTooltip((view, pos, side): Tooltip | null => { + const { from, to } = getTokenPositionInLine(view, pos); + + // First handle the case where the token is out of bounds. + if ((from === pos && side < 0) || (to === pos && side > 0)) { + return null; + } + + // Check to see if we are hovering over a command stem. + // TODO: Get token from AST? For now just assumes token is a commend stem if found in dictionary. + if (commandDictionary) { + const { hwCommandMap, fswCommandMap } = commandDictionary; + const text = view.state.doc.sliceString(from, to); + const command: FswCommand | HwCommand | null = fswCommandMap[text] ?? hwCommandMap[text] ?? 
null; + + if (command) { + return { + above: true, + create() { + const dom = document.createElement('div'); + new CommandTooltip({ props: { command }, target: dom }); + return { dom }; + }, + end: to, + pos: from, + }; + } + } + + // Check to see if we are hovering over command arguments. + const argsNode = getParentNodeByName(view, pos, 'Args'); + + if (argsNode) { + const stem = argsNode.parent?.getChild('Stem'); + + if (commandDictionary && stem) { + const { fswCommandMap } = commandDictionary; + const text = view.state.doc.sliceString(stem.from, stem.to); + const fswCommand: FswCommand | null = fswCommandMap[text] ?? null; + const argValues: string[] = []; + + if (!fswCommand) { + return null; + } + + let argNode = argsNode.firstChild; + + while (argNode) { + argValues.push(view.state.doc.sliceString(argNode.from, argNode.to)); + argNode = argNode.nextSibling; + } + + let i = 0; + argNode = argsNode.firstChild; + // TODO tooltips in repeats + while (argNode) { + // if (argNode.name === TOKEN_REPEAT_ARG) { + // let repeatArg = argNode.firstChild; + + // } + + if (argNode.from === from && argNode.to === to) { + const arg = getCustomArgDef( + text, + fswCommand.arguments[i], + argValues, + parameterDictionaries, + channelDictionary, + ); + + // TODO. Type check arg for type found in AST so we do not show tooltips incorrectly. 
+ if (arg) { + return { + above: true, + create() { + const dom = document.createElement('div'); + new ArgumentTooltip({ props: { arg, commandDictionary }, target: dom }); + return { dom }; + }, + end: to, + pos: from, + }; + } + } + + argNode = argNode.nextSibling; + ++i; + } + } + } + + return null; + }); +} diff --git a/src/utilities/new-sequence-editor/sequencer-grammar-constants.ts b/src/utilities/new-sequence-editor/sequencer-grammar-constants.ts new file mode 100644 index 0000000000..77eff640d7 --- /dev/null +++ b/src/utilities/new-sequence-editor/sequencer-grammar-constants.ts @@ -0,0 +1,3 @@ +export const TOKEN_COMMAND = 'Command'; +export const TOKEN_REPEAT_ARG = 'RepeatArg'; +export const TOKEN_ERROR = '⚠'; diff --git a/src/utilities/new-sequence-editor/time-utils.ts b/src/utilities/new-sequence-editor/time-utils.ts new file mode 100644 index 0000000000..3004f5de1b --- /dev/null +++ b/src/utilities/new-sequence-editor/time-utils.ts @@ -0,0 +1,249 @@ +export const ABSOLUTE_TIME = /^(\d{4})-(\d{3})T(\d{2}):(\d{2}):(\d{2})(?:\.(\d{3}))?$/g; + +export const RELATIVE_TIME = /([0-9]{3}T)?([0-9]{2}):([0-9]{2}):([0-9]{2})(\.[0-9]+)?$/g; +export const RELATIVE_SIMPLE = /(\d+)(\.[0-9]+)?$/g; + +export const EPOCH_TIME = /(^[+-]?)([0-9]{3}T)?([0-9]{2}):([0-9]{2}):([0-9]{2})(\.[0-9]+)?$/g; +export const EPOCH_SIMPLE = /(^[+-]?)(\d+)(\.[0-9]+)?$/g; + +/** + * Tests if a given time string matches a specified regular expression. + * + * @param {string} time - The time string to be tested. + * @param {RegExp} regex - The regular expression to test against. + * @return {RegExpExecArray | null} The result of the regular expression execution, or null if no match is found. 
+ */ +export function testTime(time: string, regex: RegExp): RegExpExecArray | null { + regex.lastIndex = 0; + return regex.exec(time); +} + +export function isTimeBalanced( + time: string, + regex: RegExp, +): { error?: string | undefined; warning?: string | undefined } { + const { years, days, hours, minutes, seconds, milliseconds } = extractTime(time, regex); + + if (regex === ABSOLUTE_TIME && years !== undefined && days !== undefined) { + const isUnbalanced = + (years >= 0 && + years <= 9999 && + days >= 0 && + days <= (isLeapYear(years) ? 366 : 365) && + hours >= 0 && + hours <= 23 && + minutes >= 0 && + minutes <= 59 && + seconds >= 0 && + seconds <= 59) === false; + + if (isUnbalanced) { + return balanceAbsolute(years, days, hours, minutes, seconds, milliseconds); + } + } else { + const isUnbalanced = + (days !== undefined + ? days >= 1 && + days <= 365 && + hours >= 0 && + hours <= 23 && + minutes >= 0 && + minutes <= 59 && + seconds >= 0 && + seconds <= 59 + : hours >= 0 && hours <= 23 && minutes >= 0 && minutes <= 59 && seconds >= 0 && seconds <= 59) === false; + + if (isUnbalanced) { + return balanceDuration(days ?? 
0, hours, minutes, seconds, milliseconds); + } + } + + return {}; +} + +function extractTime( + time: string, + regex: RegExp, +): { + days?: number; + hours: number; + milliseconds: number; + minutes: number; + seconds: number; + years?: number; +} { + regex.lastIndex = 0; + const matches = regex.exec(time); + + if (!matches) { + return { hours: 0, milliseconds: 0, minutes: 0, seconds: 0 }; + } + + if (regex.source === ABSOLUTE_TIME.source) { + const [, years = '0', days = '0', hours = '0', minutes = '0', seconds = '0', milliseconds = '0'] = matches; + const [yearsNum, daysNum, hoursNum, minuteNum, secondsNum, millisecondNum] = [ + years, + days, + hours, + minutes, + seconds, + milliseconds, + ].map(Number); + return { + days: daysNum, + hours: hoursNum, + milliseconds: millisecondNum, + minutes: minuteNum, + seconds: secondsNum, + years: yearsNum, + }; + } + if (regex.source === EPOCH_TIME.source) { + const [, , days = undefined, hours = '0', minutes = '0', seconds = '0', milliseconds = '0'] = matches; + const [hoursNum, minuteNum, secondsNum, millisecondNum] = [hours, minutes, seconds, milliseconds].map(Number); + const daysNum = days !== undefined ? Number(days.replace('T', '')) : days; + return { + days: daysNum, + hours: hoursNum, + milliseconds: millisecondNum, + minutes: minuteNum, + seconds: secondsNum, + }; + } else if (regex.source === RELATIVE_TIME.source) { + const [, days = undefined, hours = '0', minutes = '0', seconds = '0', milliseconds = '0'] = matches; + const [hoursNum, minuteNum, secondsNum, millisecondNum] = [hours, minutes, seconds, milliseconds].map(Number); + const daysNum = days !== undefined ? 
Number(days.replace('T', '')) : days; + return { + days: daysNum, + hours: hoursNum, + milliseconds: millisecondNum, + minutes: minuteNum, + seconds: secondsNum, + }; + } + + return { hours: 0, milliseconds: 0, minutes: 0, seconds: 0 }; +} + +function balanceDuration( + unbalanceDays: number, + unbalancedHours: number, + unbalanceMinutes: number, + unbalanceSeconds: number, + unbalanceMilliseconds: number, +): { error?: string | undefined; warning?: string | undefined } { + const { days, hours, minutes, seconds, milliseconds } = normalizeTime( + unbalanceDays, + unbalancedHours, + unbalanceMinutes, + unbalanceSeconds, + unbalanceMilliseconds, + ); + + const DD = days !== 0 ? `${formatNumber(days, 3)}T` : ''; + const HH = days !== 0 ? formatNumber(hours, 2) : formatNumber(hours, 2); + const MM = formatNumber(minutes, 2); + const SS = formatNumber(seconds, 2); + const sss = formatNumber(milliseconds, 3); + + const balancedTime = `${DD}${HH}:${MM}:${SS}[.${sss}]`; + + if (days > 365) { + return { + error: `Time Error: Maximum time reached. + Received: Balanced time - ${balancedTime}. + Expected: ${balancedTime} <= 365T23:59:59.999`, + }; + } else { + return { + warning: `Time Warning: Unbalanced time used. + Suggestion: ${balancedTime}`, + }; + } +} + +function balanceAbsolute( + unbalanceYears: number, + unbalanceDays: number, + unbalancedHours: number, + unbalanceMinutes: number, + unbalanceSeconds: number, + unbalanceMilliseconds: number, +): { error?: string | undefined; warning?: string | undefined } { + const { years, days, hours, minutes, seconds, milliseconds } = normalizeTime( + unbalanceDays, + unbalancedHours, + unbalanceMinutes, + unbalanceSeconds, + unbalanceMilliseconds, + unbalanceYears, + ); + + const YY = years !== 0 && years !== undefined ? `${formatNumber(years, 4)}-` : ''; + const DD = (years !== 0 && days === 0) || days !== 0 ? `${formatNumber(days, 3)}T` : ''; + const HH = days !== 0 ? 
formatNumber(hours, 2) : formatNumber(hours, 2); + const MM = formatNumber(minutes, 2); + const SS = formatNumber(seconds, 2); + const sss = formatNumber(milliseconds, 3); + + const balancedTime = `${YY}${DD}${HH}:${MM}:${SS}.${sss}`; + + if (years && years > 9999) { + return { + error: `Time Error: Maximum time reached + Received: Balanced time - ${balancedTime}. + Expected: ${balancedTime} <= 9999-365T23:59:59.999`, + }; + } + + return { + warning: `Time Warning: Unbalanced time used. + Suggestion: ${balancedTime}`, + }; +} + +function normalizeTime( + days: number, + hours: number, + minutes: number, + seconds: number, + milliseconds: number, + years?: number, +): { days: number; hours: number; milliseconds: number; minutes: number; seconds: number; years?: number } { + // Normalize milliseconds and seconds + seconds += Math.floor(milliseconds / 1000); + milliseconds = milliseconds % 1000; + + // Normalize seconds and minutes + minutes += Math.floor(seconds / 60); + seconds = seconds % 60; + + // Normalize minutes and hours + hours += Math.floor(minutes / 60); + minutes = minutes % 60; + + // Normalize hours and days + days += Math.floor(hours / 24); + hours = hours % 24; + + // Normalize days and years + if (years !== undefined) { + const isLY = isLeapYear(years); + years += Math.floor(days / (isLY ? 366 : 365)); + days = days % (isLY ? 366 : 365); + } + + // Return the normalized values + return { days, hours, milliseconds, minutes, seconds, years }; +} + +function isLeapYear(year: number): boolean { + return (year % 4 === 0 && year % 100 !== 0) || year % 400 === 0; +} + +function formatNumber(number: number, size: number): string { + const isNegative = number < 0; + const absoluteNumber = Math.abs(number).toString(); + const formattedNumber = absoluteNumber.padStart(size, '0'); + return isNegative ? 
`-${formattedNumber}` : formattedNumber; +} diff --git a/src/utilities/new-sequence-editor/to-seq-json.test.ts b/src/utilities/new-sequence-editor/to-seq-json.test.ts new file mode 100644 index 0000000000..04ad8b7e6e --- /dev/null +++ b/src/utilities/new-sequence-editor/to-seq-json.test.ts @@ -0,0 +1,403 @@ +import { + parse, + type CommandDictionary, + type FswCommand, + type FswCommandArgument, + type FswCommandArgumentFloat, + type FswCommandArgumentMap, + type HwCommand, +} from '@nasa-jpl/aerie-ampcs'; +import { readFileSync } from 'fs'; +import { describe, expect, it } from 'vitest'; +import { SeqLanguage } from '../codemirror'; +import { sequenceToSeqJson } from './to-seq-json'; + +function argArrToMap(cmdArgs: FswCommandArgument[]): FswCommandArgumentMap { + return cmdArgs.reduce((argMap, arg) => ({ ...argMap, [arg.name]: arg }), {}); +} + +function floatArg(name: string, units: string = ''): FswCommandArgumentFloat { + return { + arg_type: 'float', + bit_length: 64, + default_value: 0, + description: '', + name, + range: { max: 5, min: -5 }, + units, + }; +} + +const fswCommands: FswCommand[] = [ + { + arguments: [floatArg('float_arg_1', ''), floatArg('float_arg_2', '')] as FswCommandArgument[], + stem: 'FSW_CMD_1', + }, +].map(stub => ({ ...stub, argumentMap: argArrToMap(stub.arguments), description: '', type: 'fsw_command' })); + +const fswCommandMap = fswCommands.reduce((cmdMap, hwCmd) => ({ ...cmdMap, [hwCmd.stem]: hwCmd }), {}); + +const hwCommands = [ + { + stem: 'HDW_CMD_1', + }, + { + stem: 'HDW_CMD_2', + }, +].map((stub): HwCommand => ({ ...stub, description: '', type: 'hw_command' })); + +const hwCommandMap = hwCommands.reduce((cmdMap, hwCmd) => ({ ...cmdMap, [hwCmd.stem]: hwCmd }), {}); + +const commandDictionary: CommandDictionary = { + enumMap: {}, + enums: [], + fswCommandMap, + fswCommands, + header: { + mission_name: 'unittest', + schema_version: '1', + spacecraft_ids: [1], + version: '1', + }, + hwCommandMap, + hwCommands, + id: 
'command_dictionary', + path: '/file/path', +}; + +const commandBanana = parse(readFileSync('src/tests/mocks/sequencing/dictionaries/command_banananation.xml', 'utf-8')); + +describe('convert a sequence to seq json', () => { + it('hardware command', () => { + const seq = `@HARDWARE +HDW_CMD`; + const id = 'test'; + const expectedJson = { + hardware_commands: [ + { + stem: 'HDW_CMD', + }, + ], + id: 'test', + metadata: {}, + }; + const actual = sequenceToSeqJson(SeqLanguage.parser.parse(seq), seq, commandDictionary, [], null, id); + expect(actual).toEqual(expectedJson); + }); + + it('multiple hardware commands', () => { + const seq = `@HARDWARE +HDW_CMD_1 +# comment +HDW_CMD_2 +`; + const id = 'test'; + const expectedJson = { + hardware_commands: [{ stem: 'HDW_CMD_1' }, { stem: 'HDW_CMD_2' }], + id: 'test', + metadata: {}, + }; + const actual = sequenceToSeqJson(SeqLanguage.parser.parse(seq), seq, commandDictionary, [], null, id); + expect(actual).toEqual(expectedJson); + }); + + it('load and go with commands', () => { + const seq = `@LOAD_AND_GO +C FSW_CMD_1 1e3 2.34 +# comment +C FSW_CMD_1 0.123 -2.34 # inline description +`; + const id = 'test'; + const expectedJson = { + id: 'test', + metadata: { + lgo: true, + }, + steps: [ + { + args: [ + { + name: 'float_arg_1', + type: 'number', + value: 1e3, + }, + { + name: 'float_arg_2', + type: 'number', + value: 2.34, + }, + ], + stem: 'FSW_CMD_1', + time: { type: 'COMMAND_COMPLETE' }, + type: 'command', + }, + { + args: [ + { + name: 'float_arg_1', + type: 'number', + value: 0.123, + }, + { + name: 'float_arg_2', + type: 'number', + value: -2.34, + }, + ], + description: 'inline description', + stem: 'FSW_CMD_1', + time: { type: 'COMMAND_COMPLETE' }, + type: 'command', + }, + ], + }; + const actual = sequenceToSeqJson(SeqLanguage.parser.parse(seq), seq, commandDictionary, [], null, id); + expect(actual).toEqual(expectedJson); + }); + + it('command dictionary file', () => { + const id = 'test.sequence'; + const seq = ` 
+@ID "test.inline" + +@INPUT_PARAMS L00INT L01INT L02STR + +# comment +R10 ECHO "string arg" +R71 ECHO L02STR + `; + const expectedJson = { + id: 'test.inline', + metadata: {}, + parameters: [ + { + name: 'L00INT', + type: 'INT', + }, + { + name: 'L01INT', + type: 'INT', + }, + { + name: 'L02STR', + type: 'STRING', + }, + ], + steps: [ + { + args: [ + { + name: 'echo_string', + type: 'string', + value: 'string arg', + }, + ], + stem: 'ECHO', + time: { tag: '00:00:10', type: 'COMMAND_RELATIVE' }, + type: 'command', + }, + { + args: [ + { + name: 'echo_string', + type: 'symbol', + value: 'L02STR', + }, + ], + stem: 'ECHO', + time: { tag: '00:01:11', type: 'COMMAND_RELATIVE' }, + type: 'command', + }, + ], + }; + const actual = sequenceToSeqJson(SeqLanguage.parser.parse(seq), seq, commandBanana, [], null, id); + expect(actual).toEqual(expectedJson); + }); + + it('repeat args', () => { + const id = 'test.sequence'; + const seq = `@ID "test.inline" + +# comment +R10 PACKAGE_BANANA 2 [ "bundle1" 5 "bundle2" 10] + `; + const expectedJson = { + id: 'test.inline', + metadata: {}, + steps: [ + { + args: [ + { + name: 'lot_number', + type: 'number', + value: 2, + }, + { + name: 'bundle', + type: 'repeat', + value: [ + [ + { + name: 'bundle_name', + type: 'string', + value: 'bundle1', + }, + { + name: 'number_of_bananas', + type: 'number', + value: 5, + }, + ], + [ + { + name: 'bundle_name', + type: 'string', + value: 'bundle2', + }, + { + name: 'number_of_bananas', + type: 'number', + value: 10, + }, + ], + ], + }, + ], + stem: 'PACKAGE_BANANA', + time: { tag: '00:00:10', type: 'COMMAND_RELATIVE' }, + type: 'command', + }, + ], + }; + const actual = sequenceToSeqJson(SeqLanguage.parser.parse(seq), seq, commandBanana, [], null, id); + expect(actual).toEqual(expectedJson); + }); + + it('local variables', () => { + const id = 'test.sequence'; + const seq = `@ID "test.inline" +@LOCALS L00STR +C ECHO L00STR +C ECHO "L00STR" +C ECHO L01STR + `; + const actual = 
sequenceToSeqJson(SeqLanguage.parser.parse(seq), seq, commandBanana, [], null, id); + const expectedJson = { + id: 'test.inline', + locals: [ + { + name: 'L00STR', + type: 'STRING', + }, + ], + metadata: {}, + steps: [ + { + args: [ + { + name: 'echo_string', + type: 'symbol', + value: 'L00STR', + }, + ], + stem: 'ECHO', + time: { + type: 'COMMAND_COMPLETE', + }, + type: 'command', + }, + { + args: [ + { + name: 'echo_string', + type: 'string', + value: 'L00STR', + }, + ], + stem: 'ECHO', + time: { + type: 'COMMAND_COMPLETE', + }, + type: 'command', + }, + { + args: [ + { + name: 'echo_string', + type: 'symbol', + value: 'L01STR', + }, + ], + stem: 'ECHO', + time: { + type: 'COMMAND_COMPLETE', + }, + type: 'command', + }, + ], + }; + expect(actual).toEqual(expectedJson); + }); + + it('header ordering', () => { + function allPermutations(inputArr: string[]) { + const result: string[][] = []; + function permute(arr: string[], m: string[] = []) { + if (arr.length === 0) { + result.push(m); + } else { + for (let i = 0; i < arr.length; i++) { + const curr = arr.slice(); + const next = curr.splice(i, 1); + permute(curr.slice(), m.concat(next)); + } + } + } + permute(inputArr); + return result; + } + + const permutations = allPermutations([ + `@ID "test.seq"`, + `@INPUT_PARAMS L01STR L02STR`, + `@LOCALS L01INT L02INT L01UINT L02UINT`, + ]); + permutations.forEach((ordering: string[]) => { + const input = ordering.join('\n\n'); + const actual = sequenceToSeqJson(SeqLanguage.parser.parse(input), input, commandBanana, [], null, 'id'); + const expected = { + id: 'test.seq', + locals: [ + { + name: 'L01INT', + type: 'INT', + }, + { + name: 'L02INT', + type: 'INT', + }, + { + name: 'L01UINT', + type: 'UINT', + }, + { + name: 'L02UINT', + type: 'UINT', + }, + ], + metadata: {}, + parameters: [ + { + name: 'L01STR', + type: 'STRING', + }, + { + name: 'L02STR', + type: 'STRING', + }, + ], + }; + expect(actual).toEqual(expected); + }); + }); +}); diff --git 
a/src/utilities/new-sequence-editor/to-seq-json.ts b/src/utilities/new-sequence-editor/to-seq-json.ts new file mode 100644 index 0000000000..5dc435ab63 --- /dev/null +++ b/src/utilities/new-sequence-editor/to-seq-json.ts @@ -0,0 +1,534 @@ +import type { SyntaxNode, Tree } from '@lezer/common'; +import type { + ChannelDictionary, + CommandDictionary, + FswCommandArgument, + FswCommandArgumentRepeat, + ParameterDictionary, +} from '@nasa-jpl/aerie-ampcs'; +import type { + Args, + BooleanArgument, + Command, + HardwareCommand, + HexArgument, + ImmediateCommand, + Metadata, + Model, + NumberArgument, + RepeatArgument, + SeqJson, + StringArgument, + SymbolArgument, + Time, + VariableDeclaration, +} from '@nasa-jpl/seq-json-schema/types'; +import { customizeSeqJson } from './extension-points'; +import { logInfo } from './logger'; +import { TOKEN_REPEAT_ARG } from './sequencer-grammar-constants'; +import { EPOCH_SIMPLE, EPOCH_TIME, RELATIVE_SIMPLE, RELATIVE_TIME, testTime } from './time-utils'; + +/** + * Returns a minimal valid Seq JSON object. + * Use for getting a default Seq JSON throughout the application. + */ +export function seqJsonDefault(): SeqJson { + return { id: '', metadata: {} }; +} + +/** + * Walks the sequence parse tree and converts it to a valid Seq JSON object. + */ +export function sequenceToSeqJson( + node: Tree, + text: string, + commandDictionary: CommandDictionary | null, + parameterDictionaries: ParameterDictionary[], + channelDictionary: ChannelDictionary | null, + sequenceName: string, +): SeqJson { + const baseNode = node.topNode; + const seqJson: SeqJson = seqJsonDefault(); + const variableList: string[] = []; + + seqJson.id = parseId(baseNode, text, sequenceName); + seqJson.metadata = { ...parseLGO(baseNode), ...parseMetadata(baseNode, text) }; + seqJson.locals = parseVariables(baseNode, text, 'LocalDeclaration') ?? 
undefined; + if (seqJson.locals) { + variableList.push(...seqJson.locals.map(value => value.name)); + } + seqJson.parameters = parseVariables(baseNode, text, 'ParameterDeclaration') ?? undefined; + if (seqJson.parameters) { + variableList.push(...seqJson.parameters.map(value => value.name)); + } + seqJson.steps = + baseNode + .getChild('Commands') + ?.getChildren('Command') + .map(command => parseCommand(command, text, commandDictionary)) ?? undefined; + seqJson.immediate_commands = + baseNode + .getChild('ImmediateCommands') + ?.getChildren('Command') + .map(command => parseImmediateCommand(command, text, commandDictionary)) ?? undefined; + seqJson.hardware_commands = + baseNode + .getChild('HardwareCommands') + ?.getChildren('Command') + .map(command => parseHardwareCommand(command, text)) ?? undefined; + customizeSeqJson(seqJson, parameterDictionaries, channelDictionary); + return seqJson; +} + +function parseLGO(node: SyntaxNode): Metadata | undefined { + const lgoNode = node.getChild('Commands')?.getChild('LoadAndGoDirective'); + if (!lgoNode) { + return undefined; + } + + return { + lgo: true, + }; +} + +function parseArg( + node: SyntaxNode, + text: string, + dictionaryArg: FswCommandArgument | null, +): BooleanArgument | HexArgument | NumberArgument | StringArgument | SymbolArgument | undefined { + const nodeValue = text.slice(node.from, node.to); + + if (node.name === 'Boolean') { + const value = nodeValue === 'TRUE' ? 
true : false; + const booleanArg: BooleanArgument = { type: 'boolean', value }; + if (dictionaryArg) { + booleanArg.name = dictionaryArg.name; + } + return booleanArg; + } else if (node.name === 'Enum') { + const value = nodeValue; + const enumArg: SymbolArgument = { type: 'symbol', value }; + if (dictionaryArg) { + enumArg.name = dictionaryArg.name; + } + return enumArg; + } else if (node.name === 'Number') { + if (nodeValue.slice(0, 2) === '0x') { + const hexArg: HexArgument = { type: 'hex', value: nodeValue }; + if (dictionaryArg) { + hexArg.name = dictionaryArg.name; + } + return hexArg; + } else { + const value = parseFloat(nodeValue); + const numberArg: NumberArgument = { type: 'number', value }; + if (dictionaryArg) { + numberArg.name = dictionaryArg.name; + } + return numberArg; + } + } else if (node.name === 'String') { + const value = JSON.parse(nodeValue); + const arg: StringArgument = { type: 'string', value }; + if (dictionaryArg) { + arg.name = dictionaryArg.name; + } + return arg; + } +} + +export function parseRepeatArgs( + repeatArgsNode: SyntaxNode, + text: string, + dictRepeatArgument: FswCommandArgumentRepeat | null, +) { + const repeatArg: RepeatArgument = { name: dictRepeatArgument?.name, type: 'repeat', value: [] }; + const repeatArgs = dictRepeatArgument?.repeat?.arguments; + const repeatArgsLength = repeatArgs?.length ?? Infinity; + let repeatArgNode: SyntaxNode | null = repeatArgsNode; + + if (repeatArgNode) { + let args: RepeatArgument['value'][0] = []; + let argNode = repeatArgNode.firstChild; + + let i = 0; + while (argNode) { + if (i % repeatArgsLength === 0) { + // [[1 2] [3 4]] in seq.json is flattened in seqN [1 2 3 4] + // dictionary definition is required to disambiguate + args = []; + repeatArg.value.push(args); + } + const arg = parseArg(argNode, text, repeatArgs?.[i % repeatArgsLength] ?? 
null); + if (arg) { + args.push(arg); + } else { + logInfo(`Could not parse arg for node with name ${argNode.name}`); + } + + argNode = argNode.nextSibling; + i++; + } + + repeatArgNode = repeatArgNode.nextSibling; + } + + return repeatArg; +} + +export function parseArgs( + argsNode: SyntaxNode, + text: string, + commandDictionary: CommandDictionary | null, + stem: string, +): Args { + const args: Args = []; + let argNode = argsNode.firstChild; + const dictArguments = commandDictionary?.fswCommandMap[stem]?.arguments ?? []; + let i = 0; + + while (argNode) { + const dictArg = dictArguments[i] ?? null; + if (argNode.name === TOKEN_REPEAT_ARG) { + const arg = parseRepeatArgs(argNode, text, (dictArg as FswCommandArgumentRepeat) ?? null); + if (arg) { + args.push(arg); + } else { + logInfo(`Could not parse repeat arg for node with name ${argNode.name}`); + } + } else { + const arg = parseArg(argNode, text, dictArg); + if (arg) { + args.push(arg); + } else { + logInfo(`Could not parse arg for node with name ${argNode.name}`); + } + } + argNode = argNode?.nextSibling; + ++i; + } + + return args; +} + +/** + * + * @param commandNode + * @param text + * @returns + */ + +/** + * Parses a time tag node and returns a Seq JSON time. + * Defaults to an unknown absolute time if a command does not have a valid time tag. 
+ */ +export function parseTime(commandNode: SyntaxNode, text: string): Time { + const timeTagNode = commandNode.getChild('TimeTag'); + let tag = 'UNKNOWN'; + + if (timeTagNode == null) { + return { tag, type: 'ABSOLUTE' }; + } + + const timeTagAbsoluteNode = timeTagNode.getChild('TimeAbsolute'); + const timeTagCompleteNode = timeTagNode.getChild('TimeComplete'); + const timeTagEpochNode = timeTagNode.getChild('TimeEpoch'); + const timeTagRelativeNode = timeTagNode.getChild('TimeRelative'); + + if (timeTagCompleteNode) { + return { type: 'COMMAND_COMPLETE' }; + } + + if (!timeTagAbsoluteNode && !timeTagEpochNode && !timeTagRelativeNode) { + return { tag, type: 'ABSOLUTE' }; + } + + if (timeTagAbsoluteNode) { + const tag = text.slice(timeTagAbsoluteNode.from + 1, timeTagAbsoluteNode.to).trim(); + return { tag, type: 'ABSOLUTE' }; + } else if (timeTagEpochNode) { + const timeTagEpochText = text.slice(timeTagEpochNode.from + 1, timeTagEpochNode.to).trim(); + + // a regex to determine if this string [+/-]####T##:##:##.### + let match = testTime(timeTagEpochText, EPOCH_TIME); + if (match) { + const [, sign, doy, hh, mm, ss, ms] = match; + tag = `${sign === '-' ? '-' : ''}${doy !== undefined ? doy : ''}${hh ? hh : '00'}:${mm ? mm : '00'}:${ + ss ? ss : '00' + }${ms ? ms : ''}`; + return { tag, type: 'EPOCH_RELATIVE' }; + } + + // a regex to determine if this string [+/-]###.### + match = testTime(timeTagEpochText, EPOCH_SIMPLE); + if (match) { + const [, sign, second, ms] = match; + tag = `${sign === '-' ? '-' : ''}${second ? secondsToHMS(Number(second)) : ''}${ms ? 
ms : ''}`; + return { tag, type: 'EPOCH_RELATIVE' }; + } + } else if (timeTagRelativeNode) { + const timeTagRelativeText = text.slice(timeTagRelativeNode.from + 1, timeTagRelativeNode.to).trim(); + + // a regex to determine if this string ####T##:##:##.### + let match = testTime(timeTagRelativeText, RELATIVE_TIME); + if (match) { + RELATIVE_TIME.lastIndex = 0; + const [, doy, hh, mm, ss, ms] = match; + tag = `${doy !== undefined ? doy : ''}${doy !== undefined ? doy : ''}${hh ? hh : '00'}:${mm ? mm : '00'}:${ + ss ? ss : '00' + }${ms ? ms : ''}`; + return { tag, type: 'COMMAND_RELATIVE' }; + } + match = testTime(timeTagRelativeText, RELATIVE_SIMPLE); + if (match) { + RELATIVE_SIMPLE.lastIndex = 0; + const [, second, ms] = match; + tag = `${second ? secondsToHMS(Number(second)) : ''}${ms ? ms : ''}`; + return { tag, type: 'COMMAND_RELATIVE' }; + } + } + return { tag, type: 'ABSOLUTE' }; +} + +function secondsToHMS(seconds: number): string { + if (typeof seconds !== 'number' || isNaN(seconds)) { + throw new Error(`Expected a valid number for seconds, got ${seconds}`); + } + + const hours: number = Math.floor(seconds / 3600); + const minutes: number = Math.floor((seconds % 3600) / 60); + const remainingSeconds: number = seconds % 60; + + const hoursString = hours.toString().padStart(2, '0'); + const minutesString = minutes.toString().padStart(2, '0'); + const remainingSecondsString = remainingSeconds.toString().padStart(2, '0'); + + return `${hoursString}:${minutesString}:${remainingSecondsString}`; +} + +// min length of one +type VariableDeclarationArray = [VariableDeclaration, ...VariableDeclaration[]]; + +function parseVariables( + node: SyntaxNode, + text: string, + type: 'LocalDeclaration' | 'ParameterDeclaration' = 'LocalDeclaration', +): VariableDeclarationArray | undefined { + const variableContainer = node.getChild(type); + if (!variableContainer) { + return undefined; + } + const variables = variableContainer.getChildren('Enum'); + if (!variables || 
variables.length === 0) { + return undefined; + } + + return variables.map((variable: SyntaxNode) => { + const variableText = text.slice(variable.from, variable.to); + + //parse the text [a-z]D*("UINT"|"INT"|"FLOAT"|"ENUM"|"STR")L07 + const match = /(?:[a-zA-Z]*)(?:[0-9]{2})(INT|UINT|FLT|ENUM|STR)/g.exec(variableText); + if (match) { + const kind = match[1]; + + let type = 'UNKNOWN'; + switch (kind) { + case 'STR': + type = 'STRING'; + break; + case 'FLT': + type = 'FLOAT'; + break; + default: + type = kind; + break; + } + + return { + name: variableText, + type: type as VariableDeclaration['type'], + }; + } else { + return { + name: variableText, + type: 'UNKNOWN' as VariableDeclaration['type'], + }; + } + }) as VariableDeclarationArray; +} + +function parseModel(node: SyntaxNode, text: string): Model[] | undefined { + const modelContainer = node.getChild('Models'); + if (!modelContainer) { + return undefined; + } + + const modelNodes = modelContainer.getChildren('Model'); + if (!modelNodes || modelNodes.length === 0) { + return undefined; + } + + const models: Model[] = []; + for (const modelNode of modelNodes) { + const variableNode = modelNode.getChild('Variable'); + const valueNode = modelNode.getChild('Value'); + const offsetNode = modelNode.getChild('Offset'); + + const variable = variableNode + ? (removeQuotes(text.slice(variableNode.from, variableNode.to)) as string) + : 'UNKNOWN'; + + // Value can be string, number or boolean + let value: Model['value'] = 0; + if (valueNode) { + const valueChild = valueNode.firstChild; + if (valueChild) { + const valueText = text.slice(valueChild.from, valueChild.to); + if (valueChild.name === 'String') { + value = removeQuotes(valueText); + } else if (valueChild.name === 'Boolean') { + value = !/^FALSE$/i.test(valueText); + } else if (valueChild.name === 'Number') { + value = Number(valueText); + } + } + } + const offset = offsetNode ? 
(removeQuotes(text.slice(offsetNode.from, offsetNode.to)) as string) : 'UNKNOWN'; + + models.push({ offset, value, variable }); + } + + return models; +} + +function parseDescription(node: SyntaxNode, text: string): string | undefined { + const descriptionNode = node.getChild('LineComment'); + if (!descriptionNode) { + return undefined; + } + const description = text.slice(descriptionNode.from + 1, descriptionNode.to).trim(); + return removeQuotes(description) as string; +} + +function removeQuotes(text: string | number | boolean): string | number | boolean { + if (typeof text === 'string') { + return text.replace(/^"|"$/g, '').replaceAll('\\"', '"'); + } + return text; +} + +export function parseCommand( + commandNode: SyntaxNode, + text: string, + commandDictionary: CommandDictionary | null, +): Command { + const time = parseTime(commandNode, text); + + const stemNode = commandNode.getChild('Stem'); + const stem = stemNode ? text.slice(stemNode.from, stemNode.to) : 'UNKNOWN'; + + const argsNode = commandNode.getChild('Args'); + const args = argsNode ? parseArgs(argsNode, text, commandDictionary, stem) : []; + + const description = parseDescription(commandNode, text); + const metadata: Metadata | undefined = parseMetadata(commandNode, text); + const models: Model[] | undefined = parseModel(commandNode, text); + + return { + args, + stem, + time, + type: 'command', + ...(description ? { description } : {}), + ...(models ? { models } : {}), + ...(metadata ? { metadata } : {}), + }; +} + +export function parseImmediateCommand( + commandNode: SyntaxNode, + text: string, + commandDictionary: CommandDictionary | null, +): ImmediateCommand { + const stemNode = commandNode.getChild('Stem'); + const stem = stemNode ? text.slice(stemNode.from, stemNode.to) : 'UNKNOWN'; + + const argsNode = commandNode.getChild('Args'); + const args = argsNode ? 
parseArgs(argsNode, text, commandDictionary, stem) : []; + + const description = parseDescription(commandNode, text); + const metadata: Metadata | undefined = parseMetadata(commandNode, text); + + return { + args, + stem, + ...(description ? { description } : {}), + ...(metadata ? { metadata } : {}), + }; +} + +export function parseHardwareCommand(commandNode: SyntaxNode, text: string): HardwareCommand { + const stemNode = commandNode.getChild('Stem'); + const stem = stemNode ? text.slice(stemNode.from, stemNode.to) : 'UNKNOWN'; + const description = parseDescription(commandNode, text); + const metadata: Metadata | undefined = parseMetadata(commandNode, text); + + return { + stem, + ...(description ? { description } : {}), + ...(metadata ? { metadata } : {}), + }; +} + +/** + * This looks for a @ID directive to specify sequence id, if not present use ground name without extensions + * + * @param node - top node of parsed tree + * @param text - text of sequence + * @param sequenceName - ground name of sequence + * @returns + */ +function parseId(node: SyntaxNode, text: string, sequenceName: string): string { + const stringNode = node.getChild('IdDeclaration')?.getChild('String'); + if (!stringNode) { + return sequenceName.split('.')[0]; + } + + const id = JSON.parse(text.slice(stringNode.from, stringNode.to)); + return id; +} + +function parseMetadata(node: SyntaxNode, text: string): Metadata | undefined { + const metadataNode = node.getChild('Metadata'); + if (!metadataNode) { + return undefined; + } + + const metadataEntry = metadataNode.getChildren('MetaEntry'); + if (!metadataEntry || metadataEntry.length === 0) { + return undefined; + } + + const obj: Metadata = {}; + metadataEntry.forEach(entry => { + const keyNode = entry.getChild('Key'); + const valueNode = entry.getChild('Value'); + + if (!keyNode || !valueNode) { + return; // Skip this entry if either the key or value is missing + } + + const keyText = removeQuotes(text.slice(keyNode.from, keyNode.to)) as 
string; + + let value = text.slice(valueNode.from, valueNode.to); + try { + value = JSON.parse(value); + } catch (e) { + logInfo(`Malformed metadata ${value}`); + } + + obj[keyText] = value; + }); + + return obj; +} diff --git a/src/utilities/new-sequence-editor/token.test.ts b/src/utilities/new-sequence-editor/token.test.ts new file mode 100644 index 0000000000..531056f62b --- /dev/null +++ b/src/utilities/new-sequence-editor/token.test.ts @@ -0,0 +1,342 @@ +/* eslint-disable no-undef */ + +import type { SyntaxNode, Tree } from '@lezer/common'; +import assert from 'assert'; +import { readFileSync, readdirSync } from 'fs'; +import * as path from 'path'; +import { fileURLToPath } from 'url'; +import { describe, it } from 'vitest'; +import { SeqLanguage } from '../codemirror'; + +const ERROR = '⚠'; +const ENUM_TOKEN = 'Enum'; +const LINE_COMMENT_TOKEN = 'LineComment'; +const STEM_TOKEN = 'Stem'; +const STRING_TOKEN = 'String'; +const METADATA_TOKEN = 'Metadata'; +const METADATA_ENTRY_TOKEN = 'MetaEntry'; +const ID_DECLARATION = 'IdDeclaration'; +const PARAMETER_DECLARATION = 'ParameterDeclaration'; +const LOCAL_DECLARATION = 'LocalDeclaration'; + +function getMetaType(node: SyntaxNode) { + return node?.firstChild?.nextSibling?.firstChild?.name; +} + +function getMetaValue(node: SyntaxNode, input: string) { + const mv = node?.firstChild?.nextSibling?.firstChild; + return JSON.parse(input.slice(mv!.from, mv!.to)); +} + +describe('metadata', () => { + it('primitive types', () => { + const input = ` +@METADATA "name 1" "string val" +@METADATA "name 2" false +@METADATA "name3" 3 +@METADATA "name4" 4e1 +C STEM +`; + const parseTree = SeqLanguage.parser.parse(input); + assertNoErrorNodes(input); + const topLevelMetaData = parseTree.topNode.getChild(METADATA_TOKEN); + const metaEntries = topLevelMetaData!.getChildren(METADATA_ENTRY_TOKEN); + assert.equal(metaEntries.length, 4); + assert.equal(getMetaType(metaEntries[0]), STRING_TOKEN); + 
assert.equal(getMetaType(metaEntries[1]), 'Boolean'); + assert.equal(getMetaType(metaEntries[2]), 'Number'); + assert.equal(getMetaType(metaEntries[3]), 'Number'); + }); + + it('structured types', () => { + const input = ` +@METADATA "name 1" [ 1,2 , 3 ] +@METADATA "name 2" ["a", true , + 2 ] + +@METADATA "name 3" { + "level1": { + "level2": [ + false, + 1, + "two" + ], + "level2 nest": { + "level3": true + } + } +} + + @METADATA "name 4" {} + +C STEM +`; + assertNoErrorNodes(input); + const parseTree = SeqLanguage.parser.parse(input); + const topLevelMetaData = parseTree.topNode.getChild(METADATA_TOKEN); + const metaEntries = topLevelMetaData!.getChildren(METADATA_ENTRY_TOKEN); + assert.equal(metaEntries.length, 4); + assert.equal(getMetaType(metaEntries[0]), 'Array'); + assert.equal(getMetaType(metaEntries[1]), 'Array'); + assert.equal(getMetaType(metaEntries[2]), 'Object'); + assert.equal(getMetaType(metaEntries[3]), 'Object'); + assert.deepStrictEqual(getMetaValue(metaEntries[0], input), [1, 2, 3]); + assert.deepStrictEqual(getMetaValue(metaEntries[1], input), ['a', true, 2]); + assert.deepStrictEqual(getMetaValue(metaEntries[2], input), { + level1: { + level2: [false, 1, 'two'], + 'level2 nest': { + level3: true, + }, + }, + }); + assert.deepStrictEqual(getMetaValue(metaEntries[3], input), {}); + }); +}); + +describe('header directives', () => { + function getIdValue(parseTree: Tree, input: string) { + const idToken = parseTree.topNode.getChild(ID_DECLARATION)?.firstChild; + if (idToken) { + return input.slice(idToken.from, idToken.to); + } + return null; + } + + function getNodeText(node: SyntaxNode, input: string) { + return input.slice(node.from, node.to); + } + + function allPermutations(inputArr: string[]) { + const result: string[][] = []; + function permute(arr: string[], m: string[] = []) { + if (arr.length === 0) { + result.push(m); + } else { + for (let i = 0; i < arr.length; i++) { + const curr = arr.slice(); + const next = curr.splice(i, 1); + 
permute(curr.slice(), m.concat(next)); + } + } + } + permute(inputArr); + return result; + } + + it('expected id', () => { + const input = ` + @ID "test.name" + + C CMD_NO_ARGS + `; + assertNoErrorNodes(input); + const parseTree = SeqLanguage.parser.parse(input); + assert.equal(getIdValue(parseTree, input), '"test.name"'); + }); + + it('enum id type', () => { + // This is an error in the linter, but an easy mistake to mistake + const input = ` + @ID test_name + + C CMD_NO_ARGS + `; + assertNoErrorNodes(input); + const parseTree = SeqLanguage.parser.parse(input); + assert.equal(getIdValue(parseTree, input), 'test_name'); + }); + + it('number id type', () => { + // This is an error in the linter, but an easy mistake to mistake + const input = ` + @ID 21 + + C CMD_NO_ARGS + `; + assertNoErrorNodes(input); + const parseTree = SeqLanguage.parser.parse(input); + assert.equal(getIdValue(parseTree, input), '21'); + }); + + it('all permutations', () => { + const permutations = allPermutations([ + `@ID "test.seq"`, + `@INPUT_PARAMS L01STR L02STR`, + `@LOCALS L01INT L02INT L01UINT L02UINT`, + ]); + permutations.forEach((ordering: string[]) => { + const input = ordering.join('\n\n'); + assertNoErrorNodes(input); + const parseTree = SeqLanguage.parser.parse(input); + assert.equal(getIdValue(parseTree, input), `"test.seq"`); + assert.deepEqual( + parseTree.topNode + .getChild(LOCAL_DECLARATION) + ?.getChildren(ENUM_TOKEN) + .map(node => getNodeText(node, input)), + ['L01INT', 'L02INT', 'L01UINT', 'L02UINT'], + ); + assert.deepEqual( + parseTree.topNode + .getChild(PARAMETER_DECLARATION) + ?.getChildren(ENUM_TOKEN) + .map(node => getNodeText(node, input)), + ['L01STR', 'L02STR'], + ); + }); + }); +}); + +describe('error positions', () => { + for (const { testname, input, first_error } of [ + { + first_error: 7, + input: 'FSW_CMD%', + testname: 'bad stem ending', + }, + { + first_error: 3, + input: 'FOO$BAR^BAZ', + testname: 'bad stem', + }, + { + first_error: 4, + input: 'FOO "', 
+ testname: 'bad string arg', + }, + { + first_error: 6, + input: 'CMD 12,34', + testname: 'bad number arg', + }, + { + first_error: 24, + input: `COM 12345 + COM "dsa" + @UNKNOWN DIRECTIVE`, + testname: 'good and bad commands', + }, + ]) { + it(testname, () => { + const cursor = SeqLanguage.parser.parse(input).cursor(); + do { + const { node } = cursor; + if (node.type.name === ERROR) { + assert.strictEqual(cursor.from, first_error); + break; + } + } while (cursor.next()); + }); + } +}); + +describe('seqfiles', () => { + const seqDir = path.dirname(fileURLToPath(import.meta.url)) + '/../../tests/mocks/sequencing/sequences'; + for (const file of readdirSync(seqDir)) { + if (!/\.txt$/.test(file)) { + continue; + } + + const name = /^[^.]*/.exec(file)![0]; + it(name, () => { + const input = readFileSync(path.join(seqDir, file), 'utf8'); + // printNodes(input, (ttype) => ttype === ERROR); + assertNoErrorNodes(input); + }); + } +}); + +describe('token positions', () => { + it('comment indentation', () => { + const input = `#COMMENT01 +# COMMENT2 + +CMD0 + + + COMMAND_01 ARG3 "ARG4" 5 + + + # COMMENT3 + + + COMMAND___002 "str_arg_2_0" "str_arg_2_1" + +@METADATA "md3" "foobar"`; + const nodeLocation = (input: string, nodeText: string, position?: number) => { + const from = input.indexOf(nodeText, position); + return { + from, + to: from + nodeText.length, + }; + }; + const expectedCommentLocations = { + [LINE_COMMENT_TOKEN]: [ + nodeLocation(input, '#COMMENT01'), + nodeLocation(input, '# COMMENT2'), + nodeLocation(input, '# COMMENT3'), + ], + [STEM_TOKEN]: [ + nodeLocation(input, 'CMD0'), + nodeLocation(input, 'COMMAND_01'), + nodeLocation(input, 'COMMAND___002'), + ], + }; + const actualCommentLocations: { [name: string]: { from: number; to: number }[] } = {}; + assertNoErrorNodes(input); + const parseTree = SeqLanguage.parser.parse(input); + const cursor = parseTree.cursor(); + do { + const { node } = cursor; + // printNode(input, node); + if ([LINE_COMMENT_TOKEN, 
STEM_TOKEN].includes(node.type.name)) { + const { to, from } = node; + if (actualCommentLocations[node.type.name] === undefined) { + actualCommentLocations[node.type.name] = []; + } + actualCommentLocations[node.type.name].push({ from, to }); + } + } while (cursor.next()); + assert.deepStrictEqual(expectedCommentLocations, actualCommentLocations); + + const cmd2 = parseTree.topNode.getChild('Commands')!.getChildren('Command')[2]; + const cmd2args = cmd2.getChild('Args'); + assert.strictEqual('"str_arg_2_0"', nodeContents(input, cmd2args!.getChildren('String')[0])); + assert.strictEqual('"str_arg_2_1"', nodeContents(input, cmd2args!.getChildren('String')[1])); + assert.strictEqual('"str_arg_2_1"', nodeContents(input, cmd2args!.firstChild!.nextSibling!)); + assert.strictEqual(null, cmd2args!.firstChild!.nextSibling!.nextSibling); // only 2 arguments + + const cmd2meta = cmd2.getChild(METADATA_TOKEN)!.getChild(METADATA_ENTRY_TOKEN); + assert.strictEqual('"md3"', nodeContents(input, cmd2meta!.getChild('Key')!)); + assert.strictEqual('"foobar"', nodeContents(input, cmd2meta!.getChild('Value')!)); + }); +}); + +function assertNoErrorNodes(input: string) { + const cursor = SeqLanguage.parser.parse(input).cursor(); + do { + const { node } = cursor; + assert.notStrictEqual(node.type.name, ERROR); + } while (cursor.next()); +} + +function nodeContents(input: string, node: SyntaxNode) { + return input.substring(node.from, node.to); +} + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +function printNode(input: string, node: SyntaxNode) { + console.log(`${node.type.name}[${node.from}.${node.to}] --> '${nodeContents(input, node)}'`); +} + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +function printNodes(input: string, filter?: (name: string) => boolean) { + const cursor = SeqLanguage.parser.parse(input).cursor(); + do { + const { node } = cursor; + if (!filter || filter(node.type.name)) { + printNode(input, node); + } + } while (cursor.next()); 
+} diff --git a/src/utilities/new-sequence-editor/tree-utils.ts b/src/utilities/new-sequence-editor/tree-utils.ts new file mode 100644 index 0000000000..bf0b90fe89 --- /dev/null +++ b/src/utilities/new-sequence-editor/tree-utils.ts @@ -0,0 +1,55 @@ +import type { SyntaxNode } from '@lezer/common'; + +export function numberOfChildren(node: SyntaxNode): number { + let count = 0; + let child = node.firstChild; + while (child) { + count++; + child = child.nextSibling; + } + return count; +} + +export function getChildrenNode(node: SyntaxNode): SyntaxNode[] { + const children = []; + let child = node.firstChild; + while (child) { + children.push(child); + child = child.nextSibling; + } + return children; +} + +export function getDeepestNode(node: SyntaxNode): SyntaxNode { + let currentNode = node; + while (currentNode.firstChild) { + currentNode = currentNode.firstChild; + } + while (currentNode.nextSibling) { + currentNode = currentNode.nextSibling; + } + return currentNode; +} + +export function getFromAndTo(nodes: (SyntaxNode | null)[]): { from: number; to: number } { + return nodes.reduce( + (acc, node) => { + if (node === null) { + return acc; + } + return { + from: Math.min(acc.from, node.from), + to: Math.max(acc.to, node.to), + }; + }, + { from: Number.MAX_VALUE, to: Number.MIN_VALUE }, + ); +} + +export function getAncestorNode(node: SyntaxNode | null, name: string) { + let ancestorNode: SyntaxNode | null = node; + while (ancestorNode && ancestorNode.name !== name) { + ancestorNode = ancestorNode.parent; + } + return ancestorNode; +} diff --git a/src/utilities/permissions.ts b/src/utilities/permissions.ts index a84a476ae8..0d69c6dd15 100644 --- a/src/utilities/permissions.ts +++ b/src/utilities/permissions.ts @@ -22,7 +22,7 @@ import type { SchedulingGoalDefinition, SchedulingGoalMetadata, } from '../types/scheduling'; -import type { UserSequence } from '../types/sequencing'; +import type { Parcel, UserSequence } from '../types/sequencing'; import type { 
Simulation, SimulationTemplate } from '../types/simulation'; import type { Tag } from '../types/tags'; import type { View, ViewSlim } from '../types/view'; @@ -319,8 +319,8 @@ const queryPermissions = { CREATE_ACTIVITY_PRESET: (user: User | null): boolean => { return isUserAdmin(user) || getPermission([Queries.INSERT_ACTIVITY_PRESET], user); }, - CREATE_COMMAND_DICTIONARY: (user: User | null): boolean => { - return isUserAdmin(user) || getPermission([Queries.UPLOAD_DICTIONARY], user); + CREATE_CHANNEL_DICTIONARY: (user: User | null): boolean => { + return isUserAdmin(user) || getPermission([Queries.INSERT_CHANNEL_DICTIONARY], user); }, CREATE_CONSTRAINT: (user: User | null): boolean => { return isUserAdmin(user) || getPermission([Queries.INSERT_CONSTRAINT_METADATA], user); @@ -331,6 +331,9 @@ const queryPermissions = { CREATE_CONSTRAINT_MODEL_SPECIFICATION: (user: User | null): boolean => { return isUserAdmin(user) || getPermission([Queries.INSERT_CONSTRAINT_MODEL_SPECIFICATION], user); }, + CREATE_DICTIONARY: (user: User | null): boolean => { + return isUserAdmin(user) || getPermission([Queries.INSERT_DICTIONARY], user); + }, CREATE_EXPANSION_RULE: (user: User | null): boolean => { return isUserAdmin(user) || getPermission([Queries.INSERT_EXPANSION_RULE], user); }, @@ -347,6 +350,15 @@ const queryPermissions = { CREATE_MODEL: (user: User | null): boolean => { return isUserAdmin(user) || getPermission([Queries.INSERT_MISSION_MODEL], user); }, + CREATE_PARAMETER_DICTIONARY: (user: User | null): boolean => { + return isUserAdmin(user) || getPermission([Queries.INSERT_PARAMETER_DICTIONARY], user); + }, + CREATE_PARCEL: (user: User | null): boolean => { + return isUserAdmin(user) || getPermission([Queries.INSERT_PARCEL], user); + }, + CREATE_PARCEL_TO_PARAMETER_DICTIONARIES: (user: User | null): boolean => { + return isUserAdmin(user) || getPermission([Queries.INSERT_PARCEL_TO_PARAMETER_DICTIONARY], user); + }, CREATE_PLAN: (user: User | null): boolean => { return 
isUserAdmin(user) || getPermission([Queries.INSERT_PLAN], user); }, @@ -400,6 +412,9 @@ const queryPermissions = { CREATE_SCHEDULING_PLAN_SPECIFICATION: (user: User | null): boolean => { return isUserAdmin(user) || getPermission([Queries.INSERT_SCHEDULING_SPECIFICATION], user); }, + CREATE_SEQUENCE_ADAPTATION: (user: User | null): boolean => { + return isUserAdmin(user) || getPermission([Queries.INSERT_SEQUENCE_ADAPTATION], user); + }, CREATE_SIMULATION_TEMPLATE: (user: User | null): boolean => { return isUserAdmin(user) || getPermission([Queries.INSERT_SIMULATION_TEMPLATE], user); }, @@ -448,6 +463,9 @@ const queryPermissions = { DELETE_ACTIVITY_PRESET: (user: User | null, preset: AssetWithOwner): boolean => { return isUserAdmin(user) || (getPermission([Queries.DELETE_ACTIVITY_PRESET], user) && isUserOwner(user, preset)); }, + DELETE_CHANNEL_DICTIONARY: (user: User | null): boolean => { + return isUserAdmin(user) || getPermission([Queries.DELETE_CHANNEL_DICTIONARY], user); + }, DELETE_COMMAND_DICTIONARY: (user: User | null): boolean => { return isUserAdmin(user) || getPermission([Queries.DELETE_COMMAND_DICTIONARY], user); }, @@ -489,6 +507,15 @@ const queryPermissions = { DELETE_MODEL: (user: User | null): boolean => { return isUserAdmin(user) || getPermission([Queries.DELETE_MISSION_MODEL], user); }, + DELETE_PARAMETER_DICTIONARY: (user: User | null): boolean => { + return isUserAdmin(user) || getPermission([Queries.DELETE_PARAMETER_DICTIONARY], user); + }, + DELETE_PARCEL: (user: User | null, parcel: AssetWithOwner): boolean => { + return isUserAdmin(user) || (getPermission([Queries.DELETE_PARCEL], user) && isUserOwner(user, parcel)); + }, + DELETE_PARCEL_TO_PARAMETER_DICTIONARIES: (user: User | null): boolean => { + return isUserAdmin(user) || getPermission([Queries.DELETE_PARCEL_TO_PARAMETER_DICTIONARY], user); + }, DELETE_PLAN: (user: User | null, plan: PlanWithOwners): boolean => { return ( isUserAdmin(user) || @@ -541,6 +568,10 @@ const queryPermissions = { 
DELETE_SCHEDULING_GOAL_PLAN_SPECIFICATIONS: (user: User | null): boolean => { return isUserAdmin(user) || getPermission([Queries.DELETE_SCHEDULING_SPECIFICATION_GOALS], user); }, + DELETE_SEQUENCE_ADAPTATION: (user: User | null): boolean => { + return isUserAdmin(user) || getPermission([Queries.DELETE_SEQUENCE_ADAPTATION], user); + }, + DELETE_SIMULATION_TEMPLATE: (user: User | null, template: SimulationTemplate): boolean => { return ( isUserAdmin(user) || (getPermission([Queries.DELETE_SIMULATION_TEMPLATE], user) && isUserOwner(user, template)) @@ -581,7 +612,10 @@ const queryPermissions = { GET_EXPANSION_SEQUENCE_SEQ_JSON: () => true, GET_EXTENSIONS: () => true, GET_MODELS: () => true, + GET_PARCEL: () => true, + GET_PARSED_CHANNEL_DICTIONARY: () => true, GET_PARSED_COMMAND_DICTIONARY: () => true, + GET_PARSED_PARAMETER_DICTIONARY: () => true, GET_PERMISSIBLE_QUERIES: () => true, GET_PLAN: (user: User | null): boolean => { return isUserAdmin(user) || getPermission([Queries.PLAN], user); @@ -599,6 +633,7 @@ const queryPermissions = { GET_ROLE_PERMISSIONS: () => true, GET_SCHEDULING_SPEC_CONDITIONS_FOR_CONDITION: () => true, GET_SCHEDULING_SPEC_GOALS_FOR_GOAL: () => true, + GET_SEQUENCE_ADAPTATION: () => true, GET_SIMULATION_DATASET_ID: () => true, GET_SPANS: () => true, GET_TYPESCRIPT_ACTIVITY_TYPE: () => true, @@ -727,6 +762,7 @@ const queryPermissions = { }, SUB_ACTIVITY_TYPES: () => true, SUB_ANCHOR_VALIDATION_STATUS: () => true, + SUB_CHANNEL_DICTIONARIES: () => true, SUB_COMMAND_DICTIONARIES: () => true, SUB_CONSTRAINT: () => true, SUB_CONSTRAINTS: (user: User | null): boolean => { @@ -744,6 +780,11 @@ const queryPermissions = { }, SUB_MODEL: () => true, SUB_MODELS: () => true, + SUB_PARAMETER_DICTIONARIES: () => true, + SUB_PARCELS: (user: User | null): boolean => { + return isUserAdmin(user) || getPermission([Queries.PARCELS], user); + }, + SUB_PARCEL_TO_PARAMETER_DICTIONARIES: () => true, SUB_PLANS_USER_WRITABLE: () => true, SUB_PLAN_DATASET: () => true, 
SUB_PLAN_LOCKED: () => true, @@ -770,6 +811,7 @@ const queryPermissions = { return isUserAdmin(user) || getPermission([Queries.SCHEDULING_SPECIFICATION], user); }, SUB_SCHEDULING_REQUESTS: () => true, + SUB_SEQUENCE_ADAPTATIONS: () => true, SUB_SIMULATION: (user: User | null): boolean => { return isUserAdmin(user) || getPermission([Queries.SIMULATIONS], user); }, @@ -851,6 +893,9 @@ const queryPermissions = { UPDATE_MODEL: (user: User | null) => { return isUserAdmin(user) && getPermission([Queries.UPDATE_MISSION_MODEL], user); }, + UPDATE_PARCEL: (user: User | null, parcel: AssetWithOwner): boolean => { + return isUserAdmin(user) || (getPermission([Queries.UPDATE_PARCEL], user) && isUserOwner(user, parcel)); + }, UPDATE_PLAN_SNAPSHOT: (user: User | null): boolean => { return getPermission([Queries.UPDATE_PLAN_SNAPSHOT], user); }, @@ -1133,6 +1178,7 @@ interface FeaturePermissions { expansionSequences: ExpansionSequenceCRUDPermission>; expansionSets: ExpansionSetsCRUDPermission>; model: CRUDPermission; + parcels: CRUDPermission>; plan: CRUDPermission; planBranch: PlanBranchCRUDPermission; planCollaborators: PlanCollaboratorsCRUDPermission; @@ -1166,7 +1212,7 @@ const featurePermissions: FeaturePermissions = { canUpdate: (user, _plan, preset) => queryPermissions.UPDATE_ACTIVITY_PRESET(user, preset), }, commandDictionary: { - canCreate: user => queryPermissions.CREATE_COMMAND_DICTIONARY(user), + canCreate: user => queryPermissions.CREATE_DICTIONARY(user), canDelete: user => queryPermissions.DELETE_COMMAND_DICTIONARY(user), canRead: () => false, // Not implemented canUpdate: () => false, // Not implemented @@ -1211,6 +1257,12 @@ const featurePermissions: FeaturePermissions = { canRead: user => queryPermissions.GET_PLANS_AND_MODELS(user), canUpdate: user => queryPermissions.UPDATE_MODEL(user), }, + parcels: { + canCreate: user => queryPermissions.CREATE_PARCEL(user), + canDelete: (user, parcel) => queryPermissions.DELETE_PARCEL(user, parcel), + canRead: user => 
queryPermissions.SUB_PARCELS(user), + canUpdate: (user, parcel) => queryPermissions.UPDATE_PARCEL(user, parcel), + }, plan: { canCreate: user => queryPermissions.CREATE_PLAN(user), canDelete: (user, plan) => queryPermissions.DELETE_PLAN(user, plan), diff --git a/vite.config.js b/vite.config.js index 934afdcc5f..6e64ee9117 100644 --- a/vite.config.js +++ b/vite.config.js @@ -3,6 +3,7 @@ import svg from '@poppanator/sveltekit-svg'; import basicSsl from '@vitejs/plugin-basic-ssl'; import { defineConfig, loadEnv } from 'vite'; import { WorkerBuildPlugin } from './vite.worker-build-plugin'; +import { lezer } from '@lezer/generator/rollup'; const config = ({ mode }) => { const viteEnvVars = loadEnv(mode, process.cwd()); @@ -15,6 +16,7 @@ const config = ({ mode }) => { }, plugins: [ ...(viteEnvVars.VITE_HTTPS === 'true' ? [basicSsl()] : []), + lezer(), sveltekit(), svg({ svgoOptions: {