diff --git a/src/utilities/codemirror/satf/sasf.test.ts b/src/utilities/codemirror/satf/sasf.test.ts new file mode 100644 index 0000000000..2571b7c44b --- /dev/null +++ b/src/utilities/codemirror/satf/sasf.test.ts @@ -0,0 +1,106 @@ +import { testTree } from '@lezer/generator/test'; +import { describe, expect, test } from 'vitest'; +import { SatfLanguage } from './satf'; + +const tests = [ + [ + 'Empty SASF', + ` + $$EOH + $$EOD + $$EOF + `, + `satf_sasf(Sasf( + Header, + Body + ))`, + ], + [ + 'Sample SASF', + ` + $$EOH + $$EOD + request(VFT2_REQUEST_01, + START_TIME, 2024-266T19:59:57, + REQUESTOR, "vft2_anichola", + PROCESSOR, "VC2AB", + KEY, "No_Key") + + command(1, + SCHEDULED_TIME,\\00:00:01\\,FROM_PREVIOUS_START, + COMMENT,\\"VFT2_short_name : sada_pause_test_main_seq", + "VFT2_full_name : psy.abs.vc2ab.sada_pause_test_main_seq.r02", + "VFT2_onboard_name: sada_pause_test_main_seq", + "VFT2_product_type: load_and_go_abs", + "VFT2_pce : AB", + "VFT2_vc : 2", + "cumulative_time is 1 sec (2024-266T19:59:58)"\\, + FILE_REMOVE("/eng/seq/sada_pause_test_main_seq.abs") + ) + end + $$EOF`, + `satf_sasf(Sasf(Header, + Body( + Requests( + Request(RequestName,Time,Requestor,Processor,Key, + Steps(Command(StepLabel,ScheduledTime(Time,TimeRelation), + Comment,Stem,Args(String))))))))`, + ], + [ + 'Mulitple Requests', + ` + $$EOH + $$EOD + request(VFT2_REQUEST_01, + START_TIME, 2024-266T19:59:57, + REQUESTOR, "vft2_anichola", + PROCESSOR, "VC2AB", + KEY, "No_Key") + + command(1, + SCHEDULED_TIME,\\00:00:01\\,FROM_PREVIOUS_START, + COMMENT,\\"VFT2_short_name : sada_pause_test_main_seq", + "VFT2_full_name : psy.abs.vc2ab.sada_pause_test_main_seq.r02", + "VFT2_onboard_name: sada_pause_test_main_seq", + "VFT2_product_type: load_and_go_abs", + "VFT2_pce : AB", + "VFT2_vc : 2", + "cumulative_time is 1 sec (2024-266T19:59:58)"\\, + FILE_REMOVE("/eng/seq/sada_pause_test_main_seq.abs") + ) + end, + request(VFT2_REQUEST_02, + START_TIME, 2024-266T19:59:57, + REQUESTOR, 
"vft2_anichola", + PROCESSOR, "VC2AB", + KEY, "No_Key") + + command(1, + SCHEDULED_TIME,\\00:00:01\\,FROM_PREVIOUS_START, + FILE_REMOVE("/test.abs") + ) + end + $$EOF`, + `satf_sasf(Sasf(Header,Body( + Requests( + Request(RequestName,Time,Requestor,Processor,Key, + Steps(Command(StepLabel,ScheduledTime(Time,TimeRelation),Comment,Stem,Args(String)))), + Request(RequestName,Time,Requestor,Processor,Key, + Steps(Command(StepLabel,ScheduledTime(Time,TimeRelation),Stem,Args(String)))) + ) + )))`, + ], +]; + +describe.each([['parse tree structure', tests]])('grammar tests - %s', (_name: string, testArray: string[][]) => { + test.each(testArray)('%s', (_: string, input: string, expected: string) => { + /* The Lezer parser is "Error-Insensitive" + "Being designed for the code editor use case, the parser is equipped with strategies for recovering + from syntax errors, and can produce a tree for any input." - (https://lezer.codemirror.net/) as such + it always returns a tree, though the tree may have error tokens ("⚠"). + + testTree will throw if there's a mismatch between the returned actual and expected trees, it returns + undefined when they match. 
*/ + expect(testTree(SatfLanguage.parser.parse(input), expected, undefined)).toBeUndefined(); + }); +}); diff --git a/src/utilities/codemirror/satf/satf-sasf-utils.test.ts b/src/utilities/codemirror/satf/satf-sasf-utils.test.ts new file mode 100644 index 0000000000..93a1f3cf5e --- /dev/null +++ b/src/utilities/codemirror/satf/satf-sasf-utils.test.ts @@ -0,0 +1,323 @@ +import { describe, expect, it } from 'vitest'; +import { satfToSequence } from './satf-sasf-utils'; + +describe('satfToSequence', () => { + it('should return empty header and sequences for empty SATF string', async () => { + const satf = ''; + const result = await satfToSequence(satf); + expect(result).toEqual({ header: '', sequences: [] }); + }); + + it('should return empty for invalid SATF string', async () => { + const satf = ' invalid satf string '; + + const result = await satfToSequence(satf); + expect(result).toEqual({ header: '', sequences: [] }); + }); + + it('should parse valid SATF string with header and sequences', async () => { + const satf = ` + $$EOH + CCS3ZF0000100000001NJPL3KS0L015$$MARK$$; + MISSION_NAME = TEST; + CCSD3RE00000$$MARK$$NJPL3IF0M01400000001; + $$TEST SPACECRAFT ACTIVITY TYPE FILE + ************************************************************ + *PROJECT TEST + *SPACECRAFT 000 + *Input files used: + *File Type Last modified File name + *SC_MODEL Thu Jan 01 00:00:00 UTC 1970 /Default Sequence Project/SC_MODEL/ + ************************************************************ + $$EOH + absolute(temp,\\temp\\) + $$EOF + `; + const result = await satfToSequence(satf); + expect(result).toHaveProperty('header'); + expect(result).toHaveProperty('sequences'); + expect(result.sequences).toBeInstanceOf(Array); + }); + + it('should return empty sequences for SATF string with missing sequences', async () => { + const satf = ` + CCS3ZF0000100000001NJPL3KS0L015$$MARK$$; + MISSION_NAME = TEST; + CCSD3RE00000$$MARK$$NJPL3IF0M01400000001; + $$TEST SPACECRAFT ACTIVITY TYPE FILE + 
************************************************************ + *PROJECT TEST + *SPACECRAFT 000 + *Input files used: + *File Type Last modified File name + *SC_MODEL Thu Jan 01 00:00:00 UTC 1970 /Default Sequence Project/SC_MODEL/ + ************************************************************ + $$EOH + $$EOF + `; + const result = await satfToSequence(satf); + expect(result).toHaveProperty('header'); + expect(result.sequences).toEqual([]); + }); + + it('should return empty header for SATF string with missing header', async () => { + const satf = ` + $$EOH + absolute(temp,\\temp\\) + $$EOF + `; + const result = await satfToSequence(satf); + expect(result).toHaveProperty('sequences'); + expect(result.header).toEqual(''); + }); + + it('should return valid sequence with models', async () => { + const satf = ` + $$EOH + ABSOLUTE_SEQUENCE(test,\\testv01\\, + STEPS, + command ( + 3472, SCHEDULED_TIME, \\00:01:00\\, FROM_ACTIVITY_START, INCLUSION_CONDITION, \\param_rate == receive_rate\\, + DRAW, \\VERTICAL\\, + COMMENT, \\This command turns, to correct position.\\, ASSUMED_MODEL_VALUES, \\x=1,z=1.1,y="abc"\\, + 01VV (param6, 10, false, "abc"), + PROCESSORS, "PRI", end), + end + ) + $$EOF + `; + const result = await satfToSequence(satf); + expect(result).toHaveProperty('sequences'); + expect(result.sequences[0].name).toStrictEqual('test'); + expect(result.sequences[0].sequence).toStrictEqual(`## test +R00:01:00 01VV param6 10 false "abc" # This command turns, to correct position. 
+@MODEL(x,1,"00:00:00") +@MODEL(z,1.1,"00:00:00") +@MODEL(y,"abc","00:00:00")`); + }); + + it('should return multiple sequence with models', async () => { + const satf = ` + $$EOH + ABSOLUTE_SEQUENCE(test,\\testv01\\, + STEPS, + command ( + 3472, SCHEDULED_TIME, \\00:01:00\\, FROM_ACTIVITY_START, INCLUSION_CONDITION, \\param_rate == receive_rate\\, + DRAW, \\VERTICAL\\, + COMMENT, \\This command turns, to correct position.\\, ASSUMED_MODEL_VALUES, \\x=1,z=1.1,y="abc"\\, + 01VV (param6, 10, false, "abc"), + PROCESSORS, "PRI", end), + end + ), + RT_on_board_block(test2,\\testv02\\, + STEPS, + command ( + 3472, SCHEDULED_TIME, \\00:01:00\\, FROM_ACTIVITY_START, INCLUSION_CONDITION, \\param_rate == receive_rate\\, + DRAW, \\VERTICAL\\, + COMMENT, \\This command turns, to correct position.\\, ASSUMED_MODEL_VALUES, \\x=1,z=1.1,y="abc"\\, + 01VV (param6, 10, false, "abc"), + PROCESSORS, "PRI", end), + end + ) + $$EOF + `; + const result = await satfToSequence(satf); + expect(result).toHaveProperty('sequences'); + expect(result.sequences.length).toBe(2); + expect(result.sequences[0].name).toStrictEqual('test'); + expect(result.sequences[0].sequence).toStrictEqual(`## test +R00:01:00 01VV param6 10 false "abc" # This command turns, to correct position. 
+@MODEL(x,1,"00:00:00") +@MODEL(z,1.1,"00:00:00") +@MODEL(y,"abc","00:00:00")`); + }); +}); + +describe('sasfToSequence', () => { + it('should return empty header and sequences for empty SATF string', async () => { + const sasf = ''; + const result = await satfToSequence(sasf); + expect(result).toEqual({ header: '', sequences: [] }); + }); + + it('should return empty invalid SATF string', async () => { + const sasf = ' invalid satf string '; + + const result = await satfToSequence(sasf); + expect(result).toEqual({ header: '', sequences: [] }); + }); + + it('should parse valid SASF string with header and sequences', async () => { + const sasf = ` + $$EOH + CCS3ZF0000100000001NJPL3KS0L015$$MARK$$; + MISSION_NAME = TEST; + CCSD3RE00000$$MARK$$NJPL3IF0M01400000001; + $$TEST SPACECRAFT ACTIVITY TYPE FILE + ************************************************************ + *PROJECT TEST + *SPACECRAFT 000 + *Input files used: + *File Type Last modified File name + *SC_MODEL Thu Jan 01 00:00:00 UTC 1970 /Default Sequence Project/SC_MODEL/ + ************************************************************ + $$EOH + $$EOD + $$EOF + `; + const result = await satfToSequence(sasf); + expect(result).toHaveProperty('header'); + expect(result).toHaveProperty('sequences'); + expect(result.sequences).toBeInstanceOf(Array); + }); + + it('should return valid request with models', async () => { + const sasf = ` + $$EOH + $$EOD + request(VFT2_REQUEST_01, + START_TIME, 2024-266T19:59:57, + REQUESTOR, "me", + PROCESSOR, "VC2AB", + KEY, "No_Key") + + command(1, + SCHEDULED_TIME,\\00:00:01\\,FROM_PREVIOUS_START, + COMMENT,\\"this is a comment"\\, + FILE_REMOVE("/eng/seq/awesome.abs") + ), + command(2, + SCHEDULED_TIME,\\00:00:01\\,FROM_PREVIOUS_START, + COMMENT,\\"cumulative_time is 2 sec"\\, + USER_SEQ_ECHO("SEQ awesome COMPLETION IN 2 MINS") + ), + end; + $$EOF + `; + const result = await satfToSequence(sasf); + expect(result).toHaveProperty('sequences'); + 
expect(result.sequences[0].name).toStrictEqual('VFT2_REQUEST_01'); + expect(result.sequences[0].sequence).toStrictEqual(`R2024-266T19:59:57 @REQUEST_BEGIN("VFT2_REQUEST_01") + R00:00:01 FILE_REMOVE "/eng/seq/awesome.abs" # "this is a comment" + R00:00:01 USER_SEQ_ECHO "SEQ awesome COMPLETION IN 2 MINS" # "cumulative_time is 2 sec" +@REQUEST_END +@METADATA("REQUESTOR":"me") +@METADATA("PROCESSOR":"VC2AB") +@METADATA("KEY":"No_Key") +`); + }); + + it('Parameters', async () => { + const satf = ` + $$EOH + RT_on_board_block(/start.txt,\\start\\, + PARAMETERS, + unsigned_decimal( + TYPE,UNSIGNED_DECIMAL, + RANGE,\\10.01...99.99\\, + RANGE,\\100...199.99\\, + ), + signed_decimal( + TYPE,SIGNED_DECIMAL, + DEFAULT, 10 + RANGE,\\10, 90000, 120000, 150000, 360001\\, + HELP, \\This is a help\\ + ), + hex( + TYPE,HEXADECIMAL, + RANGE,\\0x00...0xff\\ + ), + octal( + TYPE,OCTAL, + DEFAULT, 10 + RANGE,\\0, 1, 2, 3, 4, 5, 6, 7\\ + ), + binary( + TYPE,BINARY, + RANGE,\\0, 1\\), + engine( + TYPE,ENGINEERING, + ), + time( + TYPE,TIME, + RANGE,\\0T00:00:00...100T00:00:00\\ + ), + duration( + TYPE,DURATION, + DEFAULT, \\00:01:00\\ + ), + enum( + TYPE,STRING, + ENUM_NAME,\\STORE_NAME\\, + DEFAULT, \\BOB_HARDWARE\\, + RANGE,\\BOB_HARDWARE, SALLY_FARM, "TIM_FLOWERS"\\ + ), + string( + TYPE,STRING, + DEFAULT, abc + ), + quoted_string( + TYPE,QUOTED_STRING, + DEFAULT, "abc" + RANGES,\\"abc", "123"\\ + ), + end, + STEPS, + command ( + 1, SCHEDULED_TIME, \\00:01:00\\, FROM_ACTIVITY_START, + NOOP() + end + ) + $$EOF + `; + const result = await satfToSequence(satf); + expect(result).toHaveProperty('sequences'); + expect(result.sequences[0].name).toStrictEqual('start.txt'); + expect(result.sequences[0].sequence).toStrictEqual(`## /start.txt +@INPUT_PARAMS_BEGIN +unsigned_decimal UINT "10.01...99.99, 100...199.99" +signed_decimal INT "" "10, 90000, 120000, 150000, 360001" +hex STRING "0x00...0xff" +octal STRING "" "0, 1, 2, 3, 4, 5, 6, 7" +binary STRING "" "0, 1" +engine FLOAT +time STRING 
"0T00:00:00...100T00:00:00" +duration STRING +enum ENUM STORE_NAME "" "BOB_HARDWARE, SALLY_FARM, TIM_FLOWERS" +string STRING +quoted_string STRING "" "abc, 123" +@INPUT_PARAMS_END + +R00:01:00 NOOP`); + }); + + it('Quoted Parameters', async () => { + const satf = ` + $$EOH + RT_on_board_block(/start.txt,\\start\\, + PARAMETERS, + attitude_spec( + TYPE,STRING, + ENUM_NAME,\\STORE_NAME\\, + RANGE,\\"BOB_HARDWARE", "SALLY_FARM", "TIM_FLOWERS"\\ + ), + end, + STEPS, + command ( + 1, SCHEDULED_TIME, \\00:01:00\\, FROM_ACTIVITY_START, INCLUSION_CONDITION, \\param_rate == receive_rate\\, + ECHO ("abc") + end + ) + $$EOF + `; + const result = await satfToSequence(satf); + expect(result).toHaveProperty('sequences'); + expect(result.sequences[0].name).toStrictEqual('start.txt'); + expect(result.sequences[0].sequence).toStrictEqual(`## /start.txt +@INPUT_PARAMS_BEGIN +attitude_spec ENUM STORE_NAME "" "BOB_HARDWARE, SALLY_FARM, TIM_FLOWERS" +@INPUT_PARAMS_END + +R00:01:00 ECHO "abc"`); + }); +}); diff --git a/src/utilities/codemirror/satf/satf-sasf-utils.ts b/src/utilities/codemirror/satf/satf-sasf-utils.ts new file mode 100644 index 0000000000..6018d8ebec --- /dev/null +++ b/src/utilities/codemirror/satf/satf-sasf-utils.ts @@ -0,0 +1,888 @@ +import type { SyntaxNode } from '@lezer/common'; +import { Tree } from '@lezer/common'; +import type { CommandDictionary, FswCommandArgument } from '@nasa-jpl/aerie-ampcs'; +import { TimeTypes } from '../../../enums/time'; +import { unquoteUnescape } from '../../codemirror/codemirror-utils'; +import { getBalancedDuration, getDurationTimeComponents, parseDurationString, validateTime } from '../../time'; +import { SatfLanguage } from './satf'; +import { + ACTIVITY_TYPE_DEFINITIONS, + ACTIVITY_TYPE_GROUP, + ACTIVITY_TYPE_NAME, + ARGS, + ARITHMETICAL, + ASSUMED_MODEL_VALUES, + BODY, + BOOLEAN, + COMMAND, + COMMANDS, + COMMENT, + ENGINE, + ENTRY, + ENUM, + ENUM_NAME, + EPOCH, + GLOBAL, + HEADER, + HEADER_PAIR, + HEADER_PAIRS, + KEY, + 
LINE_COMMENT, + MODEL, + NAME, + NUMBER, + ON_BOARD_FILENAME, + ON_BOARD_PATH, + PARAM_BINARY, + PARAM_DURATION, + PARAM_ENGINEERING, + PARAM_HEXADECIMAL, + PARAM_OCTAL, + PARAM_QUOTED_STRING, + PARAM_SIGNED_DECIMAL, + PARAM_TIME, + PARAM_UNSIGNED_DECIMAL, + PARAMETERS, + PROCESSOR, + RANGE, + REQUEST, + REQUEST_NAME, + REQUESTOR, + REQUESTS, + SASF, + SATF, + SCHEDULED_TIME, + SEQGEN, + SFDU_HEADER, + STEM, + STEPS, + STRING, + TIME, + TIME_RELATION, + TYPE, + VALUE, + VAR_ENUM, + VAR_FLOAT, + VAR_INT, + VAR_STRING, + VAR_UINT, + VARIABLES, + VIRTUAL_CHANNEL, +} from './satfConstants'; + +/** + * Parses the metadata from seqN. + * @param tree - The SeqN tree of the sequence. + * @param sequence - The sequence string. + * @returns An object with a single key, "metadata", which is an object of key-value pairs. + * The keys are the names of the metadata entries, and the values are the corresponding values. + */ +export function parseMetadata(tree: Tree, sequence: string): { metadata: Record } { + const metadata: Record = {}; + const metadataEntries = tree.topNode.getChild('Metadata')?.getChildren('MetaEntry') ?? []; + + for (const entry of metadataEntries) { + const keyNode = entry.getChild('Key'); + const valueNode = entry.getChild('Value'); + + if (keyNode === null || valueNode === null) { + continue; + } + + const key = unquoteUnescape(sequence.slice(keyNode.from, keyNode.to)); + const value = sequence.slice(valueNode.from, valueNode.to); + + metadata[key] = value; + } + + let commentMetadata = tree.topNode.getChildren('LineComment'); + if (commentMetadata.length === 0) { + commentMetadata = tree.topNode.getChild('Commands')?.getChildren('LineComment') ?? []; + } + + for (const comment of commentMetadata) { + const text = sequence.slice(comment.from, comment.to); + const [key, value] = text.split('=').map(unquoteUnescape); + + if (key && value) { + metadata[key] = value; + } + } + + return { metadata }; +} + +/** + * Converts a SeqN tree into SATF/SASF Steps. 
+ * + * @param tree - The SeqN syntax tree representing the parsed sequence. + * @param sequence - The sequence string. + * @param commandDictionary - An optional dictionary containing command definitions. + * @returns A formatted SATF/SASF string of Steps or undefined if no commands are found. + */ +export function generateSatfSteps( + tree: Tree, + sequence: string, + commandDictionary: CommandDictionary | null, +): string | undefined { + let stepNode = tree.topNode.getChild(COMMANDS)?.firstChild; + let steps = []; + while (stepNode) { + steps.push(stepNode); + stepNode = stepNode.nextSibling; + } + + steps = steps.filter((step: SyntaxNode) => step.name !== LINE_COMMENT); + + if (steps === null || steps.length === 0) { + return undefined; + } + return `STEPS,\n${steps + .map((step, index) => { + return parseStep(step, sequence, commandDictionary, 1 + index++); + }) + .filter((step: string | undefined) => step) + .join('\n')}\nend`; +} + +export function parseStep( + child: SyntaxNode | null, + text: string, + commandDictionary: CommandDictionary | null, + order: number, +): string | undefined { + switch (child?.name) { + case 'Command': + return parseCommand(child, text, commandDictionary, order); + case 'Activate': + return parseActivate(child, text, order); + case 'Load': + case 'GroundBlock': + case 'GroundEvent': + default: + return undefined; + } +} + +function parseCommand( + commandNode: SyntaxNode, + sequence: string, + commandDictionary: CommandDictionary | null, + order: number, +): string { + const time = parseTime(commandNode, sequence); + + const stemNode = commandNode.getChild('Stem'); + const stem = stemNode ? sequence.slice(stemNode.from, stemNode.to) : 'UNKNOWN'; + + const argsNode = commandNode.getChild('Args'); + const args = argsNode ? 
parseArgs(argsNode, sequence, commandDictionary, stem) : []; + + return `${'\t'}command(${order}, + ${'\t'.repeat(2)}SCHEDULED_TIME,\\${time.tag}\\,${time.type}, + ${'\t'.repeat(2)}${stem}(${serializeArgs(args)}) + ${'\t'}),`; +} + +function parseActivate(stepNode: SyntaxNode, sequence: string, order: number): string { + const nameNode = stepNode.getChild('SequenceName'); + const sequenceName = nameNode ? unquoteUnescape(sequence.slice(nameNode.from, nameNode.to)) : 'UNKNOWN'; + const time = parseTime(stepNode, sequence); + + const argsNode = stepNode.getChild('Args'); + const args = argsNode ? parseArgs(argsNode, sequence, null, sequenceName) : []; + + const engine = parseEngine(stepNode, sequence); + const epoch = parseEpoch(stepNode, sequence); + + return `${'\t'}SPAWN(${order}, + ${'\t'.repeat(2)}SCHEDULED_TIME,\\${time.tag}\\,${time.type},${ + engine !== undefined + ? ` + ${'\t'.repeat(2)}RETURN_ENGINE_ID_TO,\\${engine}\\,` + : '' + }${ + epoch !== undefined + ? ` + ${'\t'.repeat(2)}EPOCH,${epoch},` + : '' + } + ${'\t'.repeat(2)}RT_on_board_block(${sequenceName},${serializeArgs(args)}) + ${'\t'}),`; +} + +function parseEngine(stepNode: SyntaxNode, text: string): number | undefined { + const engineNode = stepNode.getChild(ENGINE)?.getChild(NUMBER); + return engineNode ? parseInt(text.slice(engineNode.from, engineNode.to), 10) : undefined; +} + +function parseEpoch(stepNode: SyntaxNode, text: string): string | undefined { + const epochNode = stepNode.getChild(EPOCH)?.getChild(STRING); + return epochNode ? 
unquoteUnescape(text.slice(epochNode.from, epochNode.to)) : undefined; +} + +function parseTime( + commandNode: SyntaxNode, + sequence: string, +): { + tag: string; + type: 'UNKNOWN' | 'ABSOLUTE' | 'WAIT_PREVIOUS_END' | 'EPOCH' | 'FROM_PREVIOUS_START' | 'GROUND_EPOCH'; +} { + const tag = '00:00:01'; + const timeTagNode = commandNode.getChild('TimeTag'); + if (timeTagNode === null) { + return { tag: '00:00:00', type: 'UNKNOWN' }; + } + + const time = timeTagNode.firstChild; + if (time === null) { + return { tag, type: 'UNKNOWN' }; + } + + const timeValue = sequence.slice(time.from + 1, time.to).trim(); + + if (time.name === 'TimeComplete') { + return { tag, type: 'WAIT_PREVIOUS_END' }; + } else if (time.name === 'TimeGroundEpoch') { + const parentNode = time.parent; + + if (parentNode) { + // ex: G+3:00 "GroundEpochName" + const parentNodeText = sequence.slice(parentNode.from, parentNode.to); + const splitParentNodeText = parentNodeText.slice(1, parentNodeText.length).split(' '); + + if (splitParentNodeText.length > 0) { + const tag = splitParentNodeText[0]; + const epochName = unquoteUnescape(splitParentNodeText[1]); + + return { tag: `${epochName}${tag}`, type: 'GROUND_EPOCH' }; + } + } + } else { + if (validateTime(timeValue, TimeTypes.ABSOLUTE)) { + return { tag: timeValue, type: 'ABSOLUTE' }; + } else if (validateTime(timeValue, TimeTypes.EPOCH)) { + const { isNegative, days, hours, minutes, seconds, milliseconds } = getDurationTimeComponents( + parseDurationString(timeValue, 'seconds'), + ); + return { tag: `${isNegative}${days}${hours}:${minutes}:${seconds}${milliseconds}`, type: 'EPOCH' }; + } else if (validateTime(timeValue, TimeTypes.EPOCH_SIMPLE)) { + let balancedTime = getBalancedDuration(timeValue); + if (parseDurationString(balancedTime, 'seconds').milliseconds === 0) { + balancedTime = balancedTime.slice(0, -4); + } + return { tag: balancedTime, type: 'EPOCH' }; + } else if (validateTime(timeValue, TimeTypes.RELATIVE)) { + const { isNegative, days, 
hours, minutes, seconds, milliseconds } = getDurationTimeComponents( + parseDurationString(timeValue, 'seconds'), + ); + return { tag: `${isNegative}${days}${hours}:${minutes}:${seconds}${milliseconds}`, type: 'FROM_PREVIOUS_START' }; + } else if (validateTime(timeValue, TimeTypes.RELATIVE_SIMPLE)) { + let balancedTime = getBalancedDuration(timeValue); + if (parseDurationString(balancedTime).milliseconds === 0) { + balancedTime = balancedTime.slice(0, -4); + } + return { tag: balancedTime, type: 'FROM_PREVIOUS_START' }; + } + } + return { tag, type: 'UNKNOWN' }; +} + +function parseArgs( + argsNode: SyntaxNode, + sequence: string, + commandDictionary: CommandDictionary | null, + stem: string, +): { name?: string; type: 'boolean' | 'enum' | 'number' | 'string'; value: boolean | string }[] { + const args = []; + let argNode = argsNode.firstChild; + const dictArguments = commandDictionary?.fswCommandMap[stem]?.arguments ?? []; + let i = 0; + + while (argNode) { + const dictionaryArg = dictArguments[i] ?? null; + const arg = parseArg(argNode, sequence, dictionaryArg); + + if (arg !== undefined) { + args.push(arg); + } + + argNode = argNode?.nextSibling; + i++; + } + + return args; +} + +function parseArg( + argNode: SyntaxNode, + sequence: string, + dictionaryArg: FswCommandArgument | null, +): { name?: string | undefined; type: 'boolean' | 'enum' | 'number' | 'string'; value: boolean | string } | undefined { + const nodeValue = sequence.slice(argNode.from, argNode.to); + + switch (argNode.name) { + case 'Boolean': { + return { + name: dictionaryArg ? dictionaryArg.name : undefined, + type: 'boolean' as const, + value: nodeValue === 'TRUE' ? true : false, + }; + } + case 'Enum': { + return { name: dictionaryArg ? dictionaryArg.name : undefined, type: 'enum' as const, value: nodeValue }; + } + case 'Number': { + const decimalCount = nodeValue.slice(nodeValue.indexOf('.') + 1).length; + return { + name: dictionaryArg ? 
dictionaryArg.name : undefined, + type: 'number', + value: parseFloat(nodeValue).toFixed(decimalCount), + }; + } + case 'String': { + return { name: dictionaryArg ? dictionaryArg.name : undefined, type: 'string', value: nodeValue }; + } + default: { + break; + } + } +} + +function serializeArgs(args: any[]): string { + return args + .map(arg => { + return `${arg.value}`; + }) + .join(', '); +} + +/** + * Converts a seqN tree into SATF/SASF variables. + * + * @param {Tree} tree - The SeqN tree of the sequence + * @param {string} text - The original text of the sequence + * @param {string} type - The SATF/SASF variable type, either 'Parameters' + * or 'Variables' + * @returns {string | undefined} - The generated string or undefined if there + * are no variables + */ +export function generateSatfVariables( + tree: Tree, + text: string, + type: 'Parameters' | 'Variables' = 'Parameters', +): string | undefined { + let nType = 'ParameterDeclaration'; + if (type === 'Variables') { + nType = 'LocalDeclaration'; + } + + const variableContainer = tree.topNode.getChild(nType); + if (!variableContainer) { + return undefined; + } + + const variables = variableContainer.getChildren('Variable'); + if (!variables || variables.length === 0) { + return undefined; + } + + const serializedVariables = variables + .map((variableNode: SyntaxNode) => { + const nameNode = variableNode.getChild('Enum'); + const typeNode = variableNode.getChild('Type'); + const enumNode = variableNode.getChild('EnumName'); + const rangeNode = variableNode.getChild('Range'); + const allowableValuesNode = variableNode.getChild('Values'); + const objects = variableNode.getChildren('Object'); + + const variableText = nameNode ? 
text.slice(nameNode.from, nameNode.to) : 'UNKNOWN'; + const variable: { + allowable_ranges?: any[]; + allowable_values?: any[]; + enum_name?: string; + name: string; + sc_name?: string; + type: 'FLOAT' | 'INT' | 'STRING' | 'UINT' | 'ENUM'; + } = { name: variableText, type: 'INT' }; + + if (typeNode) { + variable.type = text.slice(typeNode.from, typeNode.to) as 'FLOAT' | 'INT' | 'STRING' | 'UINT' | 'ENUM'; + if (enumNode) { + variable.enum_name = text.slice(enumNode.from, enumNode.to); + } + if (rangeNode) { + const allowableRanges = parseAllowableRanges(text, rangeNode); + if (allowableRanges && allowableRanges.length > 0) { + variable.allowable_ranges = allowableRanges; + } + } + if (allowableValuesNode) { + const allowableValues = parseAllowableValues(text, allowableValuesNode); + if (allowableValues && allowableValues.length > 0) { + variable.allowable_values = allowableValues; + } + } + } else { + for (const object of objects) { + const properties = object.getChildren('Property'); + + properties.forEach(property => { + const propertyName = property.getChild('PropertyName'); + const propertyValue = propertyName?.nextSibling; + if (!propertyName || !propertyValue) { + return; + } + const propertyNameString = text.slice(propertyName?.from, propertyName?.to).replaceAll('"', ''); + const propertyValueString = text.slice(propertyValue?.from, propertyValue?.to).replaceAll('"', ''); + + switch (propertyNameString.toLowerCase()) { + case 'allowable_ranges': { + if (!propertyValue) { + break; + } + const allowableRanges = parseAllowableRanges(text, propertyValue); + if (allowableRanges && allowableRanges.length > 0) { + variable.allowable_ranges = allowableRanges; + } + break; + } + case 'allowable_values': + { + if (!propertyValue) { + break; + } + const allowableValues = parseAllowableValues(text, propertyValue); + if (allowableValues && allowableValues.length > 0) { + variable.allowable_values = allowableValues; + } + } + break; + case 'enum_name': + 
variable.enum_name = propertyValueString; + break; + case 'sc_name': + variable.sc_name = propertyValueString; + break; + case 'type': + variable.type = propertyValueString as 'FLOAT' | 'INT' | 'STRING' | 'UINT' | 'ENUM'; + break; + } + }); + } + } + + return ( + `\t${variable.name}` + + `(\n\t\tTYPE,${variable.type}${variable.enum_name ? `\n\tENUM,${variable.enum_name}` : ''}` + + `${variable.allowable_ranges ? `\n\t\tRANGES,${variable.allowable_ranges}` : ''}` + + `${variable.allowable_values ? `\n\t\tVALUES,${variable.allowable_values}` : ''}` + + `${variable.sc_name ? `\n\t\tSC_NAME,${variable.sc_name}` : ''}\n\t)` + ); + }) + .join(',\n\t'); + + return `${type},\n' + ${serializedVariables},\nend,\n`; +} + +function parseAllowableRanges(text: string, rangeNode: any): { max: number; min: number }[] { + return text + .slice(rangeNode.from, rangeNode.to) + .split(',') + .map(range => { + const rangeMatch = /^([-+]?\d+)?(\.\.\.)([-+]?\d+)?$/.exec(range.replaceAll('"', '').trim()); + if (rangeMatch) { + const [, min, , max] = rangeMatch; + const maxNum = !isNaN(Number(max)) ? Number(max) : Infinity; + const minNum = !isNaN(Number(min)) ? Number(min) : -Infinity; + + return { max: maxNum, min: minNum }; + } + return undefined; + }) + .filter(range => range !== undefined) as { max: number; min: number }[]; +} + +function parseAllowableValues(text: string, allowableValuesNode: any): string[] | undefined { + const allowableValues = text + .slice(allowableValuesNode.from + 1, allowableValuesNode.to - 1) + .split(',') + .map(value => value.trim()); + + return allowableValues.length > 0 ? allowableValues : undefined; +} + +/** + * Converts a seqN tree into a string of SASF requests. + * @param tree The seqN tree for the sequence. + * @param sequence The text of the sequence. + * @param commandDictionary The command dictionary for the sequence. + * @returns A string of SASF requests, or undefined if the sequence contains no requests. 
+ */ +export async function generateRequests( + tree: Tree, + sequence: string, + commandDictionary: CommandDictionary | null, +): Promise { + const requests = tree.topNode.getChild('Commands')?.getChildren('Request'); + if (requests == null || requests.length === 0) { + return undefined; + } + return requests + .map((requestNode: SyntaxNode) => { + const nameNode = requestNode.getChild('RequestName'); + const name = nameNode ? unquoteUnescape(sequence.slice(nameNode.from, nameNode.to)) : 'UNKNOWN'; + const parsedTime = parseTime(requestNode, sequence); + const request = `request(${name}, +\tSTART_TIME, ${parsedTime.tag}, +\tREQUESTOR, "systems", +\tPROCESSOR, "VC2AB", +\tKEY, "NO_KEY")\n\n`; + let order = 1; + let child = requestNode?.getChild('Steps')?.firstChild; + const steps = []; + + while (child) { + steps.push(`${parseStep(child, sequence, commandDictionary, order++)}`); + child = child?.nextSibling; + } + + return `${request}${steps.join('\n')}\nend;\n`; + }) + .join(',\n'); +} + +/** + * Takes a SATF/SASF string and parses it into SeqN sequence strings. + * @param satf The SATF/SASF string to parse. + * @returns top level metadata and list of sequences . + */ +export async function satfToSequence(satfOrSasf: string): Promise<{ + header: string; + sequences: { + name: string; + sequence: string; + }[]; +}> { + const base = SatfLanguage.parser.parse(satfOrSasf).topNode; + + const satfOrSasfNode = base.getChild(SATF) || base.getChild(SASF); + + if (satfOrSasfNode) { + const header = parseHeader(satfOrSasfNode.getChild(HEADER), satfOrSasf); + const sequences = parseBody(satfOrSasfNode.getChild(BODY), satfOrSasf); + return { header, sequences }; + } + + return { header: '', sequences: [] }; +} + +function parseHeader(headerNode: SyntaxNode | null, text: string): string { + const header = ''; + if (!headerNode) { + return header; + } + + const sfduHeader = headerNode.getChild(SFDU_HEADER)?.getChild(HEADER_PAIRS)?.getChildren(HEADER_PAIR) ?? 
[]; + + return sfduHeader + .map((pairNode: SyntaxNode) => { + const keyNode = pairNode.getChild(KEY); + const valueNode = pairNode.getChild(VALUE); + if (!keyNode || !valueNode) { + console.error(`Error processing header entry: ${text.slice(pairNode.from, pairNode.to)}`); + return ''; + } + const key = text.slice(keyNode.from, keyNode.to).trim(); + const value = text.slice(valueNode.from, valueNode.to).trim(); + + if (key.length === 0 || value.length === 0) { + return ''; + } + return `@METADATA "${key}" "${value}"`; + }) + .join('\n'); +} + +function parseBody(bodyNode: SyntaxNode | null, text: string): { name: string; sequence: string }[] { + if (!bodyNode) { + return []; + } + + //satf + if (bodyNode.getChild(ACTIVITY_TYPE_DEFINITIONS)) { + const activityTypeNodes = bodyNode.getChild(ACTIVITY_TYPE_DEFINITIONS)?.getChildren(ACTIVITY_TYPE_GROUP) ?? []; + + return activityTypeNodes.map((group, i) => { + let sequence = ''; + let sequenceName = 'sequence-' + i; + const sequenceNameNode = group.getChild(ACTIVITY_TYPE_NAME); + const seqGenNode = group.getChild(SEQGEN); + const vcNode = group.getChild(VIRTUAL_CHANNEL); + const onBoardFilenameNode = group.getChild(ON_BOARD_FILENAME); + const onBoardFilePathNode = group.getChild(ON_BOARD_PATH); + + if (sequenceNameNode) { + const name = text.slice(sequenceNameNode.from, sequenceNameNode.to); + sequence += `## ${name}\n`; + sequenceName = name.split('/').pop() || 'sequence-' + i; + } + + sequence += parseParameters(group.getChild(PARAMETERS), 'INPUT_PARAMS', text); + sequence += parseParameters(group.getChild(VARIABLES), 'LOCALS', text); + + if (vcNode) { + sequence += `@METADATA "VIRTUAL_CHANNEL" "${text.slice(vcNode.from, vcNode.to)}" \n`; + } + if (onBoardFilenameNode) { + sequence += `@METADATA "ON_BOARD_FILENAME" "${text.slice(onBoardFilenameNode.from, onBoardFilenameNode.to)}" \n`; + } + + if (onBoardFilePathNode) { + sequence += `@METADATA "ON_BOARD_PATH" "${text.slice(onBoardFilePathNode.from, 
          onBoardFilePathNode.to)}" \n`;
      }
      if (seqGenNode) {
        sequence += `@METADATA "SEQGEN" "${text.slice(seqGenNode.from, seqGenNode.to)}" \n`;
      }

      sequence += parseSteps(group.getChild(STEPS), text);

      return { name: sequenceName, sequence };
    });
  }

  // sasf: a Requests node holds one Request per generated sequence.
  if (bodyNode.getChild(REQUESTS)) {
    const requestNodes = bodyNode.getChild(REQUESTS)?.getChildren(REQUEST) ?? [];

    return requestNodes.map((group, i) => {
      let sequence = '';

      const requestNameNode = group.getChild(REQUEST_NAME);
      const timeNode = group.getChild(TIME);
      const requestorNode = group.getChild(REQUESTOR);
      const processorNode = group.getChild(PROCESSOR);
      const keyNode = group.getChild(KEY);
      const sequenceName = requestNameNode ? text.slice(requestNameNode.from, requestNameNode.to) : 'sequence-' + i;
      // Time prefix: relative tag "R<time>" when a TIME node exists, otherwise command-complete "C".
      sequence += `${timeNode ? `R${text.slice(timeNode.from, timeNode.to)} ` : 'C '}`;
      sequence += `@REQUEST_BEGIN("${sequenceName}")\n`;
      // Steps are rendered then indented two spaces under the request block.
      sequence += parseSteps(group.getChild(STEPS), text)
        .split('\n')
        .map(line => ' '.repeat(2) + line)
        .join('\n');
      sequence += `\n@REQUEST_END\n`;
      // Request-level attributes become @METADATA("NAME":value) lines after the block.
      if (requestorNode) {
        sequence += `@METADATA("REQUESTOR":${text.slice(requestorNode.from, requestorNode.to)})\n`;
      }
      if (processorNode) {
        sequence += `@METADATA("PROCESSOR":${text.slice(processorNode.from, processorNode.to)})\n`;
      }
      if (keyNode) {
        sequence += `@METADATA("KEY":${text.slice(keyNode.from, keyNode.to)})\n`;
      }

      return { name: sequenceName, sequence };
    });
  }

  return [];
}

/**
 * Renders a parameter/variable list as a SeqN
 * `@INPUT_PARAMS_BEGIN`/`@LOCALS_BEGIN` … `_END` block.
 * Returns '' when there is no node or it has no entries.
 */
function parseParameters(
  parameterNode: SyntaxNode | null,
  variableType: 'INPUT_PARAMS' | 'LOCALS',
  text: string,
): string {
  if (!parameterNode) {
    return '';
  }
  const entries = parameterNode.getChildren(ENTRY);
  if (entries && entries.length > 0) {
    let parameter = `@${variableType}_BEGIN\n`;
    parameter += entries
      .map(param => {
        const nameNode = param.getChild(NAME);
        const typeNode = param.getChild(TYPE);
        const rangesNode = param.getChildren(RANGE);
        const
enumNameNode = param.getChild(ENUM_NAME); + + const name = nameNode ? `${text.slice(nameNode.from, nameNode.to)}` : ''; + const enumName = enumNameNode ? ` ${text.slice(enumNameNode.from, enumNameNode.to)}` : ''; + let type = typeNode ? text.slice(typeNode.from, typeNode.to).trim() : ''; + switch (type) { + case PARAM_UNSIGNED_DECIMAL: + type = VAR_UINT; + break; + case PARAM_SIGNED_DECIMAL: + type = VAR_INT; + break; + case PARAM_HEXADECIMAL: + case PARAM_OCTAL: + case PARAM_BINARY: + case PARAM_TIME: + case PARAM_DURATION: + case PARAM_QUOTED_STRING: + type = VAR_STRING; + break; + case PARAM_ENGINEERING: + type = VAR_FLOAT; + break; + case VAR_STRING: + { + if (enumNameNode) { + type = VAR_ENUM; + } else { + type = VAR_STRING; + } + } + break; + default: + console.log(`type: ${type} is not supported`); + } + + const allowable_values: string[] = []; + const allowable_ranges: string[] = []; + rangesNode.forEach((range: any) => { + text + .slice(range.from, range.to) + .split(',') + .forEach(r => { + r = r.replaceAll('"', '').trim(); + if (r.includes('...')) { + allowable_ranges.push(r); + } else { + allowable_values.push(r); + } + }); + }); + + return `${name} ${type}${enumName}${allowable_ranges.length === 0 ? (allowable_values.length === 0 ? '' : ' ""') : ` "${allowable_ranges.join(', ')}"`}${allowable_values.length === 0 ? 
          '' : ` "${allowable_values.join(', ')}"`}`;
      })
      .join('\n');
    parameter += `\n@${variableType}_END\n\n`;

    return parameter;
  }
  return '';
}

/**
 * Renders every Command child of a Steps node as one SeqN line:
 * time, stem, optional args, optional comment, optional @MODEL lines.
 */
function parseSteps(stepNode: SyntaxNode | null, text: string): string {
  const step = '';
  if (!stepNode) {
    return step;
  }

  const commandNodes = stepNode.getChildren(COMMAND);

  return commandNodes
    .map(command => {
      const time = parseTimeNode(command.getChild(SCHEDULED_TIME), text);
      const stem = parseStem(command.getChild(STEM), text);
      const comment = parseComment(command.getChild(COMMENT), text);
      const args = parseArgsNode(command.getChild(ARGS), text);
      const models = parseModel(command.getChild(ASSUMED_MODEL_VALUES), text);
      return `${time} ${stem}${args.length > 0 ? ` ${args}` : ''}${comment.length > 0 ? ` ${comment}` : ''}${models.length > 0 ? `\n${models}` : ''}`;
    })
    .join('\n');
}

/**
 * Maps a ScheduledTime node (time value + relation keyword) onto a SeqN
 * time prefix: A=absolute, E=epoch, R=relative, C=command-complete.
 * NOTE(review): an unrecognized relation emits the literal string 'error'
 * into the generated sequence text.
 */
function parseTimeNode(timeNode: SyntaxNode | null, text: string): string {
  if (!timeNode) {
    return '';
  }

  const timeValueNode = timeNode.getChild(TIME);
  const timeTagNode = timeNode.getChild(TIME_RELATION);

  if (!timeTagNode || !timeValueNode) {
    return '';
  }

  const time = text.slice(timeValueNode.from, timeValueNode.to);
  const timeTag = text.slice(timeTagNode.from, timeTagNode.to);
  switch (timeTag.trim()) {
    case 'ABSOLUTE':
      return `A${time} `;
    case 'EPOCH':
      return `E${time} `;
    case 'FROM_PREVIOUS_START':
      return `R${time} `;
    case 'FROM_REQUEST_START':
    case 'FROM_ACTIVITY_START':
      // TODO: This needs to be changed to refer to the start of the request.
      return `R${time} `;
    case 'WAIT_PREVIOUS_END':
      return `C `;
    default:
      return 'error';
  }
}

/**
 * Flattens a multi-line COMMENT node into a single SeqN `#` comment.
 */
function parseComment(commentNode: SyntaxNode | null, text: string): string {
  return commentNode
    ?
      `# ${text
        .slice(commentNode.from, commentNode.to)
        .split('\n')
        // NOTE(review): the mapped array is coerced to a string inside the template
        // literal, so lines are joined with bare commas — likely a missing
        // .join(' '); confirm intended output before changing.
        .map(line => line.trim())}` // flatten comment to one line SeqN doesn't support multi-line comments on a command
    : '';
}

/** Returns the raw source text of a STEM node, or '' when absent. */
function parseStem(stemNode: SyntaxNode | null, text: string): string {
  return stemNode ? text.slice(stemNode.from, stemNode.to) : '';
}

/** Renders every child of an ARGS node, space-separated. */
function parseArgsNode(argsNode: SyntaxNode | null, text: string): string {
  if (!argsNode) {
    return '';
  }
  let argNode = argsNode.firstChild;
  const args = [];
  while (argNode) {
    args.push(`${parseArgNode(argNode, text)}`);
    argNode = argNode?.nextSibling;
  }
  return args.join(' ');
}

/**
 * Renders a single argument node. Most node kinds pass through verbatim;
 * ARITHMETICAL expressions are parenthesized; unknown kinds log and yield 'Error'.
 */
function parseArgNode(argNode: SyntaxNode, text: string): string {
  // NOTE(review): dead guard — the parameter is declared non-nullable, so this
  // branch is unreachable unless callers bypass the type system.
  if (!argNode) {
    return '';
  }
  const argValue = text.slice(argNode.from, argNode.to);

  switch (argNode.name) {
    case STRING:
      return `${argValue}`;
    case NUMBER:
      return `${argValue}`;
    case BOOLEAN:
      return `${argValue}`;
    case ENUM:
      return `${argValue}`;
    case GLOBAL:
      return `${argValue}`;
    case ARITHMETICAL:
      return `(${argValue})`;
    default: {
      console.log(`${argNode.name}: ${argValue} is not supported`);
      return 'Error';
    }
  }
}

/**
 * Renders AssumedModelValues as `@MODEL(key,value,"00:00:00")` lines,
 * skipping pairs missing a key or value.
 */
function parseModel(modelNode: SyntaxNode | null, text: string): string {
  if (!modelNode) {
    return '';
  }
  const modelsNode = modelNode.getChildren(MODEL);
  return modelsNode
    .map(model => {
      const keyNode = model.getChild(KEY);
      const valueNode = model.getChild(VALUE);
      if (!keyNode || !valueNode) {
        return null;
      }
      return `@MODEL(${text.slice(keyNode.from, keyNode.to)},${text.slice(valueNode.from, valueNode.to)},"00:00:00")`;
    })
    .filter(model => model !== null)
    .join('\n');
}
diff --git a/src/utilities/codemirror/satf/satf-sasf.grammar b/src/utilities/codemirror/satf/satf-sasf.grammar
new file mode 100644
index 0000000000..dea9ecfd47
--- /dev/null
+++ b/src/utilities/codemirror/satf/satf-sasf.grammar
@@ -0,0 +1,422 @@
+@top satf_sasf {
+  Satf | Sasf
+  }
+
+  Satf{
+    newLine?
+ Header newLine? + Body {ActivityTypeDefinitions?} + (eof newLine?) + } + + Sasf{ + newLine? + Header newLine? + eod newLine? + Body { Requests? } + (eof newLine?) + } + + Header { + (SfduHeader newLine?)? + (SeqHeader newLine?)? + eoh + } + + HeaderPairs { HeaderPair* } + HeaderPair {Key"="Value ";" newLine} + Key { (identifier | ":")* } + Value { headerValue+ } + SfduHeader { headerMarker newLine HeaderPairs headerMarker} + + Start { "$$"identifier identifier* newLine} + LineSeparator { "**" ("*" | "**")+ newLine} + Keyword { identifier } + Data { anyASCII+ } + HeaderRecords { HeaderRecord+ } + HeaderRecord { "*"Keyword Data? newLine} + SeqHeader { Start LineSeparator HeaderRecords? LineSeparator } + + ActivityTypeDefinitions { ActivityTypeGroup (ActivityTypeGroup)* } + ActivityTypeGroup { + identifier parentheses< + newLine?"`"? + definitions + ( + Parameters{input} | + Variables{ input} + )* + Steps? + > + newLine? + } + + Requests { Request (Request)* } + + RequestName {identifier} + Request { request parentheses< + newLine? + definition + definitionUnslashValuedefinitionUnslashValue + (definitionUnslashValuedefinitionUnslashValue | + definitionUnslashValue definitionUnslashValue? | + definitionUnslashValue definitionUnslashValue)* + > + Steps{newLine? step+ end}? +} + + definition { content definitionEnd?} + // StartTag[closedBy="EndTag"] { "\\" } + // EndTag[openedBy="StartTag"] { "\\" } + definitionValue {"\\" content "\\" definitionEnd?} + definitionUnslashValue {content definitionEnd?} + definitionEnd { "," | (newLine? ",") | ("," newLine?) 
| newLine } + + definitions { + (definition (definitionValue | definitionUnslashValue)) + ((definition (definitionValue | definitionUnslashValue)) | + (definition (definitionValue | definitionUnslashValue)) | + (definition definitionValue) | + (definition definitionValue) | + (definition (definitionValue | definitionUnslashValue) ) | + (definition (definitionValue | definitionUnslashValue)) | + (definition (definitionValue | definitionUnslashValue)))* + } + ActivityTypeName { path } + ActivityTypeCode { identifier } + ReturnType { identifier } + Flags { Flag+ } + Flag { identifier | ("|" identifier) } //todo + Help { (anyASCII | newLine)+ } + SeqgenText { anyASCII } + VirtualChannel { identifier} + OnBoardFilename { path } + OnBoardPath { path } + + + input { category "," newLine? (entry newLine?)+ end newLine?} + Entry { Name { identifier } parentheses + < + newLine? + paramterKeyValue | definitionUnslashValue)> + (paramterKeyValue | definitionUnslashValue)> | + ranges | + paramterKeyValue | definitionUnslashValue)> | + paramterKeyValue | definitionUnslashValue )> | + paramterKeyValue>)* + > + } + + paramterKeyValue { key "," value} + + Type { identifier (("[" singleDigit* "]") | "[]")? } + EnumName {identifier} + Range { anyASCII+ } + ranges {(paramterKeyValue>) } + Prefix { "YES" | "NO" } + Default { anyASCII } + + Steps { stepsKeyword "," newLine? step+ end } + step { Command | Ground | CommandDynamic | Note | Activity | Loop | Assignment } // | + + Command { + command + parentheses< + commonFields + stepFields{ + definitionUnslashValue< + Stem{identifier} + parentheses< + Args< + String | Number | Enum | Global | Arithmetical + >? + >? + >? + (definition definitionUnslashValue? end)? + } + > + newLine? + } + + CommandDynamic { + commandDynamic + parentheses< + commonFields + stepFields { + definitionUnslashValue< + Stem{"\""identifier"\""} + parentheses< + Args< + String | Number | Enum | Global | Expression | Arithmetical + >? + >? + >? 
+ (definition definitionUnslashValue? end) + } + > + newLine? + } + + Note { + note + parentheses< + commonFields + stepFields{ + definitionUnslashValue< + Stem{ definition<"TEXT">} + Args{Expression} newLine? + > + } + > + newLine? + } + + Activity { + activity + parentheses< + commonFields + stepFields{ + Group + } + > + newLine? + } + + Loop { + loop + parentheses< + commonFields + stepFields{ + definition + definitionValue + } + > + newLine? + Steps{Command+} + EndLoop{"end_loop"} + parentheses< + commonFields + > + newLine? + } + + Count{ anyASCII+ } + + Assignment { + assignment + parentheses< + commonFields + stepFields{ + definition + definitionValue< + Args< + String | Number | Enum | Global | Expression | Arithmetical + > + > + } + > + newLine? + } + + Ground { + ground + parentheses< + commonFields + stepFields{ + definitionUnslashValue< + Stem{identifier} + parentheses< + Args< + String | Number | Enum | Global | Arithmetical + >? + >? + >? + (definition definitionUnslashValue? end)? + } + > + newLine? + } + + commonFields { + definitionUnslashValue + ScheduledTime + ((definition definitionValue) | + (definition (definitionValue | definitionUnslashValue )) | + (definition definitionValue) | + (definition definitionValue) | + (definition definitionValue))* + + } + + parentheses { "(" newLine? expression (~newLine)? (")" | "),")} + StepLabel { singleDigit+ ("."singleDigit+)? } + ScheduledTime { definition<"SCHEDULED_TIME"> (definitionValue | definitionUnslashValue) TimeRelation{identifier} newLine? ","? newLine? } + InclusionCondition {anyASCII+} + AssumedModelValues{ Model (","Model)* } + Model { Key"="Value{modelValue+} } + Draw { "VERTICAL" | "HORIZONTAL" | "HATCHED" | "STANDARD" | "DIAGONAL" } + Comment { (anyASCII | newLine)+ } + Ntext { (anyASCII | newLine)+ } + Enum { identifier } + Arithmetical { identifier "+" identifier} + Args { args newLine? ("," newLine? 
args)* } + Expression {"\\"anyASCII+"\\"} + Processors {string} + String { string } + Label { string } + Opcode { string } + Pars { string } + + Group{ + ( + TypeGroup{identifier} + parentheses< + definition + Args< + String | Number | Enum | Expression + >? + > + newLine? + ) | ( + TypeGroup{seqgenDirective} + parentheses< + (definition Filename{string}) | + (definition Name{string}?) | + (definition definition definition definition ) + > + newLine? + ) | ( + TypeGroup{seqTranSet} + parentheses< + definition + definition