From 33bfc8ae6c03101ee4c16edc5afe8fb6f3db44a6 Mon Sep 17 00:00:00 2001 From: Alexander Medvednikov Date: Tue, 12 Nov 2024 14:26:48 +0300 Subject: [PATCH] css_parser.vv --- .../esbuild/css_parser/css_parser.vv | 1426 ++ complex_tests/esbuild/js_parser/js_parser.go | 18021 ++++++++++++++++ 2 files changed, 19447 insertions(+) create mode 100644 complex_tests/esbuild/css_parser/css_parser.vv create mode 100644 complex_tests/esbuild/js_parser/js_parser.go diff --git a/complex_tests/esbuild/css_parser/css_parser.vv b/complex_tests/esbuild/css_parser/css_parser.vv new file mode 100644 index 0000000..724fa3a --- /dev/null +++ b/complex_tests/esbuild/css_parser/css_parser.vv @@ -0,0 +1,1426 @@ +module css_parser + +import ast // local module +import compat // local module +import config // local module +import css_ast // local module +import css_lexer // local module +import logger // local module + +struct parser { +pub mut: + log logger.Log + source logger.Source + tokens []css_lexer.Token + all_comments []logger.Range + legal_comments []css_lexer.Comment + stack []css_lexer.T + import_records []ast.ImportRecord + symbols []ast.Symbol + composes map[ast.Ref]&css_ast.Composes + local_symbols []ast.LocRef + local_scope map[string]ast.LocRef + global_scope map[string]ast.LocRef + nesting_warnings map[logger.Loc]logger.Loc + tracker logger.LineColumnTracker + enclosing_at_media [][]css_ast.Token + layers_pre_import [][]string + layers_post_import [][]string + enclosing_layer []string + anon_layer_count isize + index isize + legal_comment_index isize + in_selector_subtree isize + prev_error logger.Loc + options Options + nesting_is_present bool + make_local_symbols bool + has_seen_at_import bool +} + +struct Options { +pub mut: + css_prefix_data map[css_ast.D]compat.CSSPrefix + // This is an embedded struct. Always access these directly instead of off + // the name "optionsThatSupportStructuralEquality". This is only grouped like + // this to make the equality comparison easier and safer (and hopefully faster). 
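+	// Presumably the grouped options referred to above and accessed by
+	// options_from_config and equal below (the Go original embeds this struct
+	// anonymously, so the explicit field declaration here is an assumption):
+	options_that_support_structural_equality optionsThatSupportStructuralEquality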
+} + +type symbolMode = u8 + +enum symbolMode { + symbol_mode_disabled + symbol_mode_global + symbol_mode_local +} + +struct optionsThatSupportStructuralEquality { +pub mut: + original_target_env string + unsupported_cssf_eatures compat.CSSFeature + minify_syntax bool + minify_whitespace bool + minify_identifiers bool + symbol_mode symbolMode +} + +pub fn options_from_config(loader config.Loader, options &config.Options) Options { + mut symbolMode := 0 + match loader { + config.loader_global_css { + symbol_mode = symbol_mode_global + } + config.loader_local_css { + symbol_mode = symbol_mode_local + } + } + return Options{ + css_prefix_data: options.cssp_refix_data + options_that_support_structural_equality: OptionsThatSupportStructuralEquality{ + minify_syntax: options.minify_syntax + minify_whitespace: options.minify_whitespace + minify_identifiers: options.minify_identifiers + unsupported_cssf_eatures: options.unsupported_cssf_eatures + original_target_env: options.original_target_env + symbol_mode: symbol_mode + } + } +} + +pub fn (a &Options) equal(b &Options) bool { + if a.options_that_support_structural_equality != b.options_that_support_structural_equality { + return false + } + if a.css_prefix_data.len != b.css_prefix_data.len { + return false + } + for k, va in a.css_prefix_data { + mut vb, ok := b.css_prefix_data[k] + if !ok || va != vb { + return false + } + } + for k_1, _ in b.css_prefix_data { + _, ok_1 := b.css_prefix_data[k] + if !ok { + return false + } + } + return true +} + +pub fn parse(log logger.Log, source logger.Source, options Options) css_ast.AST { + mut result := css_lexer.tokenize(log, source, css_lexer.Options{ + record_all_comments: options.minify_identifiers + }) + mut p := Parser{ + log: log + source: source + tracker: logger.make_line_column_tracker(&source) + options: options + tokens: result.tokens + all_comments: result.all_comments + legal_comments: result.legal_comments + prev_error: logger.Loc{ + start: -1 + } + composes: map[ast.Ref]&css_ast.Composes{} + local_scope: map[string]ast.LocRef{} + global_scope: map[string]ast.LocRef{} + make_local_symbols: options.symbol_mode == symbol_mode_local + } + mut rules := p.parse_list_of_rules(RuleContext{ + is_top_level: true + parse_selectors: true + }) + p.expect(css_lexer.te_nd_of_file) + return css_ast.AST{ + rules: rules + char_freq: p.compute_character_frequency() + symbols: p.symbols + import_records: p.import_records + approximate_line_count: result.approximate_line_count + source_map_comment: result.source_map_comment + local_symbols: p.local_symbols + local_scope: p.local_scope + global_scope: p.global_scope + composes: p.composes + layers_pre_import: p.layers_pre_import + layers_post_import: p.layers_post_import + } +} + +// Compute a character frequency histogram for everything that's not a bound +// symbol. This is used to modify how minified names are generated for slightly +// better gzip compression. Even though it's a very small win, we still do it +// because it's simple to do and very cheap to compute. 
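+//
+// For example, if the remaining (non-symbol) text of the file is dominated by
+// a handful of characters, the name minifier can prefer building generated
+// short names out of those same characters, so the output shares more byte
+// sequences with the rest of the file and gzips slightly better.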
+fn (p &Parser) compute_character_frequency() &ast.CharFreq { + if !p.options.minify_identifiers { + return nil + } + mut char_freq := &ast.CharFreq{} + char_freq.scan(p.source.contents, 1) + for _, comment_range in p.all_comments { + char_freq.scan(p.source.text_for_range(comment_range), -1) + } + for _, record in p.import_records { + if !record.source_index.is_valid() { + char_freq.scan(record.path.text, -1) + } + } + for _, symbol in p.symbols { + if symbol.kind == ast.symbol_local_css { + char_freq.scan(symbol.original_name, -i32(symbol.use_count_estimate)) + } + } + return char_freq +} + +fn (p &Parser) advance() { + if p.index < p.tokens.len { + p.index++ + } +} + +fn (p &Parser) at(index isize) css_lexer.Token { + if index < p.tokens.len { + return p.tokens[index] + } + return css_lexer.Token{ + kind: css_lexer.te_nd_of_file + range: logger.Range{ + loc: logger.Loc{ + start: i32(p.source.contents.len) + } + } + } +} + +fn (p &Parser) current() css_lexer.Token { + return p.at(p.index) +} + +fn (p &Parser) next() css_lexer.Token { + return p.at(p.index + 1) +} + +fn (p &Parser) raw() string { + mut t := p.current() + return p.source.contents[t.range.loc.start..t.range.end()] +} + +fn (p &Parser) decoded() string { + return p.current().decoded_text(p.source.contents) +} + +fn (p &Parser) peek(kind css_lexer.T) bool { + return kind == p.current().kind +} + +fn (p &Parser) eat(kind css_lexer.T) bool { + if p.peek(kind) { + p.advance() + return true + } + return false +} + +fn (p &Parser) expect(kind css_lexer.T) bool { + return p.expect_with_matching_loc(kind, logger.Loc{ + start: -1 + }) +} + +fn (p &Parser) expect_with_matching_loc(kind css_lexer.T, matchingLoc logger.Loc) bool { + if p.eat(kind) { + return true + } + mut t := p.current() + if (t.flags & css_lexer.did_warn_about_single_line_comment) != 0 { + return false + } + mut text := 0 + mut suggestion := 0 + mut notes := []logger.MsgData{} + mut expected := kind.string() + if expected.has_prefix('"') && expected.has_suffix('"') { + suggestion = expected[1..expected.len - 1] + } + if (kind == css_lexer.ts_emicolon || kind == css_lexer.tc_olon) && p.index > 0 && p.at(p.index - 1).kind == css_lexer.tw_hitespace { + text = strconv.v_sprintf('Expected %s', expected) + t = p.at(p.index - 1) + } else if (kind == css_lexer.tc_lose_brace || kind == css_lexer.tc_lose_bracket + || kind == css_lexer.tc_lose_paren) && matching_loc.start != + -1 && isize(matching_loc.start) + 1 <= p.source.contents.len { + mut c := p.source.contents[matching_loc.start..matching_loc.start + 1] + text = strconv.v_sprintf('Expected %s to go with %q', expected, c) + notes << p.tracker.msg_data(logger.Range{ + loc: matching_loc + len: 1 + }, strconv.v_sprintf('The unbalanced %q is here:', c)) + } else { + match t.kind { + css_lexer.te_nd_of_file, css_lexer.tw_hitespace { + text = strconv.v_sprintf('Expected %s but found %s', expected, t.kind.string()) + t.range.len = isize(0) + } + css_lexer.tb_ad_url, css_lexer.tu_nterminated_string { + text = strconv.v_sprintf('Expected %s but found %s', expected, t.kind.string()) + } + else { + text = strconv.v_sprintf('Expected %s but found %q', expected, p.raw()) + } + } + } + if t.range.loc.start > p.prev_error.start { + mut data := p.tracker.msg_data(t.range, text) + data.location.suggestion = suggestion + p.log.add_msg_id(logger.msg_id_css_csss_yntax_error, logger.Msg{ + kind: logger.warning + data: data + notes: notes + }) + p.prev_error = t.range.loc + } + return false +} + +fn (p &Parser) unexpected() { + mut t := 
p.current() + if t.range.loc.start > p.prev_error.start && (t.flags & css_lexer.did_warn_about_single_line_comment) == 0 { + mut text := 0 + match t.kind { + css_lexer.te_nd_of_file, css_lexer.tw_hitespace { + text = strconv.v_sprintf('Unexpected %s', t.kind.string()) + t.range.len = isize(0) + } + css_lexer.tb_ad_url, css_lexer.tu_nterminated_string { + text = strconv.v_sprintf('Unexpected %s', t.kind.string()) + } + else { + text = strconv.v_sprintf('Unexpected %q', p.raw()) + } + } + p.log.add_id(logger.msg_id_css_csss_yntax_error, logger.warning, &p.tracker, t.range, + text) + p.prev_error = t.range.loc + } +} + +fn (p &Parser) symbol_for_name(loc logger.Loc, name string) ast.LocRef { + mut kind := 0 + mut scope := map[string]ast.LocRef{} + if p.make_local_symbols { + kind = ast.symbol_local_css + scope = p.local_scope + } else { + kind = ast.symbol_global_css + scope = p.global_scope + } + mut entry, ok := scope[name] + if !ok { + entry = ast.LocRef{ + loc: loc + ref: ast.Ref{ + source_index: p.source.index + inner_index: u32(p.symbols.len) + } + } + p.symbols << ast.Symbol{ + kind: kind + original_name: name + link: ast.invalid_ref + } + scope[name] = entry + if kind == ast.symbol_local_css { + p.local_symbols << entry + } + } + p.symbols[entry.ref.inner_index].use_count_estimate++ + return entry +} + +fn (p &Parser) record_at_layer_rule(layers [][]string) { + if p.anon_layer_count > 0 { + return + } + for _, layer in layers { + if p.enclosing_layer.len > 0 { + mut clone := []string{len: 0, cap: p.EnclosingLayer.len + layer.len} + layer << layer + } + p.layers_post_import << layer + } +} + +struct ruleContext { +pub mut: + is_top_level bool + parse_selectors bool +} + +fn (p &Parser) parse_list_of_rules(context ruleContext) []css_ast.Rule { + mut at_rule_context := AtRuleContext{} + if context.is_top_level { + at_rule_context.charset_validity = at_rule_valid + at_rule_context.import_validity = at_rule_valid + at_rule_context.is_top_level = true + } + mut rules := [] + { + } + mut did_find_at_import := false + mut did_find_at_import_1 := false + if p.options.minify_syntax { + rules = p.mangle_rules(rules, context.is_top_level) + } + return rules +} + +struct listOfDeclarationsOpts { +pub mut: + composes_context &ComposesContext = unsafe { nil } + can_inline_no_op_nesting bool +} + +fn (p &Parser) parse_list_of_declarations(opts listOfDeclarationsOpts) []css_ast.Rule { + list = [] + { + } + mut found_nesting := false + for { + match p.current().kind { + css_lexer.tw_hitespace, css_lexer.ts_emicolon { + p.advance() + } + css_lexer.te_nd_of_file, css_lexer.tc_lose_brace { + list = p.process_declarations(list, opts.composes_context) + if p.options.minify_syntax { + list = p.mangle_rules(list, false) + if opts.can_inline_no_op_nesting { + if found_nesting { + mut inlineDecls := []css_ast.Rule{} + mut n := isize(0) + for _, rule in list { + mut rule_1, ok := rule.data + if ok && rule.selectors.len == 1 { + mut sel := rule.selectors[0] + if sel.selectors.len == 1 && sel.selectors[0].is_single_ampersand() { + inline_decls << rule.rules + continue + } + } + list[n] = rule + n++ + } + list << inline_decls + } + } else { + } + } + return + } + css_lexer.ta_t_keyword { + if p.in_selector_subtree > 0 { + p.nesting_is_present = true + } + list << p.parse_at_rule(AtRuleContext{ + is_declaration_list: true + can_inline_no_op_nesting: opts.can_inline_no_op_nesting + }) + } + else { + mut scan, _ := p.scan_for_end_of_rule() + if scan == end_of_rule_open_brace { + p.nesting_is_present = true + 
found_nesting = true + mut rule_2 := p.parse_selector_rule(false, ParseSelectorOpts{ + is_declaration_context: true + composes_context: opts.composes_context + }) + mut sel_1, ok_1 := rule.data + if ok && sel.selectors.len == 1 { + mut first := sel.selectors[0] + if first.selectors.len == 1 { + mut first_1 := first.selectors[0] + if first.was_empty_from_local_or_global && first.is_single_ampersand() { + list << sel.rules + continue + } + } + } + list << rule + } else { + list << p.parse_declaration() + } + } + } + } +} + +fn (p &Parser) mangle_rules(rules []css_ast.Rule, isTopLevel bool) []css_ast.Rule { + mut mangled_rules := []css_ast.Rule{len: 0, cap: rules.len} + mut prevNonComment := 0 + if !is_top_level { + mut remover := make_duplicate_rule_mangler(ast.SymbolMap{}) + mangled_rules = remover.remove_duplicate_rules_in_place(p.source.index, mangled_rules, + p.import_records) + } + return mangled_rules +} + +struct ruleEntry { +pub mut: + data css_ast.R + call_counter u32 +} + +struct hashEntry { +pub mut: + rules []ruleEntry +} + +struct callEntry { +pub mut: + import_records []ast.ImportRecord + source_index u32 +} + +struct DuplicateRuleRemover { +pub mut: + entries map[Uint32]HashEntry + calls []callEntry + check css_ast.CrossFileEqualityCheck +} + +pub fn make_duplicate_rule_mangler(symbols ast.SymbolMap) DuplicateRuleRemover { + return DuplicateRuleRemover{ + entries: map[Uint32]HashEntry{} + check: css_ast.CrossFileEqualityCheck{ + symbols: symbols + } + } +} + +pub fn (remover &DuplicateRuleRemover) remove_duplicate_rules_in_place(sourceIndex u32, rules []css_ast.Rule, importRecords []ast.ImportRecord) []css_ast.Rule { + mut call_counter := u32(remover.calls.len) + remover.calls << CallEntry{import_records, source_index} + mut n := rules.len + mut start := n + mut start_1 := n + return rules[start..] +} + +// Reference: https://developer.mozilla.org/en-US/docs/Web/HTML/Element +__global nonDeprecatedElementsSupportedByIE7 = { + 'a': true + 'abbr': true + 'address': true + 'area': true + 'b': true + 'base': true + 'blockquote': true + 'body': true + 'br': true + 'button': true + 'caption': true + 'cite': true + 'code': true + 'col': true + 'colgroup': true + 'dd': true + 'del': true + 'dfn': true + 'div': true + 'dl': true + 'dt': true + 'em': true + 'embed': true + 'fieldset': true + 'form': true + 'h1': true + 'h2': true + 'h3': true + 'h4': true + 'h5': true + 'h6': true + 'head': true + 'hr': true + 'html': true + 'i': true + 'iframe': true + 'img': true + 'input': true + 'ins': true + 'kbd': true + 'label': true + 'legend': true + 'li': true + 'link': true + 'map': true + 'menu': true + 'meta': true + 'noscript': true + 'object': true + 'ol': true + 'optgroup': true + 'option': true + 'p': true + 'param': true + 'pre': true + 'q': true + 'ruby': true + 's': true + 'samp': true + 'script': true + 'select': true + 'small': true + 'span': true + 'strong': true + 'style': true + 'sub': true + 'sup': true + 'table': true + 'tbody': true + 'td': true + 'textarea': true + 'tfoot': true + 'th': true + 'thead': true + 'title': true + 'tr': true + 'u': true + 'ul': true + 'var': true +} +// This only returns true if all of these selectors are considered "safe" which +// means that they are very likely to work in any browser a user might reasonably +// be using. 
We do NOT want to merge adjacent qualified rules with the same body +// if any of the selectors are unsafe, since then browsers which don't support +// that particular feature would ignore the entire merged qualified rule: +// +// Input: +// a { color: red } +// b { color: red } +// input::-moz-placeholder { color: red } +// +// Valid output: +// a, b { color: red } +// input::-moz-placeholder { color: red } +// +// Invalid output: +// a, b, input::-moz-placeholder { color: red } +// +// This considers IE 7 and above to be a browser that a user could possibly use. +// Versions of IE less than 6 are not considered. +fn is_safe_selectors(complexSelectors []css_ast.ComplexSelector) bool { + for _, complex in complex_selectors { + for _, compound in complex.selectors { + if compound.has_nesting_selector() { + return false + } + if compound.combinator.byte != 0 { + return false + } + if compound.type_selector != nil { + if compound.type_selector.namespace_prefix != nil { + return false + } + if compound.type_selector.name.kind == css_lexer.ti_dent + && !non_deprecated_elements_supported_by_ie_7[compound.type_selector.name.text] { + return false + } + } + for _, ss in compound.subclass_selectors { + mut s := ss.data + match s { + css_ast.SSAttribute { + if s.matcher_modifier != 0 { + return false + } + } + css_ast.SSPseudoClass { + if s.args == nil && !s.is_element { + match s.name { + 'active', 'first-child', 'hover', 'link', 'visited' { + continue + } + } + } + return false + } + css_ast.SSPseudoClassWithSelectorList { + return false + } + } + } + } + } + return true +} + +fn (p &Parser) parse_urlo_r_string() (string, logger.Range, bool) { + mut t := p.current() + match t.kind { + css_lexer.ts_tring { + mut text := p.decoded() + p.advance() + return text, t.range, true + } + css_lexer.turl { + mut text_1 := p.decoded() + p.advance() + return text, t.range, true + } + css_lexer.tf_unction { + if p.decoded().equal_fold('url') { + mut matching_loc := logger.Loc{ + start: p.current().range.end() - 1 + } + mut i := p.index + 1 + for p.at(i).kind == css_lexer.tw_hitespace { + i++ + } + if p.at(i).kind == css_lexer.ts_tring { + mut string_index := i + i++ + for p.at(i).kind == css_lexer.tw_hitespace { + i++ + } + mut close := p.at(i).kind + if close == css_lexer.tc_lose_paren || close == css_lexer.te_nd_of_file { + mut t_1 := p.at(string_index) + mut text_2 := t.decoded_text(p.source.contents) + p.index = i + p.expect_with_matching_loc(css_lexer.tc_lose_paren, matching_loc) + return text, t.range, true + } + } + } + } + } + return '', logger.Range{}, false +} + +fn (p &Parser) expect_urlo_r_string() (string, logger.Range, bool) { + url, r, ok = p.parse_urlo_r_string() + if !ok { + p.expect(css_lexer.turl) + } + return +} + +type atRuleKind = u8 + +enum atRuleKind { + at_rule_unknown + at_rule_declarations + at_rule_inherit_context + at_rule_qualified_or_empty + at_rule_empty +} + +__global specialAtRules = { + 'media': at_rule_inherit_context + 'supports': at_rule_inherit_context + 'font-face': at_rule_declarations + 'page': at_rule_declarations + 'bottom-center': at_rule_declarations + 'bottom-left-corner': at_rule_declarations + 'bottom-left': at_rule_declarations + 'bottom-right-corner': at_rule_declarations + 'bottom-right': at_rule_declarations + 'left-bottom': at_rule_declarations + 'left-middle': at_rule_declarations + 'left-top': at_rule_declarations + 'right-bottom': at_rule_declarations + 'right-middle': at_rule_declarations + 'right-top': at_rule_declarations + 'top-center': 
at_rule_declarations + 'top-left-corner': at_rule_declarations + 'top-left': at_rule_declarations + 'top-right-corner': at_rule_declarations + 'top-right': at_rule_declarations + 'viewport': at_rule_declarations + '-ms-viewport': at_rule_declarations + 'document': at_rule_inherit_context + '-moz-document': at_rule_inherit_context + 'layer': at_rule_qualified_or_empty + 'scope': at_rule_inherit_context + 'font-palette-values': at_rule_declarations + 'counter-style': at_rule_declarations + 'font-feature-values': at_rule_declarations + 'annotation': at_rule_declarations + 'character-variant': at_rule_declarations + 'historical-forms': at_rule_declarations + 'ornaments': at_rule_declarations + 'styleset': at_rule_declarations + 'stylistic': at_rule_declarations + 'swash': at_rule_declarations + 'container': at_rule_inherit_context + 'starting-style': at_rule_inherit_context + 'position-try': at_rule_declarations +} +__global atKnownRuleCanBeRemovedIfEmpty = { + 'media': true + 'supports': true + 'font-face': true + 'page': true + 'bottom-center': true + 'bottom-left-corner': true + 'bottom-left': true + 'bottom-right-corner': true + 'bottom-right': true + 'left-bottom': true + 'left-middle': true + 'left-top': true + 'right-bottom': true + 'right-middle': true + 'right-top': true + 'top-center': true + 'top-left-corner': true + 'top-left': true + 'top-right-corner': true + 'top-right': true + 'scope': true + 'font-palette-values': true + 'container': true +} +type atRuleValidity = u8 + +enum atRuleValidity { + at_rule_invalid + at_rule_valid + at_rule_invalid_after +} + +struct atRuleContext { +pub mut: + after_loc logger.Loc + charset_validity atRuleValidity + import_validity atRuleValidity + can_inline_no_op_nesting bool + is_declaration_list bool + is_top_level bool +} + +fn (p &Parser) parse_at_rule(context atRuleContext) css_ast.Rule { + mut at_token := p.decoded() + mut at_range := p.current().range + mut lower_at_token := at_token.to_lower() + mut kind := special_at_rules[lower_at_token] + p.advance() + mut prelude_start := p.index + mut prelude_start_1 := p.index + mut prelude_start_2 := p.index + mut prelude := p.convert_tokens(p.tokens[prelude_start..p.index]) + mut block_start := p.index + match kind { + at_rule_empty { + p.expect(css_lexer.ts_emicolon) + p.parse_block(css_lexer.to_pen_brace, css_lexer.tc_lose_brace) + mut block := p.convert_tokens(p.tokens[block_start..p.index]) + return css_ast.Rule{ + loc: at_range.loc + data: &css_ast.RUnknownAt{ + at_token: at_token + prelude: prelude + block: block + } + } + } + at_rule_declarations { + mut matching_loc := p.current().range.loc + p.expect(css_lexer.to_pen_brace) + mut rules := p.parse_list_of_declarations(ListOfDeclarationsOpts{}) + mut close_brace_loc := p.current().range.loc + if !p.expect_with_matching_loc(css_lexer.tc_lose_brace, matching_loc) { + close_brace_loc = logger.Loc{} + } + if prelude.len == 1 && lower_at_token == 'counter-style' { + mut t := &prelude[0] + if t.kind == css_lexer.ti_dent { + t.kind = css_lexer.ts_ymbol + t.payload_index = p.symbol_for_name(t.loc, t.text).ref.inner_index + } + } + return css_ast.Rule{ + loc: at_range.loc + data: &css_ast.RKnownAt{ + at_token: at_token + prelude: prelude + rules: rules + close_brace_loc: close_brace_loc + } + } + } + at_rule_inherit_context { + mut matching_loc_1 := p.current().range.loc + p.expect(css_lexer.to_pen_brace) + mut is_at_media := lower_at_token == 'media' + if is_at_media { + p.enclosing_at_media << prelude + } + if context.is_declaration_list { + rules 
= p.parse_list_of_declarations(ListOfDeclarationsOpts{ + can_inline_no_op_nesting: context.can_inline_no_op_nesting + }) + } else { + rules = p.parse_list_of_rules(RuleContext{ + parse_selectors: true + }) + } + if is_at_media { + p.enclosing_at_media = p.enclosing_at_media[..p.enclosing_at_media.len - 1] + } + mut close_brace_loc_1 := p.current().range.loc + if !p.expect_with_matching_loc(css_lexer.tc_lose_brace, matching_loc) { + close_brace_loc = logger.Loc{} + } + if prelude.len >= 1 && lower_at_token == 'container' { + mut t_1 := &prelude[0] + if t.kind == css_lexer.ti_dent && t.text.to_lower() != 'not' { + t.kind = css_lexer.ts_ymbol + t.payload_index = p.symbol_for_name(t.loc, t.text).ref.inner_index + } + } + return css_ast.Rule{ + loc: at_range.loc + data: &css_ast.RKnownAt{ + at_token: at_token + prelude: prelude + rules: rules + close_brace_loc: close_brace_loc + } + } + } + at_rule_qualified_or_empty { + mut matching_loc_2 := p.current().range.loc + if p.eat(css_lexer.to_pen_brace) { + mut rules_1 := p.parse_list_of_rules(RuleContext{ + parse_selectors: true + }) + mut close_brace_loc_2 := p.current().range.loc + if !p.expect_with_matching_loc(css_lexer.tc_lose_brace, matching_loc) { + close_brace_loc = logger.Loc{} + } + return css_ast.Rule{ + loc: at_range.loc + data: &css_ast.RKnownAt{ + at_token: at_token + prelude: prelude + rules: rules + close_brace_loc: close_brace_loc + } + } + } + p.expect(css_lexer.ts_emicolon) + return css_ast.Rule{ + loc: at_range.loc + data: &css_ast.RKnownAt{ + at_token: at_token + prelude: prelude + } + } + } + else { + p.parse_block(css_lexer.to_pen_brace, css_lexer.tc_lose_brace) + mut block_1, _ := p.convert_tokens_helper(p.tokens[block_start..p.index], + css_lexer.te_nd_of_file, ConvertTokensOpts{ + allow_imports: true + }) + return css_ast.Rule{ + loc: at_range.loc + data: &css_ast.RUnknownAt{ + at_token: at_token + prelude: prelude + block: block + } + } + } + } +} + +fn (p &Parser) expect_valid_layer_name_ident() (string, bool) { + mut r := p.current().range + mut text := p.decoded() + if !p.expect(css_lexer.ti_dent) { + return '', false + } + match text { + 'initial', 'inherit', 'unset' { + p.log.add_id(logger.msg_id_css_invalid_at_layer, logger.warning, &p.tracker, + r, strconv.v_sprintf('%q cannot be used as a layer name', text)) + p.prev_error = r.loc + return '', false + } + } + return text, true +} + +fn (p &Parser) convert_tokens(tokens []css_lexer.Token) []css_ast.Token { + mut result, _ := p.convert_tokens_helper(tokens, css_lexer.te_nd_of_file, ConvertTokensOpts{}) + return result +} + +struct convertTokensOpts { +pub mut: + allow_imports bool + verbatim_whitespace bool + is_inside_calc_function bool +} + +fn (p &Parser) convert_tokens_helper(tokens []css_lexer.Token, close css_lexer.T, opts convertTokensOpts) ([]css_ast.Token, []css_lexer.Token) { + mut result := [] + { + } + mut nextWhitespace := 0 + if !opts.verbatim_whitespace { + for i, t in tokens { + if t.kind == css_lexer.tw_hitespace { + continue + } + if t.kind == css_lexer.ti_dent && t.decoded_text(p.source.contents).has_prefix('--') { + for _, t_1 in tokens[i + 1..] { + if t.kind == css_lexer.tw_hitespace { + continue + } + if t.kind == css_lexer.tc_olon { + opts.verbatim_whitespace = true + } + break + } + } + break + } + } + if !opts.verbatim_whitespace { + for i_1, t_2 in tokens { + if t.kind == css_lexer.tw_hitespace { + continue + } + if t.kind == css_lexer.ti_dent && t.decoded_text(p.source.contents).has_prefix('--') { + for _, t_3 in tokens[i + 1..] 
{ + if t.kind == css_lexer.tw_hitespace { + continue + } + if t.kind == css_lexer.tc_olon { + opts.verbatim_whitespace = true + } + break + } + } + break + } + } + if !opts.verbatim_whitespace { + for i_2, _ in result { + mut token := &result[i] + if i == 0 { + token.whitespace &= ~css_ast.whitespace_before + } + if i + 1 == result.len { + token.whitespace &= ~css_ast.whitespace_after + } + match token.kind { + css_lexer.tc_omma { + token.whitespace &= ~css_ast.whitespace_before + if i > 0 { + result[i - 1].whitespace &= ~css_ast.whitespace_after + } + if p.options.minify_whitespace { + token.whitespace &= ~css_ast.whitespace_after + if i + 1 < result.len { + result[i + 1].whitespace &= ~css_ast.whitespace_before + } + } else { + token.whitespace |= css_ast.whitespace_after + if i + 1 < result.len { + result[i + 1].whitespace |= css_ast.whitespace_before + } + } + } + } + } + } + if opts.verbatim_whitespace && result.len == 0 && next_whitespace == css_ast.whitespace_before { + result << css_ast.Token{ + kind: css_lexer.tw_hitespace + } + } + return result, tokens +} + +fn shift_dot(text string, dotOffset isize) (string, bool) { + if text.contains_any('eE') { + return '', false + } + mut sign := '' + if text.len > 0 && (text[0] == `-` || text[0] == `+`) { + sign = text[..1] + text = text[1..] + } + mut dot := text.index_byte(`.`) + if dot == -1 { + dot = text.len + } else { + text = text[..dot] + text[dot + 1..] + } + dot += dot_offset + for text.len > 0 && dot > 0 && text[0] == `0` { + text = text[1..] + dot-- + } + for text.len > 0 && text.len > dot && text[text.len - 1] == `0` { + text = text[..text.len - 1] + } + if dot >= text.len { + mut trailing_zeros := '0'.repeat(dot - text.len) + return strconv.v_sprintf('%s%s%s', sign, text, trailing_zeros), true + } + if dot < 0 { + text = '0'.repeat(-dot) + text + dot = isize(0) + } + return strconv.v_sprintf('%s%s.%s', sign, text[..dot], text[dot..]), true +} + +fn mangle_dimension(value string, unit string) (string, string, bool) { + if unit.equal_fold('ms') { + mut shifted, ok := shift_dot(value, -3) + if ok && shifted.len + s_len < value.len + ms_len { + return shifted, 's', true + } + } + if unit.equal_fold('s') { + mut shifted_1, ok_1 := shift_dot(value, 3) + if ok && shifted.len + ms_len < value.len + s_len { + return shifted, 'ms', true + } + } + return '', '', false +} + +fn mangle_number(t string) (string, bool) { + mut original := t + mut dot := t.index_byte(`.`) + if dot != -1 { + for t.len > 0 && t[t.len - 1] == `0` { + t = t[..t.len - 1] + } + if dot + 1 == t.len { + t = t[..dot] + if t == '' || t == '+' || t == '-' { + t += '0' + } + } else { + if t.len >= 3 && t[0] == `0` && t[1] == `.` && t[2] >= `0` && t[2] <= `9` { + t = t[1..] + } else if t.len >= 4 && (t[0] == `+` || t[0] == `-`) && t[1] == `0` && t[2] == `.` && t[3] >= `0` && t[3] <= `9` { + t = t[0..1] + t[2..] 
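+				// The two branches above drop a redundant leading zero,
+				// e.g. "0.5" becomes ".5" and "-0.25" becomes "-.25".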
+ } + } + } + return t, t != original +} + +fn (p &Parser) parse_selector_rule(isTopLevel bool, opts parseSelectorOpts) css_ast.Rule { + mut local := p.make_local_symbols + mut prelude_start := p.index + mut list, ok := p.parse_selector_list(opts) + if ok { + mut can_inline_no_op_nesting := true + for _, sel in list { + if sel.uses_pseudo_element() { + can_inline_no_op_nesting = false + break + } + } + mut selector := css_ast.RSelector{ + selectors: list + } + mut matching_loc := p.current().range.loc + if p.expect(css_lexer.to_pen_brace) { + p.in_selector_subtree++ + mut decl_opts := ListOfDeclarationsOpts{ + can_inline_no_op_nesting: can_inline_no_op_nesting + } + if opts.composes_context != nil && list.len == 1 && list[0].selectors.len == 1 && list[0].selectors[0].is_single_ampersand() { + decl_opts.composes_context = opts.composes_context + } else { + mut composes_context := ComposesContext{ + parent_range: list[0].selectors[0].range() + } + if opts.composes_context != nil { + composes_context.problem_range = opts.composes_context.parent_range + } + for _, sel_1 in list { + mut first := sel.selectors[0] + if first.combinator.byte != 0 { + composes_context.problem_range = logger.Range{ + loc: first.combinator.loc + len: 1 + } + } else if first.type_selector != nil { + composes_context.problem_range = first.type_selector.range() + } else if first.nesting_selector_loc.is_valid() { + composes_context.problem_range = logger.Range{ + loc: logger.Loc{ + start: i32(first.nesting_selector_loc.get_index()) + } + len: 1 + } + } else { + for i, ss in first.subclass_selectors { + mut class, ok_1 := ss.data + if i > 0 || !ok { + composes_context.problem_range = ss.range + } else { + composes_context.parent_refs << class.name.ref + } + } + } + if composes_context.problem_range.len > 0 { + break + } + if sel.selectors.len > 1 { + composes_context.problem_range = sel.selectors[1].range() + break + } + } + decl_opts.composes_context = &composes_context + } + selector.rules = p.parse_list_of_declarations(decl_opts) + p.in_selector_subtree-- + mut close_brace_loc := p.current().range.loc + if p.expect_with_matching_loc(css_lexer.tc_lose_brace, matching_loc) { + selector.close_brace_loc = close_brace_loc + } + p.make_local_symbols = local + return css_ast.Rule{ + loc: p.tokens[prelude_start].range.loc + data: &selector + } + } + } + p.make_local_symbols = local + p.index = prelude_start + return p.parse_qualified_rule(ParseQualifiedRuleOpts{ + is_already_invalid: true + is_top_level: is_top_level + is_declaration_context: opts.is_declaration_context + }) +} + +struct parseQualifiedRuleOpts { +pub mut: + is_already_invalid bool + is_top_level bool + is_declaration_context bool +} + +fn (p &Parser) parse_qualified_rule(opts parseQualifiedRuleOpts) css_ast.Rule { + mut prelude_start := p.index + mut prelude_loc := p.current().range.loc + mut prelude_loc_1 := p.current().range.loc + mut qualified := css_ast.RQualified{ + prelude: p.convert_tokens(p.tokens[prelude_start..p.index]) + } + mut matching_loc := p.current().range.loc + if p.eat(css_lexer.to_pen_brace) { + qualified.rules = p.parse_list_of_declarations(ListOfDeclarationsOpts{}) + mut close_brace_loc := p.current().range.loc + if p.expect_with_matching_loc(css_lexer.tc_lose_brace, matching_loc) { + qualified.close_brace_loc = close_brace_loc + } + } else if !opts.is_already_invalid { + p.expect(css_lexer.to_pen_brace) + } + return css_ast.Rule{ + loc: prelude_loc + data: &qualified + } +} + +type endOfRuleScan = u8 + +enum endOfRuleScan { + 
end_of_rule_unknown + end_of_rule_semicolon + end_of_rule_open_brace +} + +// Note: This was a late change to the CSS nesting syntax. +// See also: https://github.com/w3c/csswg-drafts/issues/7961 +fn (p &Parser) scan_for_end_of_rule() (endOfRuleScan, isize) { + mut initialStack := []css_lexer.T{} + mut stack := initial_stack[..0] + for i, t in p.tokens[p.index..] { + match t.kind { + css_lexer.ts_emicolon { + if stack.len == 0 { + return end_of_rule_semicolon, p.index + i + } + } + css_lexer.tf_unction, css_lexer.to_pen_paren { + stack << css_lexer.tc_lose_paren + } + css_lexer.to_pen_bracket { + stack << css_lexer.tc_lose_bracket + } + css_lexer.to_pen_brace { + if stack.len == 0 { + return end_of_rule_open_brace, p.index + i + } + stack << css_lexer.tc_lose_brace + } + css_lexer.tc_lose_paren, css_lexer.tc_lose_bracket { + mut n := stack.len + if n > 0 && t.kind == stack[n - 1] { + stack = stack[..n - 1] + } + } + css_lexer.tc_lose_brace { + mut n_1 := stack.len + if n > 0 && t.kind == stack[n - 1] { + stack = stack[..n - 1] + } else { + return end_of_rule_unknown, -1 + } + } + } + } + return end_of_rule_unknown, -1 +} + +fn (p &Parser) parse_declaration() css_ast.Rule { + mut key_start := p.index + mut key_range := p.tokens[key_start].range + mut key_is_ident := p.expect(css_lexer.ti_dent) + mut ok := false + if key_is_ident { + p.eat(css_lexer.tw_hitespace) + ok = p.eat(css_lexer.tc_olon) + } + mut value_start := p.index + mut value_start_1 := p.index + if !ok { + if key_is_ident { + mut end := key_range.end() + if end > p.prev_error.start { + p.prev_error.start = end + mut data := p.tracker.msg_data(logger.Range{ + loc: logger.Loc{ + start: end + } + }, 'Expected ":"') + data.location.suggestion = ':' + p.log.add_msg_id(logger.msg_id_css_csss_yntax_error, logger.Msg{ + kind: logger.warning + data: data + }) + } + } + return css_ast.Rule{ + loc: key_range.loc + data: &css_ast.RBadDeclaration{ + tokens: p.convert_tokens(p.tokens[key_start..p.index]) + } + } + } + mut key_token := p.tokens[key_start] + mut key_text := key_token.decoded_text(p.source.contents) + mut value := p.tokens[value_start..p.index] + mut verbatim_whitespace := key_text.has_prefix('--') + mut important := false + mut i := value.len - 1 + if i >= 0 && value[i].kind == css_lexer.tw_hitespace { + i-- + } + if i >= 0 && value[i].kind == css_lexer.ti_dent && value[i].decoded_text(p.source.contents).equal_fold('important') { + i-- + if i >= 0 && value[i].kind == css_lexer.tw_hitespace { + i-- + } + if i >= 0 && value[i].kind == css_lexer.td_elim_exclamation { + value = value[..i] + important = true + } + } + mut result, _ := p.convert_tokens_helper(value, css_lexer.te_nd_of_file, ConvertTokensOpts{ + allow_imports: true + verbatim_whitespace: verbatim_whitespace + }) + if !verbatim_whitespace && result.len > 0 { + if p.options.minify_whitespace { + result[0].whitespace &= ~css_ast.whitespace_before + } else { + result[0].whitespace |= css_ast.whitespace_before + } + } + mut lower_key_text := key_text.to_lower() + mut key := css_ast.known_declarations[lower_key_text] + if key == css_ast.du_nknown { + mut corrected, ok_1 := css_ast.maybe_correct_declaration_typo(lower_key_text) + if ok { + mut data_1 := p.tracker.msg_data(key_token.range, strconv.v_sprintf('%q is not a known CSS property', + key_text)) + data.location.suggestion = corrected + p.log.add_msg_id(logger.msg_id_css_unsupported_cssp_roperty, logger.Msg{ + kind: logger.warning + data: data + notes: [// UNHANDLED CompositeLit type InvalidExpr 
strtyp="Expr(InvalidExpr{})" + ] + }) + } + } + return css_ast.Rule{ + loc: key_range.loc + data: &css_ast.RDeclaration{ + key: key + key_text: key_text + key_range: key_token.range + value: result + important: important + } + } +} + +fn (p &Parser) parse_component_value() { + match p.current().kind { + css_lexer.tf_unction { + p.parse_block(css_lexer.tf_unction, css_lexer.tc_lose_paren) + } + css_lexer.to_pen_paren { + p.parse_block(css_lexer.to_pen_paren, css_lexer.tc_lose_paren) + } + css_lexer.to_pen_brace { + p.parse_block(css_lexer.to_pen_brace, css_lexer.tc_lose_brace) + } + css_lexer.to_pen_bracket { + p.parse_block(css_lexer.to_pen_bracket, css_lexer.tc_lose_bracket) + } + css_lexer.te_nd_of_file { + p.unexpected() + } + else { + p.advance() + } + } +} + +fn (p &Parser) parse_block(open css_lexer.T, close css_lexer.T) { + mut current := p.current() + mut matching_start := current.range.end() - 1 + if p.expect(open) { + for !p.eat(close) { + if p.peek(css_lexer.te_nd_of_file) { + p.expect_with_matching_loc(close, logger.Loc{ + start: matching_start + }) + return + } + p.parse_component_value() + } + } +} diff --git a/complex_tests/esbuild/js_parser/js_parser.go b/complex_tests/esbuild/js_parser/js_parser.go new file mode 100644 index 0000000..45daca9 --- /dev/null +++ b/complex_tests/esbuild/js_parser/js_parser.go @@ -0,0 +1,18021 @@ +package js_parser + +import ( + "fmt" + "math" + "regexp" + "sort" + "strings" + "unicode/utf8" + + "github.com/evanw/esbuild/internal/ast" + "github.com/evanw/esbuild/internal/compat" + "github.com/evanw/esbuild/internal/config" + "github.com/evanw/esbuild/internal/helpers" + "github.com/evanw/esbuild/internal/js_ast" + "github.com/evanw/esbuild/internal/js_lexer" + "github.com/evanw/esbuild/internal/logger" + "github.com/evanw/esbuild/internal/renamer" + "github.com/evanw/esbuild/internal/runtime" +) + +// This parser does two passes: +// +// 1. Parse the source into an AST, create the scope tree, and declare symbols. +// +// 2. Visit each node in the AST, bind identifiers to declared symbols, do +// constant folding, substitute compile-time variable definitions, and +// lower certain syntactic constructs as appropriate given the language +// target. +// +// So many things have been put in so few passes because we want to minimize +// the number of full-tree passes to improve performance. However, we need +// to have at least two separate passes to handle variable hoisting. See the +// comment about scopesInOrder below for more information. 
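+//
+// For example, in "function f() { x = 1; { var x } }" the assignment to "x" is
+// parsed before the hoisted "var x" declaration is reached, so identifiers can
+// only be bound to the correct function-scoped symbols after the entire tree
+// has been parsed and all symbols have been declared.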
+type parser struct { + options Options + log logger.Log + source logger.Source + tracker logger.LineColumnTracker + fnOrArrowDataParse fnOrArrowDataParse + fnOnlyDataVisit fnOnlyDataVisit + allocatedNames []string + currentScope *js_ast.Scope + scopesForCurrentPart []*js_ast.Scope + symbols []ast.Symbol + astHelpers js_ast.HelperContext + tsUseCounts []uint32 + injectedDefineSymbols []ast.Ref + injectedSymbolSources map[ast.Ref]injectedSymbolSource + injectedDotNames map[string][]injectedDotName + dropLabelsMap map[string]struct{} + exprComments map[logger.Loc][]string + mangledProps map[string]ast.Ref + reservedProps map[string]bool + symbolUses map[ast.Ref]js_ast.SymbolUse + importSymbolPropertyUses map[ast.Ref]map[string]js_ast.SymbolUse + symbolCallUses map[ast.Ref]js_ast.SymbolCallUse + declaredSymbols []js_ast.DeclaredSymbol + globPatternImports []globPatternImport + runtimeImports map[string]ast.LocRef + duplicateCaseChecker duplicateCaseChecker + unrepresentableIdentifiers map[string]bool + legacyOctalLiterals map[js_ast.E]logger.Range + scopesInOrderForEnum map[logger.Loc][]scopeOrder + binaryExprStack []binaryExprVisitor + + // For strict mode handling + hoistedRefForSloppyModeBlockFn map[ast.Ref]ast.Ref + + // For lowering private methods + privateGetters map[ast.Ref]ast.Ref + privateSetters map[ast.Ref]ast.Ref + + // These are for TypeScript + // + // We build up enough information about the TypeScript namespace hierarchy to + // be able to resolve scope lookups and property accesses for TypeScript enum + // and namespace features. Each JavaScript scope object inside a namespace + // has a reference to a map of exported namespace members from sibling scopes. + // + // In addition, there is a map from each relevant symbol reference to the data + // associated with that namespace or namespace member: "refToTSNamespaceMemberData". + // This gives enough info to be able to resolve queries into the namespace. + // + // When visiting expressions, namespace metadata is associated with the most + // recently visited node. If namespace metadata is present, "tsNamespaceTarget" + // will be set to the most recently visited node (as a way to mark that this + // node has metadata) and "tsNamespaceMemberData" will be set to the metadata. + refToTSNamespaceMemberData map[ast.Ref]js_ast.TSNamespaceMemberData + tsNamespaceTarget js_ast.E + tsNamespaceMemberData js_ast.TSNamespaceMemberData + emittedNamespaceVars map[ast.Ref]bool + isExportedInsideNamespace map[ast.Ref]ast.Ref + localTypeNames map[string]bool + tsEnums map[ast.Ref]map[string]js_ast.TSEnumValue + constValues map[ast.Ref]js_ast.ConstValue + propDerivedCtorValue js_ast.E + propMethodDecoratorScope *js_ast.Scope + + // This is the reference to the generated function argument for the namespace, + // which is different than the reference to the namespace itself: + // + // namespace ns { + // } + // + // The code above is transformed into something like this: + // + // var ns1; + // (function(ns2) { + // })(ns1 || (ns1 = {})); + // + // This variable is "ns2" not "ns1". It is only used during the second + // "visit" pass. 
+ enclosingNamespaceArgRef *ast.Ref + + // Imports (both ES6 and CommonJS) are tracked at the top level + importRecords []ast.ImportRecord + importRecordsForCurrentPart []uint32 + exportStarImportRecords []uint32 + + // These are for handling ES6 imports and exports + importItemsForNamespace map[ast.Ref]namespaceImportItems + isImportItem map[ast.Ref]bool + namedImports map[ast.Ref]js_ast.NamedImport + namedExports map[string]js_ast.NamedExport + topLevelSymbolToParts map[ast.Ref][]uint32 + importNamespaceCCMap map[importNamespaceCall]bool + + // The parser does two passes and we need to pass the scope tree information + // from the first pass to the second pass. That's done by tracking the calls + // to pushScopeForParsePass() and popScope() during the first pass in + // scopesInOrder. + // + // Then, when the second pass calls pushScopeForVisitPass() and popScope(), + // we consume entries from scopesInOrder and make sure they are in the same + // order. This way the second pass can efficiently use the same scope tree + // as the first pass without having to attach the scope tree to the AST. + // + // We need to split this into two passes because the pass that declares the + // symbols must be separate from the pass that binds identifiers to declared + // symbols to handle declaring a hoisted "var" symbol in a nested scope and + // binding a name to it in a parent or sibling scope. + scopesInOrder []scopeOrder + + // These propagate the name from the parent context into an anonymous child + // expression. For example: + // + // let foo = function() {} + // assert.strictEqual(foo.name, 'foo') + // + nameToKeep string + nameToKeepIsFor js_ast.E + + // These properties are for the visit pass, which runs after the parse pass. + // The visit pass binds identifiers to declared symbols, does constant + // folding, substitutes compile-time variable definitions, and lowers certain + // syntactic constructs as appropriate. + stmtExprValue js_ast.E + callTarget js_ast.E + dotOrIndexTarget js_ast.E + templateTag js_ast.E + deleteTarget js_ast.E + loopBody js_ast.S + suspiciousLogicalOperatorInsideArrow js_ast.E + moduleScope *js_ast.Scope + + // This is internal-only data used for the implementation of Yarn PnP + manifestForYarnPnP js_ast.Expr + stringLocalsForYarnPnP map[ast.Ref]stringLocalForYarnPnP + + // This helps recognize the "await import()" pattern. When this is present, + // warnings about non-string import paths will be omitted inside try blocks. + awaitTarget js_ast.E + + // This helps recognize the "import().catch()" pattern. We also try to avoid + // warning about this just like the "try { await import() }" pattern. + thenCatchChain thenCatchChain + + // When bundling, hoisted top-level local variables declared with "var" in + // nested scopes are moved up to be declared in the top-level scope instead. + // The old "var" statements are turned into regular assignments instead. This + // makes it easier to quickly scan the top-level statements for "var" locals + // with the guarantee that all will be found. + relocatedTopLevelVars []ast.LocRef + + // We need to lower private names such as "#foo" if they are used in a brand + // check such as "#foo in x" even if the private name syntax would otherwise + // be supported. This is because private names are a newly-added feature. + // + // However, this parser operates in only two passes for speed. The first pass + // parses things and declares variables, and the second pass lowers things and + // resolves references to declared variables. 
So the existence of a "#foo in x" + // expression for a specific "#foo" cannot be used to decide to lower "#foo" + // because it's too late by that point. There may be another expression such + // as "x.#foo" before that point and that must be lowered as well even though + // it has already been visited. + // + // Instead what we do is track just the names of fields used in private brand + // checks during the first pass. This tracks the names themselves, not symbol + // references. Then, during the second pass when we are about to enter into + // a class, we conservatively decide to lower all private names in that class + // which are used in a brand check anywhere in the file. + lowerAllOfThesePrivateNames map[string]bool + + // Temporary variables used for lowering + tempLetsToDeclare []ast.Ref + tempRefsToDeclare []tempRef + topLevelTempRefsToDeclare []tempRef + + lexer js_lexer.Lexer + + // Private field access in a decorator lowers all private fields in that class + parseExperimentalDecoratorNesting int + + // Temporary variables used for lowering + tempRefCount int + topLevelTempRefCount int + + // We need to scan over the source contents to recover the line and column offsets + jsxSourceLoc int + jsxSourceLine int + jsxSourceColumn int + + exportsRef ast.Ref + requireRef ast.Ref + moduleRef ast.Ref + importMetaRef ast.Ref + promiseRef ast.Ref + regExpRef ast.Ref + superCtorRef ast.Ref + + // Imports from "react/jsx-runtime" and "react", respectively. + // (Or whatever was specified in the "importSource" option) + jsxRuntimeImports map[string]ast.LocRef + jsxLegacyImports map[string]ast.LocRef + + // For lowering private methods + weakMapRef ast.Ref + weakSetRef ast.Ref + + esmImportStatementKeyword logger.Range + esmImportMeta logger.Range + esmExportKeyword logger.Range + enclosingClassKeyword logger.Range + topLevelAwaitKeyword logger.Range + liveTopLevelAwaitKeyword logger.Range + + latestArrowArgLoc logger.Loc + forbidSuffixAfterAsLoc logger.Loc + firstJSXElementLoc logger.Loc + + fnOrArrowDataVisit fnOrArrowDataVisit + + // ArrowFunction is a special case in the grammar. Although it appears to be + // a PrimaryExpression, it's actually an AssignmentExpression. This means if + // a AssignmentExpression ends up producing an ArrowFunction then nothing can + // come after it other than the comma operator, since the comma operator is + // the only thing above AssignmentExpression under the Expression rule: + // + // AssignmentExpression: + // ArrowFunction + // ConditionalExpression + // LeftHandSideExpression = AssignmentExpression + // LeftHandSideExpression AssignmentOperator AssignmentExpression + // + // Expression: + // AssignmentExpression + // Expression , AssignmentExpression + // + afterArrowBodyLoc logger.Loc + + // Setting this to true disables warnings about code that is very likely to + // be a bug. This is used to ignore issues inside "node_modules" directories. + // This has caught real issues in the past. However, it's not esbuild's job + // to find bugs in other libraries, and these warnings are problematic for + // people using these libraries with esbuild. The only fix is to either + // disable all esbuild warnings and not get warnings about your own code, or + // to try to get the warning fixed in the affected library. This is + // especially annoying if the warning is a false positive as was the case in + // https://github.com/firebase/firebase-js-sdk/issues/3814. So these warnings + // are now disabled for code inside "node_modules" directories. 
+ suppressWarningsAboutWeirdCode bool + + // A file is considered to be an ECMAScript module if it has any of the + // features of one (e.g. the "export" keyword), otherwise it's considered + // a CommonJS module. + // + // However, we have a single exception: a file where the only ESM feature + // is the "import" keyword is allowed to have CommonJS exports. This feature + // is necessary to be able to synchronously import ESM code into CommonJS, + // which we need to enable in a few important cases. Some examples are: + // our runtime code, injected files (the "inject" feature is ESM-only), + // and certain automatically-generated virtual modules from plugins. + isFileConsideredToHaveESMExports bool // Use only for export-related stuff + isFileConsideredESM bool // Use for all other stuff + + // Inside a TypeScript namespace, an "export declare" statement can be used + // to cause a namespace to be emitted even though it has no other observable + // effect. This flag is used to implement this feature. + // + // Specifically, namespaces should be generated for all of the following + // namespaces below except for "f", which should not be generated: + // + // namespace a { export declare const a } + // namespace b { export declare let [[b]] } + // namespace c { export declare function c() } + // namespace d { export declare class d {} } + // namespace e { export declare enum e {} } + // namespace f { export declare namespace f {} } + // + // The TypeScript compiler compiles this into the following code (notice "f" + // is missing): + // + // var a; (function (a_1) {})(a || (a = {})); + // var b; (function (b_1) {})(b || (b = {})); + // var c; (function (c_1) {})(c || (c = {})); + // var d; (function (d_1) {})(d || (d = {})); + // var e; (function (e_1) {})(e || (e = {})); + // + // Note that this should not be implemented by declaring symbols for "export + // declare" statements because the TypeScript compiler doesn't generate any + // code for these statements, so these statements are actually references to + // global variables. There is one exception, which is that local variables + // *should* be declared as symbols because they are replaced with. This seems + // like very arbitrary behavior but it's what the TypeScript compiler does, + // so we try to match it. + // + // Specifically, in the following code below "a" and "b" should be declared + // and should be substituted with "ns.a" and "ns.b" but the other symbols + // shouldn't. References to the other symbols actually refer to global + // variables instead of to symbols that are exported from the namespace. + // This is the case as of TypeScript 4.3. I assume this is a TypeScript bug: + // + // namespace ns { + // export declare const a + // export declare let [[b]] + // export declare function c() + // export declare class d { } + // export declare enum e { } + // console.log(a, b, c, d, e) + // } + // + // The TypeScript compiler compiles this into the following code: + // + // var ns; + // (function (ns) { + // console.log(ns.a, ns.b, c, d, e); + // })(ns || (ns = {})); + // + // Relevant issue: https://github.com/evanw/esbuild/issues/1158 + hasNonLocalExportDeclareInsideNamespace bool + + // When this flag is enabled, we attempt to fold all expressions that + // TypeScript would consider to be "constant expressions". This flag is + // enabled inside each enum body block since TypeScript requires numeric + // constant folding in enum definitions. 
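+	// For example, "enum E { A = 1 << 4, B = A | 2 }" must compile with the
+	// values 16 and 18 already computed, rather than emitting the expressions
+	// to be evaluated at run-time.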
+ // + // We also enable this flag in certain cases in JavaScript files such as when + // parsing "const" declarations at the top of a non-ESM file, but we still + // reuse TypeScript's notion of "constant expressions" for our own convenience. + // + // As of TypeScript 5.0, a "constant expression" is defined as follows: + // + // An expression is considered a constant expression if it is + // + // * a number or string literal, + // * a unary +, -, or ~ applied to a numeric constant expression, + // * a binary +, -, *, /, %, **, <<, >>, >>>, |, &, ^ applied to two numeric constant expressions, + // * a binary + applied to two constant expressions whereof at least one is a string, + // * a template expression where each substitution expression is a constant expression, + // * a parenthesized constant expression, + // * a dotted name (e.g. x.y.z) that references a const variable with a constant expression initializer and no type annotation, + // * a dotted name that references an enum member with an enum literal type, or + // * a dotted name indexed by a string literal (e.g. x.y["z"]) that references an enum member with an enum literal type. + // + // More detail: https://github.com/microsoft/TypeScript/pull/50528. Note that + // we don't implement certain items in this list. For example, we don't do all + // number-to-string conversions since ours might differ from how JavaScript + // would do it, which would be a correctness issue. + shouldFoldTypeScriptConstantExpressions bool + + allowIn bool + allowPrivateIdentifiers bool + hasTopLevelReturn bool + latestReturnHadSemicolon bool + messageAboutThisIsUndefined bool + isControlFlowDead bool + + // If this is true, then all top-level statements are wrapped in a try/catch + willWrapModuleInTryCatchForUsing bool +} + +type globPatternImport struct { + assertOrWith *ast.ImportAssertOrWith + parts []helpers.GlobPart + name string + approximateRange logger.Range + ref ast.Ref + kind ast.ImportKind +} + +type namespaceImportItems struct { + entries map[string]ast.LocRef + importRecordIndex uint32 +} + +type stringLocalForYarnPnP struct { + value []uint16 + loc logger.Loc +} + +type injectedSymbolSource struct { + source logger.Source + loc logger.Loc +} + +type injectedDotName struct { + parts []string + injectedDefineIndex uint32 +} + +type importNamespaceCallKind uint8 + +const ( + exprKindCall importNamespaceCallKind = iota + exprKindNew + exprKindJSXTag +) + +type importNamespaceCall struct { + ref ast.Ref + kind importNamespaceCallKind +} + +type thenCatchChain struct { + nextTarget js_ast.E + catchLoc logger.Loc + hasMultipleArgs bool + hasCatch bool +} + +// This is used as part of an incremental build cache key. Some of these values +// can potentially change between builds if they are derived from nearby +// "package.json" or "tsconfig.json" files that were changed since the last +// build. +type Options struct { + injectedFiles []config.InjectedFile + jsx config.JSXOptions + tsAlwaysStrict *config.TSAlwaysStrict + mangleProps *regexp.Regexp + reserveProps *regexp.Regexp + dropLabels []string + + // This pointer will always be different for each build but the contents + // shouldn't ever behave different semantically. We ignore this field for the + // equality comparison. + defines *config.ProcessedDefines + + // This is an embedded struct. Always access these directly instead of off + // the name "optionsThatSupportStructuralEquality". 
This is only grouped like + // this to make the equality comparison easier and safer (and hopefully faster). + optionsThatSupportStructuralEquality +} + +type optionsThatSupportStructuralEquality struct { + originalTargetEnv string + moduleTypeData js_ast.ModuleTypeData + unsupportedJSFeatures compat.JSFeature + unsupportedJSFeatureOverrides compat.JSFeature + unsupportedJSFeatureOverridesMask compat.JSFeature + + // Byte-sized values go here (gathered together here to keep this object compact) + ts config.TSOptions + mode config.Mode + platform config.Platform + outputFormat config.Format + asciiOnly bool + keepNames bool + minifySyntax bool + minifyIdentifiers bool + minifyWhitespace bool + omitRuntimeForTests bool + omitJSXRuntimeForTests bool + ignoreDCEAnnotations bool + treeShaking bool + dropDebugger bool + mangleQuoted bool + + // This is an internal-only option used for the implementation of Yarn PnP + decodeHydrateRuntimeStateYarnPnP bool +} + +func OptionsForYarnPnP() Options { + return Options{ + optionsThatSupportStructuralEquality: optionsThatSupportStructuralEquality{ + decodeHydrateRuntimeStateYarnPnP: true, + }, + } +} + +func OptionsFromConfig(options *config.Options) Options { + return Options{ + injectedFiles: options.InjectedFiles, + jsx: options.JSX, + defines: options.Defines, + tsAlwaysStrict: options.TSAlwaysStrict, + mangleProps: options.MangleProps, + reserveProps: options.ReserveProps, + dropLabels: options.DropLabels, + + optionsThatSupportStructuralEquality: optionsThatSupportStructuralEquality{ + unsupportedJSFeatures: options.UnsupportedJSFeatures, + unsupportedJSFeatureOverrides: options.UnsupportedJSFeatureOverrides, + unsupportedJSFeatureOverridesMask: options.UnsupportedJSFeatureOverridesMask, + originalTargetEnv: options.OriginalTargetEnv, + ts: options.TS, + mode: options.Mode, + platform: options.Platform, + outputFormat: options.OutputFormat, + moduleTypeData: options.ModuleTypeData, + asciiOnly: options.ASCIIOnly, + keepNames: options.KeepNames, + minifySyntax: options.MinifySyntax, + minifyIdentifiers: options.MinifyIdentifiers, + minifyWhitespace: options.MinifyWhitespace, + omitRuntimeForTests: options.OmitRuntimeForTests, + omitJSXRuntimeForTests: options.OmitJSXRuntimeForTests, + ignoreDCEAnnotations: options.IgnoreDCEAnnotations, + treeShaking: options.TreeShaking, + dropDebugger: options.DropDebugger, + mangleQuoted: options.MangleQuoted, + }, + } +} + +func (a *Options) Equal(b *Options) bool { + // Compare "optionsThatSupportStructuralEquality" + if a.optionsThatSupportStructuralEquality != b.optionsThatSupportStructuralEquality { + return false + } + + // Compare "tsAlwaysStrict" + if (a.tsAlwaysStrict == nil && b.tsAlwaysStrict != nil) || (a.tsAlwaysStrict != nil && b.tsAlwaysStrict == nil) || + (a.tsAlwaysStrict != nil && b.tsAlwaysStrict != nil && *a.tsAlwaysStrict != *b.tsAlwaysStrict) { + return false + } + + // Compare "mangleProps" and "reserveProps" + if !isSameRegexp(a.mangleProps, b.mangleProps) || !isSameRegexp(a.reserveProps, b.reserveProps) { + return false + } + + // Compare "dropLabels" + if !helpers.StringArraysEqual(a.dropLabels, b.dropLabels) { + return false + } + + // Compare "injectedFiles" + if len(a.injectedFiles) != len(b.injectedFiles) { + return false + } + for i, x := range a.injectedFiles { + y := b.injectedFiles[i] + if x.Source != y.Source || x.DefineName != y.DefineName || len(x.Exports) != len(y.Exports) { + return false + } + for j := range x.Exports { + if x.Exports[j] != y.Exports[j] { + return false + } 
+ } + } + + // Compare "jsx" + if a.jsx.Parse != b.jsx.Parse || !jsxExprsEqual(a.jsx.Factory, b.jsx.Factory) || !jsxExprsEqual(a.jsx.Fragment, b.jsx.Fragment) { + return false + } + + // Do a cheap assert that the defines object hasn't changed + if (a.defines != nil || b.defines != nil) && (a.defines == nil || b.defines == nil || + len(a.defines.IdentifierDefines) != len(b.defines.IdentifierDefines) || + len(a.defines.DotDefines) != len(b.defines.DotDefines)) { + panic("Internal error") + } + + return true +} + +func isSameRegexp(a *regexp.Regexp, b *regexp.Regexp) bool { + if a == nil { + return b == nil + } else { + return b != nil && a.String() == b.String() + } +} + +func jsxExprsEqual(a config.DefineExpr, b config.DefineExpr) bool { + if !helpers.StringArraysEqual(a.Parts, b.Parts) { + return false + } + + if a.Constant != nil { + if b.Constant == nil || !js_ast.ValuesLookTheSame(a.Constant, b.Constant) { + return false + } + } else if b.Constant != nil { + return false + } + + return true +} + +type tempRef struct { + valueOrNil js_ast.Expr + ref ast.Ref +} + +const ( + locModuleScope = -1 +) + +type scopeOrder struct { + scope *js_ast.Scope + loc logger.Loc +} + +type awaitOrYield uint8 + +const ( + // The keyword is used as an identifier, not a special expression + allowIdent awaitOrYield = iota + + // Declaring the identifier is forbidden, and the keyword is used as a special expression + allowExpr + + // Declaring the identifier is forbidden, and using the identifier is also forbidden + forbidAll +) + +// This is function-specific information used during parsing. It is saved and +// restored on the call stack around code that parses nested functions and +// arrow expressions. +type fnOrArrowDataParse struct { + arrowArgErrors *deferredArrowArgErrors + decoratorScope *js_ast.Scope + asyncRange logger.Range + needsAsyncLoc logger.Loc + await awaitOrYield + yield awaitOrYield + allowSuperCall bool + allowSuperProperty bool + isTopLevel bool + isConstructor bool + isTypeScriptDeclare bool + isThisDisallowed bool + isReturnDisallowed bool + + // In TypeScript, forward declarations of functions have no bodies + allowMissingBodyForTypeScript bool +} + +// This is function-specific information used during visiting. It is saved and +// restored on the call stack around code that parses nested functions and +// arrow expressions. +type fnOrArrowDataVisit struct { + // This is used to silence unresolvable imports due to "require" calls inside + // a try/catch statement. The assumption is that the try/catch statement is + // there to handle the case where the reference to "require" crashes. + tryBodyCount int32 + tryCatchLoc logger.Loc + + isArrow bool + isAsync bool + isGenerator bool + isInsideLoop bool + isInsideSwitch bool + isDerivedClassCtor bool + isOutsideFnOrArrow bool + shouldLowerSuperPropertyAccess bool +} + +// This is function-specific information used during visiting. It is saved and +// restored on the call stack around code that parses nested functions (but not +// nested arrow functions). +type fnOnlyDataVisit struct { + // This is a reference to the magic "arguments" variable that exists inside + // functions in JavaScript. It will be non-nil inside functions and nil + // otherwise. + argumentsRef *ast.Ref + + // Arrow functions don't capture the value of "this" and "arguments". Instead, + // the values are inherited from the surrounding context. 
If arrow functions + // are turned into regular functions due to lowering, we will need to generate + // local variables to capture these values so they are preserved correctly. + thisCaptureRef *ast.Ref + argumentsCaptureRef *ast.Ref + + // If true, we're inside a static class context where "this" expressions + // should be replaced with the class name. + shouldReplaceThisWithInnerClassNameRef bool + + // This is true if "this" is equal to the class name. It's true if we're in a + // static class field initializer, a static class method, or a static class + // block. + isInStaticClassContext bool + + // This is a reference to the enclosing class name if there is one. It's used + // to implement "this" and "super" references. A name is automatically generated + // if one is missing so this will always be present inside a class body. + innerClassNameRef *ast.Ref + + // If we're inside an async arrow function and async functions are not + // supported, then we will have to convert that arrow function to a generator + // function. That means references to "arguments" inside the arrow function + // will have to reference a captured variable instead of the real variable. + isInsideAsyncArrowFn bool + + // If false, disallow "new.target" expressions. We disallow all "new.target" + // expressions at the top-level of the file (i.e. not inside a function or + // a class field). Technically since CommonJS files are wrapped in a function + // you can use "new.target" in node as an alias for "undefined" but we don't + // support that. + isNewTargetAllowed bool + + // If false, the value for "this" is the top-level module scope "this" value. + // That means it's "undefined" for ECMAScript modules and "exports" for + // CommonJS modules. We track this information so that we can substitute the + // correct value for these top-level "this" references at compile time instead + // of passing the "this" expression through to the output and leaving the + // interpretation up to the run-time behavior of the generated code. + // + // If true, the value for "this" is nested inside something (either a function + // or a class declaration). That means the top-level module scope "this" value + // has been shadowed and is now inaccessible. + isThisNested bool + + // Do not warn about "this" being undefined for code that the TypeScript + // compiler generates that looks like this: + // + // var __rest = (this && this.__rest) || function (s, e) { + // ... + // }; + // + silenceMessageAboutThisBeingUndefined bool +} + +const bloomFilterSize = 251 + +type duplicateCaseValue struct { + value js_ast.Expr + hash uint32 +} + +type duplicateCaseChecker struct { + cases []duplicateCaseValue + bloomFilter [(bloomFilterSize + 7) / 8]byte +} + +func (dc *duplicateCaseChecker) reset() { + // Preserve capacity + dc.cases = dc.cases[:0] + + // This should be optimized by the compiler. 
See this for more information: + // https://github.com/golang/go/issues/5373 + bytes := dc.bloomFilter + for i := range bytes { + bytes[i] = 0 + } +} + +func (dc *duplicateCaseChecker) check(p *parser, expr js_ast.Expr) { + if hash, ok := duplicateCaseHash(expr); ok { + bucket := hash % bloomFilterSize + entry := &dc.bloomFilter[bucket/8] + mask := byte(1) << (bucket % 8) + + // Check for collisions + if (*entry & mask) != 0 { + for _, c := range dc.cases { + if c.hash == hash { + if equals, couldBeIncorrect := duplicateCaseEquals(c.value, expr); equals { + var laterRange logger.Range + var earlierRange logger.Range + if _, ok := expr.Data.(*js_ast.EString); ok { + laterRange = p.source.RangeOfString(expr.Loc) + } else { + laterRange = p.source.RangeOfOperatorBefore(expr.Loc, "case") + } + if _, ok := c.value.Data.(*js_ast.EString); ok { + earlierRange = p.source.RangeOfString(c.value.Loc) + } else { + earlierRange = p.source.RangeOfOperatorBefore(c.value.Loc, "case") + } + text := "This case clause will never be evaluated because it duplicates an earlier case clause" + if couldBeIncorrect { + text = "This case clause may never be evaluated because it likely duplicates an earlier case clause" + } + kind := logger.Warning + if p.suppressWarningsAboutWeirdCode { + kind = logger.Debug + } + p.log.AddIDWithNotes(logger.MsgID_JS_DuplicateCase, kind, &p.tracker, laterRange, text, + []logger.MsgData{p.tracker.MsgData(earlierRange, "The earlier case clause is here:")}) + } + return + } + } + } + + *entry |= mask + dc.cases = append(dc.cases, duplicateCaseValue{hash: hash, value: expr}) + } +} + +func duplicateCaseHash(expr js_ast.Expr) (uint32, bool) { + switch e := expr.Data.(type) { + case *js_ast.EInlinedEnum: + return duplicateCaseHash(e.Value) + + case *js_ast.ENull: + return 0, true + + case *js_ast.EUndefined: + return 1, true + + case *js_ast.EBoolean: + if e.Value { + return helpers.HashCombine(2, 1), true + } + return helpers.HashCombine(2, 0), true + + case *js_ast.ENumber: + bits := math.Float64bits(e.Value) + return helpers.HashCombine(helpers.HashCombine(3, uint32(bits)), uint32(bits>>32)), true + + case *js_ast.EString: + hash := uint32(4) + for _, c := range e.Value { + hash = helpers.HashCombine(hash, uint32(c)) + } + return hash, true + + case *js_ast.EBigInt: + hash := uint32(5) + for _, c := range e.Value { + hash = helpers.HashCombine(hash, uint32(c)) + } + return hash, true + + case *js_ast.EIdentifier: + return helpers.HashCombine(6, e.Ref.InnerIndex), true + + case *js_ast.EDot: + if target, ok := duplicateCaseHash(e.Target); ok { + return helpers.HashCombineString(helpers.HashCombine(7, target), e.Name), true + } + + case *js_ast.EIndex: + if target, ok := duplicateCaseHash(e.Target); ok { + if index, ok := duplicateCaseHash(e.Index); ok { + return helpers.HashCombine(helpers.HashCombine(8, target), index), true + } + } + } + + return 0, false +} + +func duplicateCaseEquals(left js_ast.Expr, right js_ast.Expr) (equals bool, couldBeIncorrect bool) { + if b, ok := right.Data.(*js_ast.EInlinedEnum); ok { + return duplicateCaseEquals(left, b.Value) + } + + switch a := left.Data.(type) { + case *js_ast.EInlinedEnum: + return duplicateCaseEquals(a.Value, right) + + case *js_ast.ENull: + _, ok := right.Data.(*js_ast.ENull) + return ok, false + + case *js_ast.EUndefined: + _, ok := right.Data.(*js_ast.EUndefined) + return ok, false + + case *js_ast.EBoolean: + b, ok := right.Data.(*js_ast.EBoolean) + return ok && a.Value == b.Value, false + + case *js_ast.ENumber: + b, ok := 
right.Data.(*js_ast.ENumber) + return ok && a.Value == b.Value, false + + case *js_ast.EString: + b, ok := right.Data.(*js_ast.EString) + return ok && helpers.UTF16EqualsUTF16(a.Value, b.Value), false + + case *js_ast.EBigInt: + if b, ok := right.Data.(*js_ast.EBigInt); ok { + equal, ok := js_ast.CheckEqualityBigInt(a.Value, b.Value) + return ok && equal, false + } + + case *js_ast.EIdentifier: + b, ok := right.Data.(*js_ast.EIdentifier) + return ok && a.Ref == b.Ref, false + + case *js_ast.EDot: + if b, ok := right.Data.(*js_ast.EDot); ok && a.OptionalChain == b.OptionalChain && a.Name == b.Name { + equals, _ := duplicateCaseEquals(a.Target, b.Target) + return equals, true + } + + case *js_ast.EIndex: + if b, ok := right.Data.(*js_ast.EIndex); ok && a.OptionalChain == b.OptionalChain { + if equals, _ := duplicateCaseEquals(a.Index, b.Index); equals { + equals, _ := duplicateCaseEquals(a.Target, b.Target) + return equals, true + } + } + } + + return false, false +} + +type duplicatePropertiesIn uint8 + +const ( + duplicatePropertiesInObject duplicatePropertiesIn = iota + duplicatePropertiesInClass +) + +func (p *parser) warnAboutDuplicateProperties(properties []js_ast.Property, in duplicatePropertiesIn) { + if len(properties) < 2 { + return + } + + type keyKind uint8 + type existingKey struct { + loc logger.Loc + kind keyKind + } + const ( + keyMissing keyKind = iota + keyNormal + keyGet + keySet + keyGetAndSet + ) + instanceKeys := make(map[string]existingKey) + staticKeys := make(map[string]existingKey) + + for _, property := range properties { + if property.Kind != js_ast.PropertySpread { + if str, ok := property.Key.Data.(*js_ast.EString); ok { + var keys map[string]existingKey + if property.Flags.Has(js_ast.PropertyIsStatic) { + keys = staticKeys + } else { + keys = instanceKeys + } + key := helpers.UTF16ToString(str.Value) + prevKey := keys[key] + nextKey := existingKey{kind: keyNormal, loc: property.Key.Loc} + + if property.Kind == js_ast.PropertyGetter { + nextKey.kind = keyGet + } else if property.Kind == js_ast.PropertySetter { + nextKey.kind = keySet + } + + if prevKey.kind != keyMissing && (in != duplicatePropertiesInObject || key != "__proto__") && (in != duplicatePropertiesInClass || key != "constructor") { + if (prevKey.kind == keyGet && nextKey.kind == keySet) || (prevKey.kind == keySet && nextKey.kind == keyGet) { + nextKey.kind = keyGetAndSet + } else { + var id logger.MsgID + var what string + var where string + switch in { + case duplicatePropertiesInObject: + id = logger.MsgID_JS_DuplicateObjectKey + what = "key" + where = "object literal" + case duplicatePropertiesInClass: + id = logger.MsgID_JS_DuplicateClassMember + what = "member" + where = "class body" + } + r := js_lexer.RangeOfIdentifier(p.source, property.Key.Loc) + p.log.AddIDWithNotes(id, logger.Warning, &p.tracker, r, + fmt.Sprintf("Duplicate %s %q in %s", what, key, where), + []logger.MsgData{p.tracker.MsgData(js_lexer.RangeOfIdentifier(p.source, prevKey.loc), + fmt.Sprintf("The original %s %q is here:", what, key))}) + } + } + + keys[key] = nextKey + } + } + } +} + +func isJumpStatement(data js_ast.S) bool { + switch data.(type) { + case *js_ast.SBreak, *js_ast.SContinue, *js_ast.SReturn, *js_ast.SThrow: + return true + } + + return false +} + +func jumpStmtsLookTheSame(left js_ast.S, right js_ast.S) bool { + switch a := left.(type) { + case *js_ast.SBreak: + b, ok := right.(*js_ast.SBreak) + return ok && (a.Label == nil) == (b.Label == nil) && (a.Label == nil || a.Label.Ref == b.Label.Ref) + + case 
*js_ast.SContinue: + b, ok := right.(*js_ast.SContinue) + return ok && (a.Label == nil) == (b.Label == nil) && (a.Label == nil || a.Label.Ref == b.Label.Ref) + + case *js_ast.SReturn: + b, ok := right.(*js_ast.SReturn) + return ok && (a.ValueOrNil.Data == nil) == (b.ValueOrNil.Data == nil) && + (a.ValueOrNil.Data == nil || js_ast.ValuesLookTheSame(a.ValueOrNil.Data, b.ValueOrNil.Data)) + + case *js_ast.SThrow: + b, ok := right.(*js_ast.SThrow) + return ok && js_ast.ValuesLookTheSame(a.Value.Data, b.Value.Data) + } + + return false +} + +func (p *parser) selectLocalKind(kind js_ast.LocalKind) js_ast.LocalKind { + // Use "var" instead of "let" and "const" if the variable declaration may + // need to be separated from the initializer. This allows us to safely move + // this declaration into a nested scope. + if p.currentScope.Parent == nil && (kind == js_ast.LocalLet || kind == js_ast.LocalConst) && + (p.options.mode == config.ModeBundle || p.willWrapModuleInTryCatchForUsing) { + return js_ast.LocalVar + } + + // Optimization: use "let" instead of "const" because it's shorter. This is + // only done when bundling because assigning to "const" is only an error when + // bundling. + if p.options.mode == config.ModeBundle && kind == js_ast.LocalConst && p.options.minifySyntax { + return js_ast.LocalLet + } + + return kind +} + +func (p *parser) pushScopeForParsePass(kind js_ast.ScopeKind, loc logger.Loc) int { + parent := p.currentScope + scope := &js_ast.Scope{ + Kind: kind, + Parent: parent, + Members: make(map[string]js_ast.ScopeMember), + Label: ast.LocRef{Ref: ast.InvalidRef}, + } + if parent != nil { + parent.Children = append(parent.Children, scope) + scope.StrictMode = parent.StrictMode + scope.UseStrictLoc = parent.UseStrictLoc + } + p.currentScope = scope + + // Enforce that scope locations are strictly increasing to help catch bugs + // where the pushed scopes are mismatched between the first and second passes + if len(p.scopesInOrder) > 0 { + prevStart := p.scopesInOrder[len(p.scopesInOrder)-1].loc.Start + if prevStart >= loc.Start { + panic(fmt.Sprintf("Scope location %d must be greater than %d", loc.Start, prevStart)) + } + } + + // Copy down function arguments into the function body scope. That way we get + // errors if a statement in the function body tries to re-declare any of the + // arguments. + if kind == js_ast.ScopeFunctionBody { + if scope.Parent.Kind != js_ast.ScopeFunctionArgs { + panic("Internal error") + } + for name, member := range scope.Parent.Members { + // Don't copy down the optional function expression name. Re-declaring + // the name of a function expression is allowed. + kind := p.symbols[member.Ref.InnerIndex].Kind + if kind != ast.SymbolHoistedFunction { + scope.Members[name] = member + } + } + } + + // Remember the length in case we call popAndDiscardScope() later + scopeIndex := len(p.scopesInOrder) + p.scopesInOrder = append(p.scopesInOrder, scopeOrder{loc: loc, scope: scope}) + return scopeIndex +} + +func (p *parser) popScope() { + // We cannot rename anything inside a scope containing a direct eval() call + if p.currentScope.ContainsDirectEval { + for _, member := range p.currentScope.Members { + // Using direct eval when bundling is not a good idea in general because + // esbuild must assume that it can potentially reach anything in any of + // the containing scopes. We try to make it work but this isn't possible + // in some cases. 
+ // + // For example, symbols imported using an ESM import are a live binding + // to the underlying symbol in another file. This is emulated during + // scope hoisting by erasing the ESM import and just referencing the + // underlying symbol in the flattened bundle directly. However, that + // symbol may have a different name which could break uses of direct + // eval: + // + // // Before bundling + // import { foo as bar } from './foo.js' + // console.log(eval('bar')) + // + // // After bundling + // let foo = 123 // The contents of "foo.js" + // console.log(eval('bar')) + // + // There really isn't any way to fix this. You can't just rename "foo" to + // "bar" in the example above because there may be a third bundled file + // that also contains direct eval and imports the same symbol with a + // different conflicting import alias. And there is no way to store a + // live binding to the underlying symbol in a variable with the import's + // name so that direct eval can access it: + // + // // After bundling + // let foo = 123 // The contents of "foo.js" + // const bar = /* cannot express a live binding to "foo" here */ + // console.log(eval('bar')) + // + // Technically a "with" statement could potentially make this work (with + // a big hit to performance), but they are deprecated and are unavailable + // in strict mode. This is a non-starter since all ESM code is strict mode. + // + // So while we still try to obey the requirement that all symbol names are + // pinned when direct eval is present, we make an exception for top-level + // symbols in an ESM file when bundling is enabled. We make no guarantee + // that "eval" will be able to reach these symbols and we allow them to be + // renamed or removed by tree shaking. + if p.options.mode == config.ModeBundle && p.currentScope.Parent == nil && p.isFileConsideredESM { + continue + } + + p.symbols[member.Ref.InnerIndex].Flags |= ast.MustNotBeRenamed + } + } + + p.currentScope = p.currentScope.Parent +} + +func (p *parser) popAndDiscardScope(scopeIndex int) { + // Unwind any newly-added scopes in reverse order + for i := len(p.scopesInOrder) - 1; i >= scopeIndex; i-- { + scope := p.scopesInOrder[i].scope + parent := scope.Parent + last := len(parent.Children) - 1 + if parent.Children[last] != scope { + panic("Internal error") + } + parent.Children = parent.Children[:last] + } + + // Move up to the parent scope + p.currentScope = p.currentScope.Parent + + // Truncate the scope order where we started to pretend we never saw this scope + p.scopesInOrder = p.scopesInOrder[:scopeIndex] +} + +func (p *parser) popAndFlattenScope(scopeIndex int) { + // Move up to the parent scope + toFlatten := p.currentScope + parent := toFlatten.Parent + p.currentScope = parent + + // Erase this scope from the order. This will shift over the indices of all + // the scopes that were created after us. However, we shouldn't have to + // worry about other code with outstanding scope indices for these scopes. + // These scopes were all created in between this scope's push and pop + // operations, so they should all be child scopes and should all be popped + // by the time we get here. 
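+	// As a rough illustration (made-up entries, not from a real file): if
+	// scopesInOrder is [A, B, C, D] and scopeIndex points at B, the copy and
+	// truncation below turn it into [A, C, D]. Any scope index previously handed
+	// out for C or D would then be stale, which is why this is only done while no
+	// such indices are outstanding.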
+ copy(p.scopesInOrder[scopeIndex:], p.scopesInOrder[scopeIndex+1:]) + p.scopesInOrder = p.scopesInOrder[:len(p.scopesInOrder)-1] + + // Remove the last child from the parent scope + last := len(parent.Children) - 1 + if parent.Children[last] != toFlatten { + panic("Internal error") + } + parent.Children = parent.Children[:last] + + // Reparent our child scopes into our parent + for _, scope := range toFlatten.Children { + scope.Parent = parent + parent.Children = append(parent.Children, scope) + } +} + +// Undo all scopes pushed and popped after this scope index. This assumes that +// the scope stack is at the same level now as it was at the given scope index. +func (p *parser) discardScopesUpTo(scopeIndex int) { + // Remove any direct children from their parent + children := p.currentScope.Children + for _, child := range p.scopesInOrder[scopeIndex:] { + if child.scope.Parent == p.currentScope { + for i := len(children) - 1; i >= 0; i-- { + if children[i] == child.scope { + children = append(children[:i], children[i+1:]...) + break + } + } + } + } + p.currentScope.Children = children + + // Truncate the scope order where we started to pretend we never saw this scope + p.scopesInOrder = p.scopesInOrder[:scopeIndex] +} + +func (p *parser) newSymbol(kind ast.SymbolKind, name string) ast.Ref { + ref := ast.Ref{SourceIndex: p.source.Index, InnerIndex: uint32(len(p.symbols))} + p.symbols = append(p.symbols, ast.Symbol{ + Kind: kind, + OriginalName: name, + Link: ast.InvalidRef, + }) + if p.options.ts.Parse { + p.tsUseCounts = append(p.tsUseCounts, 0) + } + return ref +} + +// This is similar to "ast.MergeSymbols" but it works with this parser's +// one-level symbol map instead of the linker's two-level symbol map. It also +// doesn't handle cycles since they shouldn't come up due to the way this +// function is used. +func (p *parser) mergeSymbols(old ast.Ref, new ast.Ref) ast.Ref { + if old == new { + return new + } + + oldSymbol := &p.symbols[old.InnerIndex] + if oldSymbol.Link != ast.InvalidRef { + oldSymbol.Link = p.mergeSymbols(oldSymbol.Link, new) + return oldSymbol.Link + } + + newSymbol := &p.symbols[new.InnerIndex] + if newSymbol.Link != ast.InvalidRef { + newSymbol.Link = p.mergeSymbols(old, newSymbol.Link) + return newSymbol.Link + } + + oldSymbol.Link = new + newSymbol.MergeContentsWith(oldSymbol) + return new +} + +type mergeResult int + +const ( + mergeForbidden = iota + mergeReplaceWithNew + mergeOverwriteWithNew + mergeKeepExisting + mergeBecomePrivateGetSetPair + mergeBecomePrivateStaticGetSetPair +) + +func (p *parser) canMergeSymbols(scope *js_ast.Scope, existing ast.SymbolKind, new ast.SymbolKind) mergeResult { + if existing == ast.SymbolUnbound { + return mergeReplaceWithNew + } + + // In TypeScript, imports are allowed to silently collide with symbols within + // the module. Presumably this is because the imports may be type-only: + // + // import {Foo} from 'bar' + // class Foo {} + // + if p.options.ts.Parse && existing == ast.SymbolImport { + return mergeReplaceWithNew + } + + // "enum Foo {} enum Foo {}" + if new == ast.SymbolTSEnum && existing == ast.SymbolTSEnum { + return mergeKeepExisting + } + + // "namespace Foo { ... } enum Foo {}" + if new == ast.SymbolTSEnum && existing == ast.SymbolTSNamespace { + return mergeReplaceWithNew + } + + // "namespace Foo { ... } namespace Foo { ... }" + // "function Foo() {} namespace Foo { ... }" + // "enum Foo {} namespace Foo { ... 
}" + if new == ast.SymbolTSNamespace { + switch existing { + case ast.SymbolTSNamespace, ast.SymbolHoistedFunction, ast.SymbolGeneratorOrAsyncFunction, ast.SymbolTSEnum, ast.SymbolClass: + return mergeKeepExisting + } + } + + // "var foo; var foo;" + // "var foo; function foo() {}" + // "function foo() {} var foo;" + // "function *foo() {} function *foo() {}" but not "{ function *foo() {} function *foo() {} }" + if new.IsHoistedOrFunction() && existing.IsHoistedOrFunction() && + (scope.Kind == js_ast.ScopeEntry || + scope.Kind == js_ast.ScopeFunctionBody || + scope.Kind == js_ast.ScopeFunctionArgs || + (new == existing && new.IsHoisted())) { + return mergeReplaceWithNew + } + + // "get #foo() {} set #foo() {}" + // "set #foo() {} get #foo() {}" + if (existing == ast.SymbolPrivateGet && new == ast.SymbolPrivateSet) || + (existing == ast.SymbolPrivateSet && new == ast.SymbolPrivateGet) { + return mergeBecomePrivateGetSetPair + } + if (existing == ast.SymbolPrivateStaticGet && new == ast.SymbolPrivateStaticSet) || + (existing == ast.SymbolPrivateStaticSet && new == ast.SymbolPrivateStaticGet) { + return mergeBecomePrivateStaticGetSetPair + } + + // "try {} catch (e) { var e }" + if existing == ast.SymbolCatchIdentifier && new == ast.SymbolHoisted { + return mergeReplaceWithNew + } + + // "function() { var arguments }" + if existing == ast.SymbolArguments && new == ast.SymbolHoisted { + return mergeKeepExisting + } + + // "function() { let arguments }" + if existing == ast.SymbolArguments && new != ast.SymbolHoisted { + return mergeOverwriteWithNew + } + + return mergeForbidden +} + +func (p *parser) addSymbolAlreadyDeclaredError(name string, newLoc logger.Loc, oldLoc logger.Loc) { + p.log.AddErrorWithNotes(&p.tracker, + js_lexer.RangeOfIdentifier(p.source, newLoc), + fmt.Sprintf("The symbol %q has already been declared", name), + + []logger.MsgData{p.tracker.MsgData( + js_lexer.RangeOfIdentifier(p.source, oldLoc), + fmt.Sprintf("The symbol %q was originally declared here:", name), + )}, + ) +} + +func (p *parser) declareSymbol(kind ast.SymbolKind, loc logger.Loc, name string) ast.Ref { + p.checkForUnrepresentableIdentifier(loc, name) + + // Allocate a new symbol + ref := p.newSymbol(kind, name) + + // Check for a collision in the declaring scope + if existing, ok := p.currentScope.Members[name]; ok { + symbol := &p.symbols[existing.Ref.InnerIndex] + + switch p.canMergeSymbols(p.currentScope, symbol.Kind, kind) { + case mergeForbidden: + p.addSymbolAlreadyDeclaredError(name, loc, existing.Loc) + return existing.Ref + + case mergeKeepExisting: + ref = existing.Ref + + case mergeReplaceWithNew: + symbol.Link = ref + p.currentScope.Replaced = append(p.currentScope.Replaced, existing) + + // If these are both functions, remove the overwritten declaration + if p.options.minifySyntax && kind.IsFunction() && symbol.Kind.IsFunction() { + symbol.Flags |= ast.RemoveOverwrittenFunctionDeclaration + } + + case mergeBecomePrivateGetSetPair: + ref = existing.Ref + symbol.Kind = ast.SymbolPrivateGetSetPair + + case mergeBecomePrivateStaticGetSetPair: + ref = existing.Ref + symbol.Kind = ast.SymbolPrivateStaticGetSetPair + + case mergeOverwriteWithNew: + } + } + + // Overwrite this name in the declaring scope + p.currentScope.Members[name] = js_ast.ScopeMember{Ref: ref, Loc: loc} + return ref + +} + +// This type is just so we can use Go's native sort function +type scopeMemberArray []js_ast.ScopeMember + +func (a scopeMemberArray) Len() int { return len(a) } +func (a scopeMemberArray) Swap(i int, j int) { 
a[i], a[j] = a[j], a[i] } + +func (a scopeMemberArray) Less(i int, j int) bool { + ai := a[i].Ref + bj := a[j].Ref + return ai.InnerIndex < bj.InnerIndex || (ai.InnerIndex == bj.InnerIndex && ai.SourceIndex < bj.SourceIndex) +} + +func (p *parser) hoistSymbols(scope *js_ast.Scope) { + // Duplicate function declarations are forbidden in nested blocks in strict + // mode. Separately, they are also forbidden at the top-level of modules. + // This check needs to be delayed until now instead of being done when the + // functions are declared because we potentially need to scan the whole file + // to know if the file is considered to be in strict mode (or is considered + // to be a module). We might only encounter an "export {}" clause at the end + // of the file. + if (scope.StrictMode != js_ast.SloppyMode && scope.Kind == js_ast.ScopeBlock) || (scope.Parent == nil && p.isFileConsideredESM) { + for _, replaced := range scope.Replaced { + symbol := &p.symbols[replaced.Ref.InnerIndex] + if symbol.Kind.IsFunction() { + if member, ok := scope.Members[symbol.OriginalName]; ok && p.symbols[member.Ref.InnerIndex].Kind.IsFunction() { + var notes []logger.MsgData + if scope.Parent == nil && p.isFileConsideredESM { + _, notes = p.whyESModule() + notes[0].Text = fmt.Sprintf("Duplicate top-level function declarations are not allowed in an ECMAScript module. %s", notes[0].Text) + } else { + var where string + where, notes = p.whyStrictMode(scope) + notes[0].Text = fmt.Sprintf("Duplicate function declarations are not allowed in nested blocks %s. %s", where, notes[0].Text) + } + + p.log.AddErrorWithNotes(&p.tracker, + js_lexer.RangeOfIdentifier(p.source, member.Loc), + fmt.Sprintf("The symbol %q has already been declared", symbol.OriginalName), + + append([]logger.MsgData{p.tracker.MsgData( + js_lexer.RangeOfIdentifier(p.source, replaced.Loc), + fmt.Sprintf("The symbol %q was originally declared here:", symbol.OriginalName), + )}, notes...), + ) + } + } + } + } + + if !scope.Kind.StopsHoisting() { + // We create new symbols in the loop below, so the iteration order of the + // loop must be deterministic to avoid generating different minified names + sortedMembers := make(scopeMemberArray, 0, len(scope.Members)) + for _, member := range scope.Members { + sortedMembers = append(sortedMembers, member) + } + sort.Sort(sortedMembers) + + nextMember: + for _, member := range sortedMembers { + symbol := &p.symbols[member.Ref.InnerIndex] + + // Handle non-hoisted collisions between catch bindings and the catch body. + // This implements "B.3.4 VariableStatements in Catch Blocks" from Annex B + // of the ECMAScript standard version 6+ (except for the hoisted case, which + // is handled later on below): + // + // * It is a Syntax Error if any element of the BoundNames of CatchParameter + // also occurs in the LexicallyDeclaredNames of Block. + // + // * It is a Syntax Error if any element of the BoundNames of CatchParameter + // also occurs in the VarDeclaredNames of Block unless CatchParameter is + // CatchParameter : BindingIdentifier . 
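+			// A few concrete (illustrative) cases for the two rules above:
+			//
+			//   try {} catch (e) { let e }     // always a syntax error
+			//   try {} catch (e) { var e }     // allowed: the catch binding is a simple identifier
+			//   try {} catch ([e]) { var e }   // a syntax error: the catch binding is a pattern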
+ // + if scope.Parent.Kind == js_ast.ScopeCatchBinding && symbol.Kind != ast.SymbolHoisted { + if existingMember, ok := scope.Parent.Members[symbol.OriginalName]; ok { + p.addSymbolAlreadyDeclaredError(symbol.OriginalName, member.Loc, existingMember.Loc) + continue + } + } + + if !symbol.Kind.IsHoisted() { + continue + } + + // Implement "Block-Level Function Declarations Web Legacy Compatibility + // Semantics" from Annex B of the ECMAScript standard version 6+ + isSloppyModeBlockLevelFnStmt := false + originalMemberRef := member.Ref + if symbol.Kind == ast.SymbolHoistedFunction { + // Block-level function declarations behave like "let" in strict mode + if scope.StrictMode != js_ast.SloppyMode { + continue + } + + // In sloppy mode, block level functions behave like "let" except with + // an assignment to "var", sort of. This code: + // + // if (x) { + // f(); + // function f() {} + // } + // f(); + // + // behaves like this code: + // + // if (x) { + // let f2 = function() {} + // var f = f2; + // f2(); + // } + // f(); + // + hoistedRef := p.newSymbol(ast.SymbolHoisted, symbol.OriginalName) + scope.Generated = append(scope.Generated, hoistedRef) + if p.hoistedRefForSloppyModeBlockFn == nil { + p.hoistedRefForSloppyModeBlockFn = make(map[ast.Ref]ast.Ref) + } + p.hoistedRefForSloppyModeBlockFn[member.Ref] = hoistedRef + symbol = &p.symbols[hoistedRef.InnerIndex] + member.Ref = hoistedRef + isSloppyModeBlockLevelFnStmt = true + } + + // Check for collisions that would prevent to hoisting "var" symbols up to the enclosing function scope + s := scope.Parent + for { + // Variable declarations hoisted past a "with" statement may actually end + // up overwriting a property on the target of the "with" statement instead + // of initializing the variable. We must not rename them or we risk + // causing a behavior change. + // + // var obj = { foo: 1 } + // with (obj) { var foo = 2 } + // assert(foo === undefined) + // assert(obj.foo === 2) + // + if s.Kind == js_ast.ScopeWith { + symbol.Flags |= ast.MustNotBeRenamed + } + + if existingMember, ok := s.Members[symbol.OriginalName]; ok { + existingSymbol := &p.symbols[existingMember.Ref.InnerIndex] + + // We can hoist the symbol from the child scope into the symbol in + // this scope if: + // + // - The symbol is unbound (i.e. a global variable access) + // - The symbol is also another hoisted variable + // - The symbol is a function of any kind and we're in a function or module scope + // + // Is this unbound (i.e. a global access) or also hoisted? 
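+			// A rough example (not taken from any real input): in
+			//
+			//   function f() {
+			//     { var x = 1 }
+			//     var x = 2
+			//   }
+			//
+			// the inner "var x" hoists up to the function body and is silently merged
+			// into the outer "var x" by the first branch below rather than being
+			// reported as a collision.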
+ if existingSymbol.Kind == ast.SymbolUnbound || existingSymbol.Kind == ast.SymbolHoisted || + (existingSymbol.Kind.IsFunction() && (s.Kind == js_ast.ScopeEntry || s.Kind == js_ast.ScopeFunctionBody)) { + // Silently merge this symbol into the existing symbol + symbol.Link = existingMember.Ref + s.Members[symbol.OriginalName] = existingMember + continue nextMember + } + + // Otherwise if this isn't a catch identifier or "arguments", it's a collision + if existingSymbol.Kind != ast.SymbolCatchIdentifier && existingSymbol.Kind != ast.SymbolArguments { + // An identifier binding from a catch statement and a function + // declaration can both silently shadow another hoisted symbol + if symbol.Kind != ast.SymbolCatchIdentifier && symbol.Kind != ast.SymbolHoistedFunction { + if !isSloppyModeBlockLevelFnStmt { + p.addSymbolAlreadyDeclaredError(symbol.OriginalName, member.Loc, existingMember.Loc) + } else if s == scope.Parent { + // Never mind about this, turns out it's not needed after all + delete(p.hoistedRefForSloppyModeBlockFn, originalMemberRef) + } + } + continue nextMember + } + + // If this is a catch identifier, silently merge the existing symbol + // into this symbol but continue hoisting past this catch scope + existingSymbol.Link = member.Ref + s.Members[symbol.OriginalName] = member + } + + if s.Kind.StopsHoisting() { + // Declare the member in the scope that stopped the hoisting + s.Members[symbol.OriginalName] = member + break + } + s = s.Parent + } + } + } + + for _, child := range scope.Children { + p.hoistSymbols(child) + } +} + +func (p *parser) declareBinding(kind ast.SymbolKind, binding js_ast.Binding, opts parseStmtOpts) { + js_ast.ForEachIdentifierBinding(binding, func(loc logger.Loc, b *js_ast.BIdentifier) { + if !opts.isTypeScriptDeclare || (opts.isNamespaceScope && opts.isExport) { + b.Ref = p.declareSymbol(kind, loc, p.loadNameFromRef(b.Ref)) + } + }) +} + +func (p *parser) recordUsage(ref ast.Ref) { + // The use count stored in the symbol is used for generating symbol names + // during minification. These counts shouldn't include references inside dead + // code regions since those will be culled. + if !p.isControlFlowDead { + p.symbols[ref.InnerIndex].UseCountEstimate++ + use := p.symbolUses[ref] + use.CountEstimate++ + p.symbolUses[ref] = use + } + + // The correctness of TypeScript-to-JavaScript conversion relies on accurate + // symbol use counts for the whole file, including dead code regions. This is + // tracked separately in a parser-only data structure. + if p.options.ts.Parse { + p.tsUseCounts[ref.InnerIndex]++ + } +} + +func (p *parser) ignoreUsage(ref ast.Ref) { + // Roll back the use count increment in recordUsage() + if !p.isControlFlowDead { + p.symbols[ref.InnerIndex].UseCountEstimate-- + use := p.symbolUses[ref] + use.CountEstimate-- + if use.CountEstimate == 0 { + delete(p.symbolUses, ref) + } else { + p.symbolUses[ref] = use + } + } + + // Don't roll back the "tsUseCounts" increment. This must be counted even if + // the value is ignored because that's what the TypeScript compiler does. 
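+	// An illustrative case (assumed, not exhaustive): when a dot chain such as
+	// "process.env.NODE_ENV" is substituted with a configured define, the use of
+	// the root identifier is rolled back via ignoreUsageOfIdentifierInDotChain()
+	// below, yet the TypeScript-only count keeps the use so the behavior matches
+	// what the TypeScript compiler does.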
+} + +func (p *parser) ignoreUsageOfIdentifierInDotChain(expr js_ast.Expr) { + for { + switch e := expr.Data.(type) { + case *js_ast.EIdentifier: + p.ignoreUsage(e.Ref) + + case *js_ast.EDot: + expr = e.Target + continue + + case *js_ast.EIndex: + if _, ok := e.Index.Data.(*js_ast.EString); ok { + expr = e.Target + continue + } + } + + return + } +} + +func (p *parser) importFromRuntime(loc logger.Loc, name string) js_ast.Expr { + it, ok := p.runtimeImports[name] + if !ok { + it.Loc = loc + it.Ref = p.newSymbol(ast.SymbolOther, name) + p.moduleScope.Generated = append(p.moduleScope.Generated, it.Ref) + p.runtimeImports[name] = it + } + p.recordUsage(it.Ref) + return js_ast.Expr{Loc: loc, Data: &js_ast.EIdentifier{Ref: it.Ref}} +} + +func (p *parser) callRuntime(loc logger.Loc, name string, args []js_ast.Expr) js_ast.Expr { + return js_ast.Expr{Loc: loc, Data: &js_ast.ECall{ + Target: p.importFromRuntime(loc, name), + Args: args, + }} +} + +type JSXImport uint8 + +const ( + JSXImportJSX JSXImport = iota + JSXImportJSXS + JSXImportFragment + JSXImportCreateElement +) + +func (p *parser) importJSXSymbol(loc logger.Loc, jsx JSXImport) js_ast.Expr { + var symbols map[string]ast.LocRef + var name string + + switch jsx { + case JSXImportJSX: + symbols = p.jsxRuntimeImports + if p.options.jsx.Development { + name = "jsxDEV" + } else { + name = "jsx" + } + + case JSXImportJSXS: + symbols = p.jsxRuntimeImports + if p.options.jsx.Development { + name = "jsxDEV" + } else { + name = "jsxs" + } + + case JSXImportFragment: + symbols = p.jsxRuntimeImports + name = "Fragment" + + case JSXImportCreateElement: + symbols = p.jsxLegacyImports + name = "createElement" + } + + it, ok := symbols[name] + if !ok { + it.Loc = loc + it.Ref = p.newSymbol(ast.SymbolOther, name) + p.moduleScope.Generated = append(p.moduleScope.Generated, it.Ref) + p.isImportItem[it.Ref] = true + symbols[name] = it + } + + p.recordUsage(it.Ref) + return p.handleIdentifier(loc, &js_ast.EIdentifier{Ref: it.Ref}, identifierOpts{ + wasOriginallyIdentifier: true, + }) +} + +func (p *parser) valueToSubstituteForRequire(loc logger.Loc) js_ast.Expr { + if p.source.Index != runtime.SourceIndex && + config.ShouldCallRuntimeRequire(p.options.mode, p.options.outputFormat) { + return p.importFromRuntime(loc, "__require") + } + + p.recordUsage(p.requireRef) + return js_ast.Expr{Loc: loc, Data: &js_ast.EIdentifier{Ref: p.requireRef}} +} + +func (p *parser) makePromiseRef() ast.Ref { + if p.promiseRef == ast.InvalidRef { + p.promiseRef = p.newSymbol(ast.SymbolUnbound, "Promise") + } + return p.promiseRef +} + +func (p *parser) makeRegExpRef() ast.Ref { + if p.regExpRef == ast.InvalidRef { + p.regExpRef = p.newSymbol(ast.SymbolUnbound, "RegExp") + p.moduleScope.Generated = append(p.moduleScope.Generated, p.regExpRef) + } + return p.regExpRef +} + +// The name is temporarily stored in the ref until the scope traversal pass +// happens, at which point a symbol will be generated and the ref will point +// to the symbol instead. +// +// The scope traversal pass will reconstruct the name using one of two methods. +// In the common case, the name is a slice of the file itself. In that case we +// can just store the slice and not need to allocate any extra memory. In the +// rare case, the name is an externally-allocated string. In that case we store +// an index to the string and use that index during the scope traversal pass. 
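+//
+// A rough illustration with made-up numbers: for a name that is a slice of the
+// source text starting at offset 10 with length 3, the stored ref is roughly
+// {SourceIndex: -3 as a uint32, InnerIndex: 10}, and loadNameFromRef() below
+// recovers source.Contents[10:13]. Externally-allocated names are instead
+// stored as {SourceIndex: 0x80000000, InnerIndex: an index into allocatedNames}.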
+func (p *parser) storeNameInRef(name js_lexer.MaybeSubstring) ast.Ref { + // Is the data in "name" a subset of the data in "p.source.Contents"? + if name.Start.IsValid() { + // The name is a slice of the file contents, so we can just reference it by + // length and don't have to allocate anything. This is the common case. + // + // It's stored as a negative value so we'll crash if we try to use it. That + // way we'll catch cases where we've forgotten to call loadNameFromRef(). + // The length is the negative part because we know it's non-zero. + return ast.Ref{SourceIndex: -uint32(len(name.String)), InnerIndex: uint32(name.Start.GetIndex())} + } else { + // The name is some memory allocated elsewhere. This is either an inline + // string constant in the parser or an identifier with escape sequences + // in the source code, which is very unusual. Stash it away for later. + // This uses allocations but it should hopefully be very uncommon. + ref := ast.Ref{SourceIndex: 0x80000000, InnerIndex: uint32(len(p.allocatedNames))} + p.allocatedNames = append(p.allocatedNames, name.String) + return ref + } +} + +// This is the inverse of storeNameInRef() above +func (p *parser) loadNameFromRef(ref ast.Ref) string { + if ref.SourceIndex == 0x80000000 { + return p.allocatedNames[ref.InnerIndex] + } else { + if (ref.SourceIndex & 0x80000000) == 0 { + panic("Internal error: invalid symbol reference") + } + return p.source.Contents[ref.InnerIndex : int32(ref.InnerIndex)-int32(ref.SourceIndex)] + } +} + +// Due to ES6 destructuring patterns, there are many cases where it's +// impossible to distinguish between an array or object literal and a +// destructuring assignment until we hit the "=" operator later on. +// This object defers errors about being in one state or the other +// until we discover which state we're in. +type deferredErrors struct { + // These are errors for expressions + invalidExprDefaultValue logger.Range + invalidExprAfterQuestion logger.Range + arraySpreadFeature logger.Range + + // These errors are for arrow functions + invalidParens []logger.Range +} + +func (from *deferredErrors) mergeInto(to *deferredErrors) { + if from.invalidExprDefaultValue.Len > 0 { + to.invalidExprDefaultValue = from.invalidExprDefaultValue + } + if from.invalidExprAfterQuestion.Len > 0 { + to.invalidExprAfterQuestion = from.invalidExprAfterQuestion + } + if from.arraySpreadFeature.Len > 0 { + to.arraySpreadFeature = from.arraySpreadFeature + } + if len(from.invalidParens) > 0 { + if len(to.invalidParens) > 0 { + to.invalidParens = append(to.invalidParens, from.invalidParens...) + } else { + to.invalidParens = from.invalidParens + } + } +} + +func (p *parser) logExprErrors(errors *deferredErrors) { + if errors.invalidExprDefaultValue.Len > 0 { + p.log.AddError(&p.tracker, errors.invalidExprDefaultValue, "Unexpected \"=\"") + } + + if errors.invalidExprAfterQuestion.Len > 0 { + r := errors.invalidExprAfterQuestion + p.log.AddError(&p.tracker, r, fmt.Sprintf("Unexpected %q", p.source.Contents[r.Loc.Start:r.Loc.Start+r.Len])) + } + + if errors.arraySpreadFeature.Len > 0 { + p.markSyntaxFeature(compat.ArraySpread, errors.arraySpreadFeature) + } +} + +func (p *parser) logDeferredArrowArgErrors(errors *deferredErrors) { + for _, paren := range errors.invalidParens { + p.log.AddError(&p.tracker, paren, "Invalid binding pattern") + } +} + +func (p *parser) logNullishCoalescingErrorPrecedenceError(op string) { + prevOp := "??" 
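+	// Roughly: for "a ?? b || c" the lexer is sitting on "||" when this is
+	// reached, so the message reads `Cannot use "||" with "??"`; for
+	// "a || b ?? c" the lexer is on "??" instead, so the operands are swapped
+	// below and the operator currently at the lexer is always named first.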
+ if p.lexer.Token == js_lexer.TQuestionQuestion { + op, prevOp = prevOp, op + } + // p.log.AddError(&p.tracker, p.lexer.Range(), fmt.Sprintf("The %q operator requires parentheses")) + p.log.AddErrorWithNotes(&p.tracker, p.lexer.Range(), fmt.Sprintf("Cannot use %q with %q without parentheses", op, prevOp), + []logger.MsgData{{Text: fmt.Sprintf("Expressions of the form \"x %s y %s z\" are not allowed in JavaScript. "+ + "You must disambiguate between \"(x %s y) %s z\" and \"x %s (y %s z)\" by adding parentheses.", prevOp, op, prevOp, op, prevOp, op)}}) +} + +func defineValueCanBeUsedInAssignTarget(data js_ast.E) bool { + switch data.(type) { + case *js_ast.EIdentifier, *js_ast.EDot: + return true + } + + // Substituting a constant into an assignment target (e.g. "x = 1" becomes + // "0 = 1") will cause a syntax error, so we avoid doing this. The caller + // will log a warning instead. + return false +} + +func (p *parser) logAssignToDefine(r logger.Range, name string, expr js_ast.Expr) { + // If this is a compound expression, pretty-print it for the error message. + // We don't use a literal slice of the source text in case it contains + // problematic things (e.g. spans multiple lines, has embedded comments). + if expr.Data != nil { + var parts []string + for { + if id, ok := expr.Data.(*js_ast.EIdentifier); ok { + parts = append(parts, p.loadNameFromRef(id.Ref)) + break + } else if dot, ok := expr.Data.(*js_ast.EDot); ok { + parts = append(parts, dot.Name) + parts = append(parts, ".") + expr = dot.Target + } else if index, ok := expr.Data.(*js_ast.EIndex); ok { + if str, ok := index.Index.Data.(*js_ast.EString); ok { + parts = append(parts, "]") + parts = append(parts, string(helpers.QuoteSingle(helpers.UTF16ToString(str.Value), false))) + parts = append(parts, "[") + expr = index.Target + } else { + return + } + } else { + return + } + } + for i, j := 0, len(parts)-1; i < j; i, j = i+1, j-1 { + parts[i], parts[j] = parts[j], parts[i] + } + name = strings.Join(parts, "") + } + + kind := logger.Warning + if p.suppressWarningsAboutWeirdCode { + kind = logger.Debug + } + + p.log.AddIDWithNotes(logger.MsgID_JS_AssignToDefine, kind, &p.tracker, r, + fmt.Sprintf("Suspicious assignment to defined constant %q", name), + []logger.MsgData{{Text: fmt.Sprintf( + "The expression %q has been configured to be replaced with a constant using the \"define\" feature. "+ + "If this expression is supposed to be a compile-time constant, then it doesn't make sense to assign to it here. "+ + "Or if this expression is supposed to change at run-time, this \"define\" substitution should be removed.", name)}}) +} + +// The "await" and "yield" expressions are never allowed in argument lists but +// may or may not be allowed otherwise depending on the details of the enclosing +// function or module. This needs to be handled when parsing an arrow function +// argument list because we don't know if these expressions are not allowed until +// we reach the "=>" token (or discover the absence of one). 
+// +// Specifically, for await: +// +// // This is ok +// async function foo() { (x = await y) } +// +// // This is an error +// async function foo() { (x = await y) => {} } +// +// And for yield: +// +// // This is ok +// function* foo() { (x = yield y) } +// +// // This is an error +// function* foo() { (x = yield y) => {} } +type deferredArrowArgErrors struct { + invalidExprAwait logger.Range + invalidExprYield logger.Range +} + +func (p *parser) logArrowArgErrors(errors *deferredArrowArgErrors) { + if errors.invalidExprAwait.Len > 0 { + p.log.AddError(&p.tracker, errors.invalidExprAwait, "Cannot use an \"await\" expression here:") + } + + if errors.invalidExprYield.Len > 0 { + p.log.AddError(&p.tracker, errors.invalidExprYield, "Cannot use a \"yield\" expression here:") + } +} + +func (p *parser) keyNameForError(key js_ast.Expr) string { + switch k := key.Data.(type) { + case *js_ast.EString: + return fmt.Sprintf("%q", helpers.UTF16ToString(k.Value)) + case *js_ast.EPrivateIdentifier: + return fmt.Sprintf("%q", p.loadNameFromRef(k.Ref)) + } + return "property" +} + +func (p *parser) checkForLegacyOctalLiteral(e js_ast.E) { + if p.lexer.IsLegacyOctalLiteral { + if p.legacyOctalLiterals == nil { + p.legacyOctalLiterals = make(map[js_ast.E]logger.Range) + } + p.legacyOctalLiterals[e] = p.lexer.Range() + } +} + +func (p *parser) notesForAssertTypeJSON(record *ast.ImportRecord, alias string) []logger.MsgData { + return []logger.MsgData{p.tracker.MsgData( + js_lexer.RangeOfImportAssertOrWith(p.source, *ast.FindAssertOrWithEntry(record.AssertOrWith.Entries, "type"), js_lexer.KeyAndValueRange), + "The JSON import assertion is here:"), + {Text: fmt.Sprintf("You can either keep the import assertion and only use the \"default\" import, "+ + "or you can remove the import assertion and use the %q import.", alias)}} +} + +// This assumes the caller has already checked for TStringLiteral or TNoSubstitutionTemplateLiteral +func (p *parser) parseStringLiteral() js_ast.Expr { + var legacyOctalLoc logger.Loc + loc := p.lexer.Loc() + text := p.lexer.StringLiteral() + + // Enable using a "/* @__KEY__ */" comment to turn a string into a key + hasPropertyKeyComment := (p.lexer.HasCommentBefore & js_lexer.KeyCommentBefore) != 0 + if hasPropertyKeyComment { + if name := helpers.UTF16ToString(text); p.isMangledProp(name) { + value := js_ast.Expr{Loc: loc, Data: &js_ast.ENameOfSymbol{ + Ref: p.storeNameInRef(js_lexer.MaybeSubstring{String: name}), + HasPropertyKeyComment: true, + }} + p.lexer.Next() + return value + } + } + + if p.lexer.LegacyOctalLoc.Start > loc.Start { + legacyOctalLoc = p.lexer.LegacyOctalLoc + } + value := js_ast.Expr{Loc: loc, Data: &js_ast.EString{ + Value: text, + LegacyOctalLoc: legacyOctalLoc, + PreferTemplate: p.lexer.Token == js_lexer.TNoSubstitutionTemplateLiteral, + HasPropertyKeyComment: hasPropertyKeyComment, + }} + p.lexer.Next() + return value +} + +type propertyOpts struct { + decorators []js_ast.Decorator + decoratorScope *js_ast.Scope + decoratorContext decoratorContextFlags + + asyncRange logger.Range + generatorRange logger.Range + tsDeclareRange logger.Range + classKeyword logger.Range + isAsync bool + isGenerator bool + + // Class-related options + isStatic bool + isTSAbstract bool + isClass bool + classHasExtends bool +} + +func (p *parser) parseProperty(startLoc logger.Loc, kind js_ast.PropertyKind, opts propertyOpts, errors *deferredErrors) (js_ast.Property, bool) { + var flags js_ast.PropertyFlags + var key js_ast.Expr + var closeBracketLoc logger.Loc + keyRange := 
p.lexer.Range() + + switch p.lexer.Token { + case js_lexer.TNumericLiteral: + key = js_ast.Expr{Loc: p.lexer.Loc(), Data: &js_ast.ENumber{Value: p.lexer.Number}} + p.checkForLegacyOctalLiteral(key.Data) + p.lexer.Next() + + case js_lexer.TStringLiteral: + key = p.parseStringLiteral() + if !p.options.minifySyntax { + flags |= js_ast.PropertyPreferQuotedKey + } + + case js_lexer.TBigIntegerLiteral: + key = js_ast.Expr{Loc: p.lexer.Loc(), Data: &js_ast.EBigInt{Value: p.lexer.Identifier.String}} + p.markSyntaxFeature(compat.Bigint, p.lexer.Range()) + p.lexer.Next() + + case js_lexer.TPrivateIdentifier: + if p.options.ts.Parse && p.options.ts.Config.ExperimentalDecorators == config.True && len(opts.decorators) > 0 { + p.log.AddError(&p.tracker, p.lexer.Range(), "TypeScript experimental decorators cannot be used on private identifiers") + } else if !opts.isClass { + p.lexer.Expected(js_lexer.TIdentifier) + } else if opts.tsDeclareRange.Len != 0 { + p.log.AddError(&p.tracker, opts.tsDeclareRange, "\"declare\" cannot be used with a private identifier") + } + name := p.lexer.Identifier + key = js_ast.Expr{Loc: p.lexer.Loc(), Data: &js_ast.EPrivateIdentifier{Ref: p.storeNameInRef(name)}} + p.reportPrivateNameUsage(name.String) + p.lexer.Next() + + case js_lexer.TOpenBracket: + flags |= js_ast.PropertyIsComputed + p.markSyntaxFeature(compat.ObjectExtensions, p.lexer.Range()) + p.lexer.Next() + wasIdentifier := p.lexer.Token == js_lexer.TIdentifier + expr := p.parseExpr(js_ast.LComma) + + // Handle index signatures + if p.options.ts.Parse && p.lexer.Token == js_lexer.TColon && wasIdentifier && opts.isClass { + if _, ok := expr.Data.(*js_ast.EIdentifier); ok { + if opts.tsDeclareRange.Len != 0 { + p.log.AddError(&p.tracker, opts.tsDeclareRange, "\"declare\" cannot be used with an index signature") + } + + // "[key: string]: any;" + p.lexer.Next() + p.skipTypeScriptType(js_ast.LLowest) + p.lexer.Expect(js_lexer.TCloseBracket) + p.lexer.Expect(js_lexer.TColon) + p.skipTypeScriptType(js_ast.LLowest) + p.lexer.ExpectOrInsertSemicolon() + + // Skip this property entirely + return js_ast.Property{}, false + } + } + + closeBracketLoc = p.saveExprCommentsHere() + p.lexer.Expect(js_lexer.TCloseBracket) + key = expr + + case js_lexer.TAsterisk: + if kind != js_ast.PropertyField && (kind != js_ast.PropertyMethod || opts.isGenerator) { + p.lexer.Unexpected() + } + opts.isGenerator = true + opts.generatorRange = p.lexer.Range() + p.lexer.Next() + return p.parseProperty(startLoc, js_ast.PropertyMethod, opts, errors) + + default: + name := p.lexer.Identifier + raw := p.lexer.Raw() + nameRange := p.lexer.Range() + if !p.lexer.IsIdentifierOrKeyword() { + p.lexer.Expect(js_lexer.TIdentifier) + } + p.lexer.Next() + + // Support contextual keywords + if kind == js_ast.PropertyField { + // Does the following token look like a key? 
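+			// For example (illustrative): in "class Foo { get bar() {} }" the token
+			// after "get" is an identifier, so "get" is handled as a modifier keyword
+			// below, whereas in "({ get: 1 })" the next token is ":", so "get" stays
+			// an ordinary property key.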
+ couldBeModifierKeyword := p.lexer.IsIdentifierOrKeyword() + if !couldBeModifierKeyword { + switch p.lexer.Token { + case js_lexer.TOpenBracket, js_lexer.TNumericLiteral, js_lexer.TStringLiteral, + js_lexer.TAsterisk, js_lexer.TPrivateIdentifier: + couldBeModifierKeyword = true + } + } + + // If so, check for a modifier keyword + if couldBeModifierKeyword { + switch name.String { + case "get": + if !opts.isAsync && raw == name.String { + p.markSyntaxFeature(compat.ObjectAccessors, nameRange) + return p.parseProperty(startLoc, js_ast.PropertyGetter, opts, nil) + } + + case "set": + if !opts.isAsync && raw == name.String { + p.markSyntaxFeature(compat.ObjectAccessors, nameRange) + return p.parseProperty(startLoc, js_ast.PropertySetter, opts, nil) + } + + case "accessor": + if !p.lexer.HasNewlineBefore && !opts.isAsync && opts.isClass && raw == name.String { + return p.parseProperty(startLoc, js_ast.PropertyAutoAccessor, opts, nil) + } + + case "async": + if !p.lexer.HasNewlineBefore && !opts.isAsync && raw == name.String { + opts.isAsync = true + opts.asyncRange = nameRange + return p.parseProperty(startLoc, js_ast.PropertyMethod, opts, nil) + } + + case "static": + if !opts.isStatic && !opts.isAsync && opts.isClass && raw == name.String { + opts.isStatic = true + return p.parseProperty(startLoc, kind, opts, nil) + } + + case "declare": + if !p.lexer.HasNewlineBefore && opts.isClass && p.options.ts.Parse && opts.tsDeclareRange.Len == 0 && raw == name.String { + opts.tsDeclareRange = nameRange + scopeIndex := len(p.scopesInOrder) + + if prop, ok := p.parseProperty(startLoc, kind, opts, nil); ok && + prop.Kind == js_ast.PropertyField && prop.ValueOrNil.Data == nil && + (p.options.ts.Config.ExperimentalDecorators == config.True && len(opts.decorators) > 0) { + // If this is a well-formed class field with the "declare" keyword, + // only keep the declaration to preserve its side-effects when + // there are TypeScript experimental decorators present: + // + // class Foo { + // // Remove this + // declare [(console.log('side effect 1'), 'foo')] + // + // // Keep this + // @decorator(console.log('side effect 2')) declare bar + // } + // + // This behavior is surprisingly somehow valid with TypeScript + // experimental decorators, which was possibly by accident. + // TypeScript does not allow this with JavaScript decorators. + // + // References: + // + // https://github.com/evanw/esbuild/issues/1675 + // https://github.com/microsoft/TypeScript/issues/46345 + // + prop.Kind = js_ast.PropertyDeclareOrAbstract + return prop, true + } + + p.discardScopesUpTo(scopeIndex) + return js_ast.Property{}, false + } + + case "abstract": + if !p.lexer.HasNewlineBefore && opts.isClass && p.options.ts.Parse && !opts.isTSAbstract && raw == name.String { + opts.isTSAbstract = true + scopeIndex := len(p.scopesInOrder) + + if prop, ok := p.parseProperty(startLoc, kind, opts, nil); ok && + prop.Kind == js_ast.PropertyField && prop.ValueOrNil.Data == nil && + (p.options.ts.Config.ExperimentalDecorators == config.True && len(opts.decorators) > 0) { + // If this is a well-formed class field with the "abstract" keyword, + // only keep the declaration to preserve its side-effects when + // there are TypeScript experimental decorators present: + // + // abstract class Foo { + // // Remove this + // abstract [(console.log('side effect 1'), 'foo')] + // + // // Keep this + // @decorator(console.log('side effect 2')) abstract bar + // } + // + // This behavior is valid with TypeScript experimental decorators. 
+ // TypeScript does not allow this with JavaScript decorators. + // + // References: + // + // https://github.com/evanw/esbuild/issues/3684 + // + prop.Kind = js_ast.PropertyDeclareOrAbstract + return prop, true + } + + p.discardScopesUpTo(scopeIndex) + return js_ast.Property{}, false + } + + case "private", "protected", "public", "readonly", "override": + // Skip over TypeScript keywords + if opts.isClass && p.options.ts.Parse && raw == name.String { + return p.parseProperty(startLoc, kind, opts, nil) + } + } + } else if p.lexer.Token == js_lexer.TOpenBrace && name.String == "static" && len(opts.decorators) == 0 { + loc := p.lexer.Loc() + p.lexer.Next() + + oldFnOrArrowDataParse := p.fnOrArrowDataParse + p.fnOrArrowDataParse = fnOrArrowDataParse{ + isReturnDisallowed: true, + allowSuperProperty: true, + await: forbidAll, + } + + p.pushScopeForParsePass(js_ast.ScopeClassStaticInit, loc) + stmts := p.parseStmtsUpTo(js_lexer.TCloseBrace, parseStmtOpts{}) + p.popScope() + + p.fnOrArrowDataParse = oldFnOrArrowDataParse + + closeBraceLoc := p.lexer.Loc() + p.lexer.Expect(js_lexer.TCloseBrace) + return js_ast.Property{ + Kind: js_ast.PropertyClassStaticBlock, + Loc: startLoc, + ClassStaticBlock: &js_ast.ClassStaticBlock{ + Loc: loc, + Block: js_ast.SBlock{Stmts: stmts, CloseBraceLoc: closeBraceLoc}, + }, + }, true + } + } + + if p.isMangledProp(name.String) { + key = js_ast.Expr{Loc: nameRange.Loc, Data: &js_ast.ENameOfSymbol{Ref: p.storeNameInRef(name)}} + } else { + key = js_ast.Expr{Loc: nameRange.Loc, Data: &js_ast.EString{Value: helpers.StringToUTF16(name.String)}} + } + + // Parse a shorthand property + if !opts.isClass && kind == js_ast.PropertyField && p.lexer.Token != js_lexer.TColon && + p.lexer.Token != js_lexer.TOpenParen && p.lexer.Token != js_lexer.TLessThan && + js_lexer.Keywords[name.String] == js_lexer.T(0) { + + // Forbid invalid identifiers + if (p.fnOrArrowDataParse.await != allowIdent && name.String == "await") || + (p.fnOrArrowDataParse.yield != allowIdent && name.String == "yield") { + p.log.AddError(&p.tracker, nameRange, fmt.Sprintf("Cannot use %q as an identifier here:", name.String)) + } + + ref := p.storeNameInRef(name) + value := js_ast.Expr{Loc: key.Loc, Data: &js_ast.EIdentifier{Ref: ref}} + + // Destructuring patterns have an optional default value + var initializerOrNil js_ast.Expr + if errors != nil && p.lexer.Token == js_lexer.TEquals { + errors.invalidExprDefaultValue = p.lexer.Range() + p.lexer.Next() + initializerOrNil = p.parseExpr(js_ast.LComma) + } + + return js_ast.Property{ + Kind: kind, + Loc: startLoc, + Key: key, + ValueOrNil: value, + InitializerOrNil: initializerOrNil, + Flags: js_ast.PropertyWasShorthand, + }, true + } + } + + hasTypeParameters := false + hasDefiniteAssignmentAssertionOperator := false + + if p.options.ts.Parse { + if opts.isClass { + if p.lexer.Token == js_lexer.TQuestion { + // "class X { foo?: number }" + // "class X { foo?(): number }" + p.lexer.Next() + } else if p.lexer.Token == js_lexer.TExclamation && !p.lexer.HasNewlineBefore && + (kind == js_ast.PropertyField || kind == js_ast.PropertyAutoAccessor) { + // "class X { foo!: number }" + p.lexer.Next() + hasDefiniteAssignmentAssertionOperator = true + } + } + + // "class X { foo?(): T }" + // "const x = { foo(): T {} }" + if !hasDefiniteAssignmentAssertionOperator && kind != js_ast.PropertyAutoAccessor { + hasTypeParameters = p.skipTypeScriptTypeParameters(allowConstModifier) != didNotSkipAnything + } + } + + // Parse a class field with an optional initial value + if kind 
== js_ast.PropertyAutoAccessor || (opts.isClass && kind == js_ast.PropertyField && + !hasTypeParameters && (p.lexer.Token != js_lexer.TOpenParen || hasDefiniteAssignmentAssertionOperator)) { + var initializerOrNil js_ast.Expr + + // Forbid the names "constructor" and "prototype" in some cases + if !flags.Has(js_ast.PropertyIsComputed) { + if str, ok := key.Data.(*js_ast.EString); ok && (helpers.UTF16EqualsString(str.Value, "constructor") || + (opts.isStatic && helpers.UTF16EqualsString(str.Value, "prototype"))) { + p.log.AddError(&p.tracker, keyRange, fmt.Sprintf("Invalid field name %q", helpers.UTF16ToString(str.Value))) + } + } + + // Skip over types + if p.options.ts.Parse && p.lexer.Token == js_lexer.TColon { + p.lexer.Next() + p.skipTypeScriptType(js_ast.LLowest) + } + + if p.lexer.Token == js_lexer.TEquals { + p.lexer.Next() + + // "this" and "super" property access is allowed in field initializers + oldIsThisDisallowed := p.fnOrArrowDataParse.isThisDisallowed + oldAllowSuperProperty := p.fnOrArrowDataParse.allowSuperProperty + p.fnOrArrowDataParse.isThisDisallowed = false + p.fnOrArrowDataParse.allowSuperProperty = true + + initializerOrNil = p.parseExpr(js_ast.LComma) + + p.fnOrArrowDataParse.isThisDisallowed = oldIsThisDisallowed + p.fnOrArrowDataParse.allowSuperProperty = oldAllowSuperProperty + } + + // Special-case private identifiers + if private, ok := key.Data.(*js_ast.EPrivateIdentifier); ok { + name := p.loadNameFromRef(private.Ref) + if name == "#constructor" { + p.log.AddError(&p.tracker, keyRange, fmt.Sprintf("Invalid field name %q", name)) + } + var declare ast.SymbolKind + if kind == js_ast.PropertyAutoAccessor { + if opts.isStatic { + declare = ast.SymbolPrivateStaticGetSetPair + } else { + declare = ast.SymbolPrivateGetSetPair + } + private.Ref = p.declareSymbol(declare, key.Loc, name) + p.privateGetters[private.Ref] = p.newSymbol(ast.SymbolOther, name[1:]+"_get") + p.privateSetters[private.Ref] = p.newSymbol(ast.SymbolOther, name[1:]+"_set") + } else { + if opts.isStatic { + declare = ast.SymbolPrivateStaticField + } else { + declare = ast.SymbolPrivateField + } + private.Ref = p.declareSymbol(declare, key.Loc, name) + } + } + + p.lexer.ExpectOrInsertSemicolon() + if opts.isStatic { + flags |= js_ast.PropertyIsStatic + } + return js_ast.Property{ + Decorators: opts.decorators, + Loc: startLoc, + Kind: kind, + Flags: flags, + Key: key, + InitializerOrNil: initializerOrNil, + CloseBracketLoc: closeBracketLoc, + }, true + } + + // Parse a method expression + if p.lexer.Token == js_lexer.TOpenParen || kind.IsMethodDefinition() || opts.isClass { + hasError := false + + if !hasError && opts.tsDeclareRange.Len != 0 { + what := "method" + if kind == js_ast.PropertyGetter { + what = "getter" + } else if kind == js_ast.PropertySetter { + what = "setter" + } + p.log.AddError(&p.tracker, opts.tsDeclareRange, "\"declare\" cannot be used with a "+what) + hasError = true + } + + if opts.isAsync && p.markAsyncFn(opts.asyncRange, opts.isGenerator) { + hasError = true + } + + if !hasError && opts.isGenerator && p.markSyntaxFeature(compat.Generator, opts.generatorRange) { + hasError = true + } + + if !hasError && p.lexer.Token == js_lexer.TOpenParen && kind != js_ast.PropertyGetter && kind != js_ast.PropertySetter && p.markSyntaxFeature(compat.ObjectExtensions, p.lexer.Range()) { + hasError = true + } + + loc := p.lexer.Loc() + scopeIndex := p.pushScopeForParsePass(js_ast.ScopeFunctionArgs, loc) + isConstructor := false + + // Forbid the names "constructor" and "prototype" in some 
cases + if opts.isClass && !flags.Has(js_ast.PropertyIsComputed) { + if str, ok := key.Data.(*js_ast.EString); ok { + if !opts.isStatic && helpers.UTF16EqualsString(str.Value, "constructor") { + switch { + case kind == js_ast.PropertyGetter: + p.log.AddError(&p.tracker, keyRange, "Class constructor cannot be a getter") + case kind == js_ast.PropertySetter: + p.log.AddError(&p.tracker, keyRange, "Class constructor cannot be a setter") + case opts.isAsync: + p.log.AddError(&p.tracker, keyRange, "Class constructor cannot be an async function") + case opts.isGenerator: + p.log.AddError(&p.tracker, keyRange, "Class constructor cannot be a generator") + default: + isConstructor = true + } + } else if opts.isStatic && helpers.UTF16EqualsString(str.Value, "prototype") { + p.log.AddError(&p.tracker, keyRange, "Invalid static method name \"prototype\"") + } + } + } + + await := allowIdent + yield := allowIdent + if opts.isAsync { + await = allowExpr + } + if opts.isGenerator { + yield = allowExpr + } + + fn, hadBody := p.parseFn(nil, opts.classKeyword, opts.decoratorContext, fnOrArrowDataParse{ + needsAsyncLoc: key.Loc, + asyncRange: opts.asyncRange, + await: await, + yield: yield, + allowSuperCall: opts.classHasExtends && isConstructor, + allowSuperProperty: true, + decoratorScope: opts.decoratorScope, + isConstructor: isConstructor, + + // Only allow omitting the body if we're parsing TypeScript class + allowMissingBodyForTypeScript: p.options.ts.Parse && opts.isClass, + }) + + // "class Foo { foo(): void; foo(): void {} }" + if !hadBody { + // Skip this property entirely + p.popAndDiscardScope(scopeIndex) + return js_ast.Property{}, false + } + + p.popScope() + fn.IsUniqueFormalParameters = true + value := js_ast.Expr{Loc: loc, Data: &js_ast.EFunction{Fn: fn}} + + // Enforce argument rules for accessors + switch kind { + case js_ast.PropertyGetter: + if len(fn.Args) > 0 { + r := js_lexer.RangeOfIdentifier(p.source, fn.Args[0].Binding.Loc) + p.log.AddError(&p.tracker, r, fmt.Sprintf("Getter %s must have zero arguments", p.keyNameForError(key))) + } + + case js_ast.PropertySetter: + if len(fn.Args) != 1 { + r := js_lexer.RangeOfIdentifier(p.source, key.Loc) + if len(fn.Args) > 1 { + r = js_lexer.RangeOfIdentifier(p.source, fn.Args[1].Binding.Loc) + } + p.log.AddError(&p.tracker, r, fmt.Sprintf("Setter %s must have exactly one argument", p.keyNameForError(key))) + } + + default: + kind = js_ast.PropertyMethod + } + + // Special-case private identifiers + if private, ok := key.Data.(*js_ast.EPrivateIdentifier); ok { + var declare ast.SymbolKind + var suffix string + switch kind { + case js_ast.PropertyGetter: + if opts.isStatic { + declare = ast.SymbolPrivateStaticGet + } else { + declare = ast.SymbolPrivateGet + } + suffix = "_get" + case js_ast.PropertySetter: + if opts.isStatic { + declare = ast.SymbolPrivateStaticSet + } else { + declare = ast.SymbolPrivateSet + } + suffix = "_set" + default: + if opts.isStatic { + declare = ast.SymbolPrivateStaticMethod + } else { + declare = ast.SymbolPrivateMethod + } + suffix = "_fn" + } + name := p.loadNameFromRef(private.Ref) + if name == "#constructor" { + p.log.AddError(&p.tracker, keyRange, fmt.Sprintf("Invalid method name %q", name)) + } + private.Ref = p.declareSymbol(declare, key.Loc, name) + methodRef := p.newSymbol(ast.SymbolOther, name[1:]+suffix) + if kind == js_ast.PropertySetter { + p.privateSetters[private.Ref] = methodRef + } else { + p.privateGetters[private.Ref] = methodRef + } + } + + if opts.isStatic { + flags |= js_ast.PropertyIsStatic + 
} + return js_ast.Property{ + Decorators: opts.decorators, + Loc: startLoc, + Kind: kind, + Flags: flags, + Key: key, + ValueOrNil: value, + CloseBracketLoc: closeBracketLoc, + }, true + } + + // Parse an object key/value pair + p.lexer.Expect(js_lexer.TColon) + value := p.parseExprOrBindings(js_ast.LComma, errors) + return js_ast.Property{ + Loc: startLoc, + Kind: kind, + Flags: flags, + Key: key, + ValueOrNil: value, + CloseBracketLoc: closeBracketLoc, + }, true +} + +func (p *parser) parsePropertyBinding() js_ast.PropertyBinding { + var key js_ast.Expr + var closeBracketLoc logger.Loc + isComputed := false + preferQuotedKey := false + loc := p.lexer.Loc() + + switch p.lexer.Token { + case js_lexer.TDotDotDot: + p.lexer.Next() + value := js_ast.Binding{Loc: p.saveExprCommentsHere(), Data: &js_ast.BIdentifier{Ref: p.storeNameInRef(p.lexer.Identifier)}} + p.lexer.Expect(js_lexer.TIdentifier) + return js_ast.PropertyBinding{ + Loc: loc, + IsSpread: true, + Value: value, + } + + case js_lexer.TNumericLiteral: + key = js_ast.Expr{Loc: p.lexer.Loc(), Data: &js_ast.ENumber{Value: p.lexer.Number}} + p.checkForLegacyOctalLiteral(key.Data) + p.lexer.Next() + + case js_lexer.TStringLiteral: + key = p.parseStringLiteral() + preferQuotedKey = !p.options.minifySyntax + + case js_lexer.TBigIntegerLiteral: + key = js_ast.Expr{Loc: p.lexer.Loc(), Data: &js_ast.EBigInt{Value: p.lexer.Identifier.String}} + p.markSyntaxFeature(compat.Bigint, p.lexer.Range()) + p.lexer.Next() + + case js_lexer.TOpenBracket: + isComputed = true + p.lexer.Next() + key = p.parseExpr(js_ast.LComma) + closeBracketLoc = p.saveExprCommentsHere() + p.lexer.Expect(js_lexer.TCloseBracket) + + default: + name := p.lexer.Identifier + nameRange := p.lexer.Range() + if !p.lexer.IsIdentifierOrKeyword() { + p.lexer.Expect(js_lexer.TIdentifier) + } + p.lexer.Next() + if p.isMangledProp(name.String) { + key = js_ast.Expr{Loc: nameRange.Loc, Data: &js_ast.ENameOfSymbol{Ref: p.storeNameInRef(name)}} + } else { + key = js_ast.Expr{Loc: nameRange.Loc, Data: &js_ast.EString{Value: helpers.StringToUTF16(name.String)}} + } + + if p.lexer.Token != js_lexer.TColon && p.lexer.Token != js_lexer.TOpenParen { + // Forbid invalid identifiers + if (p.fnOrArrowDataParse.await != allowIdent && name.String == "await") || + (p.fnOrArrowDataParse.yield != allowIdent && name.String == "yield") { + p.log.AddError(&p.tracker, nameRange, fmt.Sprintf("Cannot use %q as an identifier here:", name.String)) + } + + ref := p.storeNameInRef(name) + value := js_ast.Binding{Loc: nameRange.Loc, Data: &js_ast.BIdentifier{Ref: ref}} + + var defaultValueOrNil js_ast.Expr + if p.lexer.Token == js_lexer.TEquals { + p.lexer.Next() + defaultValueOrNil = p.parseExpr(js_ast.LComma) + } + + return js_ast.PropertyBinding{ + Loc: loc, + Key: key, + Value: value, + DefaultValueOrNil: defaultValueOrNil, + } + } + } + + p.lexer.Expect(js_lexer.TColon) + value := p.parseBinding(parseBindingOpts{}) + + var defaultValueOrNil js_ast.Expr + if p.lexer.Token == js_lexer.TEquals { + p.lexer.Next() + defaultValueOrNil = p.parseExpr(js_ast.LComma) + } + + return js_ast.PropertyBinding{ + Loc: loc, + IsComputed: isComputed, + PreferQuotedKey: preferQuotedKey, + Key: key, + Value: value, + DefaultValueOrNil: defaultValueOrNil, + CloseBracketLoc: closeBracketLoc, + } +} + +// These properties have special semantics in JavaScript. They must not be +// mangled or we could potentially fail to parse valid JavaScript syntax or +// generate invalid JavaScript syntax as output. 
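+// For example, "__proto__" in an object literal and "constructor" or "prototype"
+// in a class body are recognized by the language itself, so renaming them would
+// change the meaning of the code.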
+// +// This list is only intended to contain properties specific to the JavaScript +// language itself to avoid syntax errors in the generated output. It's not +// intended to contain properties for JavaScript APIs. Those must be provided +// by the user. +var permanentReservedProps = map[string]bool{ + "__proto__": true, + "constructor": true, + "prototype": true, +} + +func (p *parser) isMangledProp(name string) bool { + if p.options.mangleProps == nil { + return false + } + if p.options.mangleProps.MatchString(name) && !permanentReservedProps[name] && (p.options.reserveProps == nil || !p.options.reserveProps.MatchString(name)) { + return true + } + reservedProps := p.reservedProps + if reservedProps == nil { + reservedProps = make(map[string]bool) + p.reservedProps = reservedProps + } + reservedProps[name] = true + return false +} + +func (p *parser) symbolForMangledProp(name string) ast.Ref { + mangledProps := p.mangledProps + if mangledProps == nil { + mangledProps = make(map[string]ast.Ref) + p.mangledProps = mangledProps + } + ref, ok := mangledProps[name] + if !ok { + ref = p.newSymbol(ast.SymbolMangledProp, name) + mangledProps[name] = ref + } + if !p.isControlFlowDead { + p.symbols[ref.InnerIndex].UseCountEstimate++ + } + return ref +} + +type wasOriginallyDotOrIndex uint8 + +const ( + wasOriginallyDot wasOriginallyDotOrIndex = iota + wasOriginallyIndex +) + +func (p *parser) dotOrMangledPropParse( + target js_ast.Expr, + name js_lexer.MaybeSubstring, + nameLoc logger.Loc, + optionalChain js_ast.OptionalChain, + original wasOriginallyDotOrIndex, +) js_ast.E { + if (original != wasOriginallyIndex || p.options.mangleQuoted) && p.isMangledProp(name.String) { + return &js_ast.EIndex{ + Target: target, + Index: js_ast.Expr{Loc: nameLoc, Data: &js_ast.ENameOfSymbol{Ref: p.storeNameInRef(name)}}, + OptionalChain: optionalChain, + } + } + + return &js_ast.EDot{ + Target: target, + Name: name.String, + NameLoc: nameLoc, + OptionalChain: optionalChain, + } +} + +func (p *parser) dotOrMangledPropVisit(target js_ast.Expr, name string, nameLoc logger.Loc) js_ast.E { + if p.isMangledProp(name) { + return &js_ast.EIndex{ + Target: target, + Index: js_ast.Expr{Loc: nameLoc, Data: &js_ast.ENameOfSymbol{Ref: p.symbolForMangledProp(name)}}, + } + } + + return &js_ast.EDot{ + Target: target, + Name: name, + NameLoc: nameLoc, + } +} + +func (p *parser) parseArrowBody(args []js_ast.Arg, data fnOrArrowDataParse) *js_ast.EArrow { + arrowLoc := p.lexer.Loc() + + // Newlines are not allowed before "=>" + if p.lexer.HasNewlineBefore { + p.log.AddError(&p.tracker, p.lexer.Range(), "Unexpected newline before \"=>\"") + panic(js_lexer.LexerPanic{}) + } + + p.lexer.Expect(js_lexer.TEqualsGreaterThan) + + for _, arg := range args { + p.declareBinding(ast.SymbolHoisted, arg.Binding, parseStmtOpts{}) + } + + // The ability to use "this" and "super" is inherited by arrow functions + data.isThisDisallowed = p.fnOrArrowDataParse.isThisDisallowed + data.allowSuperCall = p.fnOrArrowDataParse.allowSuperCall + data.allowSuperProperty = p.fnOrArrowDataParse.allowSuperProperty + + if p.lexer.Token == js_lexer.TOpenBrace { + body := p.parseFnBody(data) + p.afterArrowBodyLoc = p.lexer.Loc() + return &js_ast.EArrow{Args: args, Body: body} + } + + p.pushScopeForParsePass(js_ast.ScopeFunctionBody, arrowLoc) + defer p.popScope() + + oldFnOrArrowData := p.fnOrArrowDataParse + p.fnOrArrowDataParse = data + expr := p.parseExpr(js_ast.LComma) + p.fnOrArrowDataParse = oldFnOrArrowData + return &js_ast.EArrow{ + Args: args, + 
PreferExpr: true, + Body: js_ast.FnBody{Loc: arrowLoc, Block: js_ast.SBlock{Stmts: []js_ast.Stmt{{Loc: expr.Loc, Data: &js_ast.SReturn{ValueOrNil: expr}}}}}, + } +} + +func (p *parser) checkForArrowAfterTheCurrentToken() bool { + oldLexer := p.lexer + p.lexer.IsLogDisabled = true + + // Implement backtracking by restoring the lexer's memory to its original state + defer func() { + r := recover() + if _, isLexerPanic := r.(js_lexer.LexerPanic); isLexerPanic { + p.lexer = oldLexer + } else if r != nil { + panic(r) + } + }() + + p.lexer.Next() + isArrowAfterThisToken := p.lexer.Token == js_lexer.TEqualsGreaterThan + + p.lexer = oldLexer + return isArrowAfterThisToken +} + +// This parses an expression. This assumes we've already parsed the "async" +// keyword and are currently looking at the following token. +func (p *parser) parseAsyncPrefixExpr(asyncRange logger.Range, level js_ast.L, flags exprFlag) js_ast.Expr { + // "async function() {}" + if !p.lexer.HasNewlineBefore && p.lexer.Token == js_lexer.TFunction { + return p.parseFnExpr(asyncRange.Loc, true /* isAsync */, asyncRange) + } + + // Check the precedence level to avoid parsing an arrow function in + // "new async () => {}". This also avoids parsing "new async()" as + // "new (async())()" instead. + if !p.lexer.HasNewlineBefore && level < js_ast.LMember { + switch p.lexer.Token { + // "async => {}" + case js_lexer.TEqualsGreaterThan: + if level <= js_ast.LAssign { + arg := js_ast.Arg{Binding: js_ast.Binding{Loc: asyncRange.Loc, Data: &js_ast.BIdentifier{ + Ref: p.storeNameInRef(js_lexer.MaybeSubstring{String: "async"})}}} + + p.pushScopeForParsePass(js_ast.ScopeFunctionArgs, asyncRange.Loc) + defer p.popScope() + + return js_ast.Expr{Loc: asyncRange.Loc, Data: p.parseArrowBody([]js_ast.Arg{arg}, fnOrArrowDataParse{ + needsAsyncLoc: asyncRange.Loc, + })} + } + + // "async x => {}" + case js_lexer.TIdentifier: + if level <= js_ast.LAssign { + // See https://github.com/tc39/ecma262/issues/2034 for details + isArrowFn := true + if (flags&exprFlagForLoopInit) != 0 && p.lexer.Identifier.String == "of" { + // "for (async of" is only an arrow function if the next token is "=>" + isArrowFn = p.checkForArrowAfterTheCurrentToken() + + // Do not allow "for (async of []) ;" but do allow "for await (async of []) ;" + if !isArrowFn && (flags&exprFlagForAwaitLoopInit) == 0 && p.lexer.Raw() == "of" { + r := logger.Range{Loc: asyncRange.Loc, Len: p.lexer.Range().End() - asyncRange.Loc.Start} + p.log.AddError(&p.tracker, r, "For loop initializers cannot start with \"async of\"") + panic(js_lexer.LexerPanic{}) + } + } + + if isArrowFn { + p.markAsyncFn(asyncRange, false) + ref := p.storeNameInRef(p.lexer.Identifier) + arg := js_ast.Arg{Binding: js_ast.Binding{Loc: p.lexer.Loc(), Data: &js_ast.BIdentifier{Ref: ref}}} + p.lexer.Next() + + p.pushScopeForParsePass(js_ast.ScopeFunctionArgs, asyncRange.Loc) + defer p.popScope() + + arrow := p.parseArrowBody([]js_ast.Arg{arg}, fnOrArrowDataParse{ + needsAsyncLoc: arg.Binding.Loc, + await: allowExpr, + }) + arrow.IsAsync = true + return js_ast.Expr{Loc: asyncRange.Loc, Data: arrow} + } + } + + // "async()" + // "async () => {}" + case js_lexer.TOpenParen: + p.lexer.Next() + return p.parseParenExpr(asyncRange.Loc, level, parenExprOpts{asyncRange: asyncRange}) + + // "async()" + // "async () => {}" + case js_lexer.TLessThan: + if p.options.ts.Parse && (!p.options.jsx.Parse || p.isTSArrowFnJSX()) { + if result := p.trySkipTypeScriptTypeParametersThenOpenParenWithBacktracking(); result != didNotSkipAnything { + 
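+ // On success the lexer is left on "(", which parseParenExpr expects the caller
+ // to have already consumed, so skip over it here.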
p.lexer.Next() + return p.parseParenExpr(asyncRange.Loc, level, parenExprOpts{ + asyncRange: asyncRange, + forceArrowFn: result == definitelyTypeParameters, + }) + } + } + } + } + + // "async" + // "async + 1" + return js_ast.Expr{Loc: asyncRange.Loc, Data: &js_ast.EIdentifier{ + Ref: p.storeNameInRef(js_lexer.MaybeSubstring{String: "async"})}} +} + +func (p *parser) parseFnExpr(loc logger.Loc, isAsync bool, asyncRange logger.Range) js_ast.Expr { + p.lexer.Next() + isGenerator := p.lexer.Token == js_lexer.TAsterisk + hasError := false + if isAsync { + hasError = p.markAsyncFn(asyncRange, isGenerator) + } + if isGenerator { + if !hasError { + p.markSyntaxFeature(compat.Generator, p.lexer.Range()) + } + p.lexer.Next() + } + var name *ast.LocRef + + p.pushScopeForParsePass(js_ast.ScopeFunctionArgs, loc) + defer p.popScope() + + // The name is optional + if p.lexer.Token == js_lexer.TIdentifier { + // Don't declare the name "arguments" since it's shadowed and inaccessible + name = &ast.LocRef{Loc: p.lexer.Loc()} + if text := p.lexer.Identifier.String; text != "arguments" { + name.Ref = p.declareSymbol(ast.SymbolHoistedFunction, name.Loc, text) + } else { + name.Ref = p.newSymbol(ast.SymbolHoistedFunction, text) + } + p.lexer.Next() + } + + // Even anonymous functions can have TypeScript type parameters + if p.options.ts.Parse { + p.skipTypeScriptTypeParameters(allowConstModifier) + } + + await := allowIdent + yield := allowIdent + if isAsync { + await = allowExpr + } + if isGenerator { + yield = allowExpr + } + + fn, _ := p.parseFn(name, logger.Range{}, 0, fnOrArrowDataParse{ + needsAsyncLoc: loc, + asyncRange: asyncRange, + await: await, + yield: yield, + }) + p.validateFunctionName(fn, fnExpr) + return js_ast.Expr{Loc: loc, Data: &js_ast.EFunction{Fn: fn}} +} + +type parenExprOpts struct { + asyncRange logger.Range + forceArrowFn bool +} + +// This assumes that the open parenthesis has already been parsed by the caller +func (p *parser) parseParenExpr(loc logger.Loc, level js_ast.L, opts parenExprOpts) js_ast.Expr { + items := []js_ast.Expr{} + errors := deferredErrors{} + arrowArgErrors := deferredArrowArgErrors{} + spreadRange := logger.Range{} + typeColonRange := logger.Range{} + commaAfterSpread := logger.Loc{} + isAsync := opts.asyncRange.Len > 0 + + // Push a scope assuming this is an arrow function. It may not be, in which + // case we'll need to roll this change back. This has to be done ahead of + // parsing the arguments instead of later on when we hit the "=>" token and + // we know it's an arrow function because the arguments may have default + // values that introduce new scopes and declare new symbols. If this is an + // arrow function, then those new scopes will need to be parented under the + // scope of the arrow function itself. + scopeIndex := p.pushScopeForParsePass(js_ast.ScopeFunctionArgs, loc) + + // Allow "in" inside parentheses + oldAllowIn := p.allowIn + p.allowIn = true + + // Forbid "await" and "yield", but only for arrow functions + oldFnOrArrowData := p.fnOrArrowDataParse + p.fnOrArrowDataParse.arrowArgErrors = &arrowArgErrors + + // Scan over the comma-separated arguments or expressions + for p.lexer.Token != js_lexer.TCloseParen { + itemLoc := p.lexer.Loc() + isSpread := p.lexer.Token == js_lexer.TDotDotDot + + if isSpread { + spreadRange = p.lexer.Range() + p.markSyntaxFeature(compat.RestArgument, spreadRange) + p.lexer.Next() + } + + // We don't know yet whether these are arguments or expressions, so parse + // a superset of the expression syntax. 
Errors about things that are valid + // in one but not in the other are deferred. + p.latestArrowArgLoc = p.lexer.Loc() + item := p.parseExprOrBindings(js_ast.LComma, &errors) + + if isSpread { + item = js_ast.Expr{Loc: itemLoc, Data: &js_ast.ESpread{Value: item}} + } + + // Skip over types + if p.options.ts.Parse && p.lexer.Token == js_lexer.TColon { + typeColonRange = p.lexer.Range() + p.lexer.Next() + p.skipTypeScriptType(js_ast.LLowest) + } + + // There may be a "=" after the type (but not after an "as" cast) + if p.options.ts.Parse && p.lexer.Token == js_lexer.TEquals && p.lexer.Loc() != p.forbidSuffixAfterAsLoc { + p.lexer.Next() + item = js_ast.Assign(item, p.parseExpr(js_ast.LComma)) + } + + items = append(items, item) + if p.lexer.Token != js_lexer.TComma { + break + } + + // Spread arguments must come last. If there's a spread argument followed + // by a comma, throw an error if we use these expressions as bindings. + if isSpread { + commaAfterSpread = p.lexer.Loc() + } + + // Eat the comma token + p.lexer.Next() + } + + // The parenthetical construct must end with a close parenthesis + p.lexer.Expect(js_lexer.TCloseParen) + + // Restore "in" operator status before we parse the arrow function body + p.allowIn = oldAllowIn + + // Also restore "await" and "yield" expression errors + p.fnOrArrowDataParse = oldFnOrArrowData + + // Are these arguments to an arrow function? + if p.lexer.Token == js_lexer.TEqualsGreaterThan || opts.forceArrowFn || (p.options.ts.Parse && p.lexer.Token == js_lexer.TColon) { + // Arrow functions are not allowed inside certain expressions + if level > js_ast.LAssign { + p.lexer.Unexpected() + } + + var invalidLog invalidLog + args := []js_ast.Arg{} + + if isAsync { + p.markAsyncFn(opts.asyncRange, false) + } + + // First, try converting the expressions to bindings + for _, item := range items { + isSpread := false + if spread, ok := item.Data.(*js_ast.ESpread); ok { + item = spread.Value + isSpread = true + } + binding, initializerOrNil, log := p.convertExprToBindingAndInitializer(item, invalidLog, isSpread) + invalidLog = log + args = append(args, js_ast.Arg{Binding: binding, DefaultOrNil: initializerOrNil}) + } + + // Avoid parsing TypeScript code like "a ? (1 + 2) : (3 + 4)" as an arrow + // function. The ":" after the ")" may be a return type annotation, so we + // attempt to convert the expressions to bindings first before deciding + // whether this is an arrow function, and only pick an arrow function if + // there were no conversion errors. 
+ if p.lexer.Token == js_lexer.TEqualsGreaterThan || (len(invalidLog.invalidTokens) == 0 && + p.trySkipTypeScriptArrowReturnTypeWithBacktracking()) || opts.forceArrowFn { + if commaAfterSpread.Start != 0 { + p.log.AddError(&p.tracker, logger.Range{Loc: commaAfterSpread, Len: 1}, "Unexpected \",\" after rest pattern") + } + p.logArrowArgErrors(&arrowArgErrors) + p.logDeferredArrowArgErrors(&errors) + + // Now that we've decided we're an arrow function, report binding pattern + // conversion errors + if len(invalidLog.invalidTokens) > 0 { + for _, token := range invalidLog.invalidTokens { + p.log.AddError(&p.tracker, token, "Invalid binding pattern") + } + panic(js_lexer.LexerPanic{}) + } + + // Also report syntax features used in bindings + for _, entry := range invalidLog.syntaxFeatures { + p.markSyntaxFeature(entry.feature, entry.token) + } + + await := allowIdent + if isAsync { + await = allowExpr + } + + arrow := p.parseArrowBody(args, fnOrArrowDataParse{ + needsAsyncLoc: loc, + await: await, + }) + arrow.IsAsync = isAsync + arrow.HasRestArg = spreadRange.Len > 0 + p.popScope() + return js_ast.Expr{Loc: loc, Data: arrow} + } + } + + // If we get here, it's not an arrow function so undo the pushing of the + // scope we did earlier. This needs to flatten any child scopes into the + // parent scope as if the scope was never pushed in the first place. + p.popAndFlattenScope(scopeIndex) + + // If this isn't an arrow function, then types aren't allowed + if typeColonRange.Len > 0 { + p.log.AddError(&p.tracker, typeColonRange, "Unexpected \":\"") + panic(js_lexer.LexerPanic{}) + } + + // Are these arguments for a call to a function named "async"? + if isAsync { + p.logExprErrors(&errors) + async := js_ast.Expr{Loc: loc, Data: &js_ast.EIdentifier{ + Ref: p.storeNameInRef(js_lexer.MaybeSubstring{String: "async"})}} + return js_ast.Expr{Loc: loc, Data: &js_ast.ECall{ + Target: async, + Args: items, + }} + } + + // Is this a chain of expressions and comma operators? + if len(items) > 0 { + p.logExprErrors(&errors) + if spreadRange.Len > 0 { + p.log.AddError(&p.tracker, spreadRange, "Unexpected \"...\"") + panic(js_lexer.LexerPanic{}) + } + value := js_ast.JoinAllWithComma(items) + p.markExprAsParenthesized(value, loc, isAsync) + return value + } + + // Indicate that we expected an arrow function + p.lexer.Expected(js_lexer.TEqualsGreaterThan) + return js_ast.Expr{} +} + +type invalidLog struct { + invalidTokens []logger.Range + syntaxFeatures []syntaxFeature +} + +type syntaxFeature struct { + feature compat.JSFeature + token logger.Range +} + +func (p *parser) convertExprToBindingAndInitializer( + expr js_ast.Expr, invalidLog invalidLog, isSpread bool, +) (js_ast.Binding, js_ast.Expr, invalidLog) { + var initializerOrNil js_ast.Expr + if assign, ok := expr.Data.(*js_ast.EBinary); ok && assign.Op == js_ast.BinOpAssign { + initializerOrNil = assign.Right + expr = assign.Left + } + binding, invalidLog := p.convertExprToBinding(expr, invalidLog) + if initializerOrNil.Data != nil { + equalsRange := p.source.RangeOfOperatorBefore(initializerOrNil.Loc, "=") + if isSpread { + p.log.AddError(&p.tracker, equalsRange, "A rest argument cannot have a default initializer") + } else { + invalidLog.syntaxFeatures = append(invalidLog.syntaxFeatures, syntaxFeature{ + feature: compat.DefaultArgument, + token: equalsRange, + }) + } + } + return binding, initializerOrNil, invalidLog +} + +// Note: do not write to "p.log" in this function. 
Any errors due to conversion +// from expression to binding should be written to "invalidLog" instead. That +// way we can potentially keep this as an expression if it turns out it's not +// needed as a binding after all. +func (p *parser) convertExprToBinding(expr js_ast.Expr, invalidLog invalidLog) (js_ast.Binding, invalidLog) { + switch e := expr.Data.(type) { + case *js_ast.EMissing: + return js_ast.Binding{Loc: expr.Loc, Data: js_ast.BMissingShared}, invalidLog + + case *js_ast.EIdentifier: + return js_ast.Binding{Loc: expr.Loc, Data: &js_ast.BIdentifier{Ref: e.Ref}}, invalidLog + + case *js_ast.EArray: + if e.CommaAfterSpread.Start != 0 { + invalidLog.invalidTokens = append(invalidLog.invalidTokens, logger.Range{Loc: e.CommaAfterSpread, Len: 1}) + } + invalidLog.syntaxFeatures = append(invalidLog.syntaxFeatures, + syntaxFeature{feature: compat.Destructuring, token: p.source.RangeOfOperatorAfter(expr.Loc, "[")}) + items := []js_ast.ArrayBinding{} + isSpread := false + for _, item := range e.Items { + if i, ok := item.Data.(*js_ast.ESpread); ok { + isSpread = true + item = i.Value + if _, ok := item.Data.(*js_ast.EIdentifier); !ok { + p.markSyntaxFeature(compat.NestedRestBinding, p.source.RangeOfOperatorAfter(item.Loc, "[")) + } + } + binding, initializerOrNil, log := p.convertExprToBindingAndInitializer(item, invalidLog, isSpread) + invalidLog = log + items = append(items, js_ast.ArrayBinding{ + Binding: binding, + DefaultValueOrNil: initializerOrNil, + Loc: item.Loc, + }) + } + return js_ast.Binding{Loc: expr.Loc, Data: &js_ast.BArray{ + Items: items, + HasSpread: isSpread, + IsSingleLine: e.IsSingleLine, + CloseBracketLoc: e.CloseBracketLoc, + }}, invalidLog + + case *js_ast.EObject: + if e.CommaAfterSpread.Start != 0 { + invalidLog.invalidTokens = append(invalidLog.invalidTokens, logger.Range{Loc: e.CommaAfterSpread, Len: 1}) + } + invalidLog.syntaxFeatures = append(invalidLog.syntaxFeatures, + syntaxFeature{feature: compat.Destructuring, token: p.source.RangeOfOperatorAfter(expr.Loc, "{")}) + properties := []js_ast.PropertyBinding{} + for _, property := range e.Properties { + if property.Kind.IsMethodDefinition() { + invalidLog.invalidTokens = append(invalidLog.invalidTokens, js_lexer.RangeOfIdentifier(p.source, property.Key.Loc)) + continue + } + binding, initializerOrNil, log := p.convertExprToBindingAndInitializer(property.ValueOrNil, invalidLog, false) + invalidLog = log + if initializerOrNil.Data == nil { + initializerOrNil = property.InitializerOrNil + } + properties = append(properties, js_ast.PropertyBinding{ + Loc: property.Loc, + IsSpread: property.Kind == js_ast.PropertySpread, + IsComputed: property.Flags.Has(js_ast.PropertyIsComputed), + Key: property.Key, + Value: binding, + DefaultValueOrNil: initializerOrNil, + }) + } + return js_ast.Binding{Loc: expr.Loc, Data: &js_ast.BObject{ + Properties: properties, + IsSingleLine: e.IsSingleLine, + CloseBraceLoc: e.CloseBraceLoc, + }}, invalidLog + + default: + invalidLog.invalidTokens = append(invalidLog.invalidTokens, logger.Range{Loc: expr.Loc}) + return js_ast.Binding{}, invalidLog + } +} + +func (p *parser) saveExprCommentsHere() logger.Loc { + loc := p.lexer.Loc() + if p.exprComments != nil && len(p.lexer.CommentsBeforeToken) > 0 { + comments := make([]string, len(p.lexer.CommentsBeforeToken)) + for i, comment := range p.lexer.CommentsBeforeToken { + comments[i] = p.source.CommentTextWithoutIndent(comment) + } + p.exprComments[loc] = comments + p.lexer.CommentsBeforeToken = p.lexer.CommentsBeforeToken[0:] + } + return 
loc +} + +type exprFlag uint8 + +const ( + exprFlagDecorator exprFlag = 1 << iota + exprFlagForLoopInit + exprFlagForAwaitLoopInit +) + +func (p *parser) parsePrefix(level js_ast.L, errors *deferredErrors, flags exprFlag) js_ast.Expr { + loc := p.saveExprCommentsHere() + + switch p.lexer.Token { + case js_lexer.TSuper: + superRange := p.lexer.Range() + p.lexer.Next() + + switch p.lexer.Token { + case js_lexer.TOpenParen: + if level < js_ast.LCall && p.fnOrArrowDataParse.allowSuperCall { + return js_ast.Expr{Loc: loc, Data: js_ast.ESuperShared} + } + + case js_lexer.TDot, js_lexer.TOpenBracket: + if p.fnOrArrowDataParse.allowSuperProperty { + return js_ast.Expr{Loc: loc, Data: js_ast.ESuperShared} + } + } + + p.log.AddError(&p.tracker, superRange, "Unexpected \"super\"") + return js_ast.Expr{Loc: loc, Data: js_ast.ESuperShared} + + case js_lexer.TOpenParen: + if errors != nil { + errors.invalidParens = append(errors.invalidParens, p.lexer.Range()) + } + + p.lexer.Next() + + // Arrow functions aren't allowed in the middle of expressions + if level > js_ast.LAssign { + // Allow "in" inside parentheses + oldAllowIn := p.allowIn + p.allowIn = true + + value := p.parseExpr(js_ast.LLowest) + p.markExprAsParenthesized(value, loc, false) + p.lexer.Expect(js_lexer.TCloseParen) + + p.allowIn = oldAllowIn + return value + } + + value := p.parseParenExpr(loc, level, parenExprOpts{}) + return value + + case js_lexer.TFalse: + p.lexer.Next() + return js_ast.Expr{Loc: loc, Data: &js_ast.EBoolean{Value: false}} + + case js_lexer.TTrue: + p.lexer.Next() + return js_ast.Expr{Loc: loc, Data: &js_ast.EBoolean{Value: true}} + + case js_lexer.TNull: + p.lexer.Next() + return js_ast.Expr{Loc: loc, Data: js_ast.ENullShared} + + case js_lexer.TThis: + if p.fnOrArrowDataParse.isThisDisallowed { + p.log.AddError(&p.tracker, p.lexer.Range(), "Cannot use \"this\" here:") + } + p.lexer.Next() + return js_ast.Expr{Loc: loc, Data: js_ast.EThisShared} + + case js_lexer.TPrivateIdentifier: + if !p.allowPrivateIdentifiers || !p.allowIn || level >= js_ast.LCompare { + p.lexer.Unexpected() + } + + name := p.lexer.Identifier + p.lexer.Next() + + // Check for "#foo in bar" + if p.lexer.Token != js_lexer.TIn { + p.lexer.Expected(js_lexer.TIn) + } + + // Make sure to lower all matching private names + if p.options.unsupportedJSFeatures.Has(compat.ClassPrivateBrandCheck) { + if p.lowerAllOfThesePrivateNames == nil { + p.lowerAllOfThesePrivateNames = make(map[string]bool) + } + p.lowerAllOfThesePrivateNames[name.String] = true + } + + return js_ast.Expr{Loc: loc, Data: &js_ast.EPrivateIdentifier{Ref: p.storeNameInRef(name)}} + + case js_lexer.TIdentifier: + name := p.lexer.Identifier + nameRange := p.lexer.Range() + raw := p.lexer.Raw() + p.lexer.Next() + + // Handle async and await expressions + switch name.String { + case "async": + if raw == "async" { + return p.parseAsyncPrefixExpr(nameRange, level, flags) + } + + case "await": + switch p.fnOrArrowDataParse.await { + case forbidAll: + p.log.AddError(&p.tracker, nameRange, "The keyword \"await\" cannot be used here:") + + case allowExpr: + if raw != "await" { + p.log.AddError(&p.tracker, nameRange, "The keyword \"await\" cannot be escaped") + } else { + if p.fnOrArrowDataParse.isTopLevel { + p.topLevelAwaitKeyword = nameRange + } + if p.fnOrArrowDataParse.arrowArgErrors != nil { + p.fnOrArrowDataParse.arrowArgErrors.invalidExprAwait = nameRange + } + value := p.parseExpr(js_ast.LPrefix) + if p.lexer.Token == js_lexer.TAsteriskAsterisk { + p.lexer.Unexpected() + } + return 
js_ast.Expr{Loc: loc, Data: &js_ast.EAwait{Value: value}} + } + + case allowIdent: + p.lexer.PrevTokenWasAwaitKeyword = true + p.lexer.AwaitKeywordLoc = loc + p.lexer.FnOrArrowStartLoc = p.fnOrArrowDataParse.needsAsyncLoc + } + + case "yield": + switch p.fnOrArrowDataParse.yield { + case forbidAll: + p.log.AddError(&p.tracker, nameRange, "The keyword \"yield\" cannot be used here:") + + case allowExpr: + if raw != "yield" { + p.log.AddError(&p.tracker, nameRange, "The keyword \"yield\" cannot be escaped") + } else { + if level > js_ast.LAssign { + p.log.AddError(&p.tracker, nameRange, "Cannot use a \"yield\" expression here without parentheses:") + } + if p.fnOrArrowDataParse.arrowArgErrors != nil { + p.fnOrArrowDataParse.arrowArgErrors.invalidExprYield = nameRange + } + return p.parseYieldExpr(loc) + } + + case allowIdent: + if !p.lexer.HasNewlineBefore { + // Try to gracefully recover if "yield" is used in the wrong place + switch p.lexer.Token { + case js_lexer.TNull, js_lexer.TIdentifier, js_lexer.TFalse, js_lexer.TTrue, + js_lexer.TNumericLiteral, js_lexer.TBigIntegerLiteral, js_lexer.TStringLiteral: + p.log.AddError(&p.tracker, nameRange, "Cannot use \"yield\" outside a generator function") + return p.parseYieldExpr(loc) + } + } + } + } + + // Handle the start of an arrow expression + if p.lexer.Token == js_lexer.TEqualsGreaterThan && level <= js_ast.LAssign { + ref := p.storeNameInRef(name) + arg := js_ast.Arg{Binding: js_ast.Binding{Loc: loc, Data: &js_ast.BIdentifier{Ref: ref}}} + + p.pushScopeForParsePass(js_ast.ScopeFunctionArgs, loc) + defer p.popScope() + + return js_ast.Expr{Loc: loc, Data: p.parseArrowBody([]js_ast.Arg{arg}, fnOrArrowDataParse{ + needsAsyncLoc: loc, + })} + } + + ref := p.storeNameInRef(name) + return js_ast.Expr{Loc: loc, Data: &js_ast.EIdentifier{Ref: ref}} + + case js_lexer.TStringLiteral, js_lexer.TNoSubstitutionTemplateLiteral: + return p.parseStringLiteral() + + case js_lexer.TTemplateHead: + var legacyOctalLoc logger.Loc + headLoc := p.lexer.Loc() + head := p.lexer.StringLiteral() + if p.lexer.LegacyOctalLoc.Start > loc.Start { + legacyOctalLoc = p.lexer.LegacyOctalLoc + } + parts, tailLegacyOctalLoc := p.parseTemplateParts(false /* includeRaw */) + if tailLegacyOctalLoc.Start > 0 { + legacyOctalLoc = tailLegacyOctalLoc + } + return js_ast.Expr{Loc: loc, Data: &js_ast.ETemplate{ + HeadLoc: headLoc, + HeadCooked: head, + Parts: parts, + LegacyOctalLoc: legacyOctalLoc, + }} + + case js_lexer.TNumericLiteral: + value := js_ast.Expr{Loc: loc, Data: &js_ast.ENumber{Value: p.lexer.Number}} + p.checkForLegacyOctalLiteral(value.Data) + p.lexer.Next() + return value + + case js_lexer.TBigIntegerLiteral: + value := p.lexer.Identifier + p.markSyntaxFeature(compat.Bigint, p.lexer.Range()) + p.lexer.Next() + return js_ast.Expr{Loc: loc, Data: &js_ast.EBigInt{Value: value.String}} + + case js_lexer.TSlash, js_lexer.TSlashEquals: + p.lexer.ScanRegExp() + value := p.lexer.Raw() + p.lexer.Next() + return js_ast.Expr{Loc: loc, Data: &js_ast.ERegExp{Value: value}} + + case js_lexer.TVoid: + p.lexer.Next() + value := p.parseExpr(js_ast.LPrefix) + if p.lexer.Token == js_lexer.TAsteriskAsterisk { + p.lexer.Unexpected() + } + return js_ast.Expr{Loc: loc, Data: &js_ast.EUnary{Op: js_ast.UnOpVoid, Value: value}} + + case js_lexer.TTypeof: + p.lexer.Next() + value := p.parseExpr(js_ast.LPrefix) + if p.lexer.Token == js_lexer.TAsteriskAsterisk { + p.lexer.Unexpected() + } + _, valueIsIdentifier := value.Data.(*js_ast.EIdentifier) + return js_ast.Expr{Loc: loc, Data: 
&js_ast.EUnary{ + Op: js_ast.UnOpTypeof, + Value: value, + WasOriginallyTypeofIdentifier: valueIsIdentifier, + }} + + case js_lexer.TDelete: + p.lexer.Next() + value := p.parseExpr(js_ast.LPrefix) + if p.lexer.Token == js_lexer.TAsteriskAsterisk { + p.lexer.Unexpected() + } + if index, ok := value.Data.(*js_ast.EIndex); ok { + if private, ok := index.Index.Data.(*js_ast.EPrivateIdentifier); ok { + name := p.loadNameFromRef(private.Ref) + r := logger.Range{Loc: index.Index.Loc, Len: int32(len(name))} + p.log.AddError(&p.tracker, r, fmt.Sprintf("Deleting the private name %q is forbidden", name)) + } + } + _, valueIsIdentifier := value.Data.(*js_ast.EIdentifier) + return js_ast.Expr{Loc: loc, Data: &js_ast.EUnary{ + Op: js_ast.UnOpDelete, + Value: value, + WasOriginallyDeleteOfIdentifierOrPropertyAccess: valueIsIdentifier || js_ast.IsPropertyAccess(value), + }} + + case js_lexer.TPlus: + p.lexer.Next() + value := p.parseExpr(js_ast.LPrefix) + if p.lexer.Token == js_lexer.TAsteriskAsterisk { + p.lexer.Unexpected() + } + return js_ast.Expr{Loc: loc, Data: &js_ast.EUnary{Op: js_ast.UnOpPos, Value: value}} + + case js_lexer.TMinus: + p.lexer.Next() + value := p.parseExpr(js_ast.LPrefix) + if p.lexer.Token == js_lexer.TAsteriskAsterisk { + p.lexer.Unexpected() + } + return js_ast.Expr{Loc: loc, Data: &js_ast.EUnary{Op: js_ast.UnOpNeg, Value: value}} + + case js_lexer.TTilde: + p.lexer.Next() + value := p.parseExpr(js_ast.LPrefix) + if p.lexer.Token == js_lexer.TAsteriskAsterisk { + p.lexer.Unexpected() + } + return js_ast.Expr{Loc: loc, Data: &js_ast.EUnary{Op: js_ast.UnOpCpl, Value: value}} + + case js_lexer.TExclamation: + p.lexer.Next() + value := p.parseExpr(js_ast.LPrefix) + if p.lexer.Token == js_lexer.TAsteriskAsterisk { + p.lexer.Unexpected() + } + return js_ast.Expr{Loc: loc, Data: &js_ast.EUnary{Op: js_ast.UnOpNot, Value: value}} + + case js_lexer.TMinusMinus: + p.lexer.Next() + return js_ast.Expr{Loc: loc, Data: &js_ast.EUnary{Op: js_ast.UnOpPreDec, Value: p.parseExpr(js_ast.LPrefix)}} + + case js_lexer.TPlusPlus: + p.lexer.Next() + return js_ast.Expr{Loc: loc, Data: &js_ast.EUnary{Op: js_ast.UnOpPreInc, Value: p.parseExpr(js_ast.LPrefix)}} + + case js_lexer.TFunction: + return p.parseFnExpr(loc, false /* isAsync */, logger.Range{}) + + case js_lexer.TClass: + return p.parseClassExpr(nil) + + case js_lexer.TAt: + // Parse decorators before class expressions + decorators := p.parseDecorators(p.currentScope, logger.Range{}, decoratorBeforeClassExpr) + return p.parseClassExpr(decorators) + + case js_lexer.TNew: + p.lexer.Next() + + // Special-case the weird "new.target" expression here + if p.lexer.Token == js_lexer.TDot { + p.lexer.Next() + if p.lexer.Token != js_lexer.TIdentifier || p.lexer.Raw() != "target" { + p.lexer.Unexpected() + } + r := logger.Range{Loc: loc, Len: p.lexer.Range().End() - loc.Start} + p.markSyntaxFeature(compat.NewTarget, r) + p.lexer.Next() + return js_ast.Expr{Loc: loc, Data: &js_ast.ENewTarget{Range: r}} + } + + target := p.parseExprWithFlags(js_ast.LMember, flags) + args := []js_ast.Expr{} + var closeParenLoc logger.Loc + var isMultiLine bool + + if p.lexer.Token == js_lexer.TOpenParen { + args, closeParenLoc, isMultiLine = p.parseCallArgs() + } + + return js_ast.Expr{Loc: loc, Data: &js_ast.ENew{ + Target: target, + Args: args, + CloseParenLoc: closeParenLoc, + IsMultiLine: isMultiLine, + }} + + case js_lexer.TOpenBracket: + p.lexer.Next() + isSingleLine := !p.lexer.HasNewlineBefore + items := []js_ast.Expr{} + selfErrors := deferredErrors{} + commaAfterSpread 
:= logger.Loc{} + + // Allow "in" inside arrays + oldAllowIn := p.allowIn + p.allowIn = true + + for p.lexer.Token != js_lexer.TCloseBracket { + switch p.lexer.Token { + case js_lexer.TComma: + items = append(items, js_ast.Expr{Loc: p.lexer.Loc(), Data: js_ast.EMissingShared}) + + case js_lexer.TDotDotDot: + if errors != nil { + errors.arraySpreadFeature = p.lexer.Range() + } else { + p.markSyntaxFeature(compat.ArraySpread, p.lexer.Range()) + } + dotsLoc := p.saveExprCommentsHere() + p.lexer.Next() + item := p.parseExprOrBindings(js_ast.LComma, &selfErrors) + items = append(items, js_ast.Expr{Loc: dotsLoc, Data: &js_ast.ESpread{Value: item}}) + + // Commas are not allowed here when destructuring + if p.lexer.Token == js_lexer.TComma { + commaAfterSpread = p.lexer.Loc() + } + + default: + item := p.parseExprOrBindings(js_ast.LComma, &selfErrors) + items = append(items, item) + } + + if p.lexer.Token != js_lexer.TComma { + break + } + if p.lexer.HasNewlineBefore { + isSingleLine = false + } + p.lexer.Next() + if p.lexer.HasNewlineBefore { + isSingleLine = false + } + } + + if p.lexer.HasNewlineBefore { + isSingleLine = false + } + closeBracketLoc := p.saveExprCommentsHere() + p.lexer.Expect(js_lexer.TCloseBracket) + p.allowIn = oldAllowIn + + if p.willNeedBindingPattern() { + // Is this a binding pattern? + } else if errors == nil { + // Is this an expression? + p.logExprErrors(&selfErrors) + } else { + // In this case, we can't distinguish between the two yet + selfErrors.mergeInto(errors) + } + + return js_ast.Expr{Loc: loc, Data: &js_ast.EArray{ + Items: items, + CommaAfterSpread: commaAfterSpread, + IsSingleLine: isSingleLine, + CloseBracketLoc: closeBracketLoc, + }} + + case js_lexer.TOpenBrace: + p.lexer.Next() + isSingleLine := !p.lexer.HasNewlineBefore + properties := []js_ast.Property{} + selfErrors := deferredErrors{} + commaAfterSpread := logger.Loc{} + + // Allow "in" inside object literals + oldAllowIn := p.allowIn + p.allowIn = true + + for p.lexer.Token != js_lexer.TCloseBrace { + if p.lexer.Token == js_lexer.TDotDotDot { + dotLoc := p.saveExprCommentsHere() + p.lexer.Next() + value := p.parseExprOrBindings(js_ast.LComma, &selfErrors) + properties = append(properties, js_ast.Property{ + Kind: js_ast.PropertySpread, + Loc: dotLoc, + ValueOrNil: value, + }) + + // Commas are not allowed here when destructuring + if p.lexer.Token == js_lexer.TComma { + commaAfterSpread = p.lexer.Loc() + } + } else { + // This property may turn out to be a type in TypeScript, which should be ignored + if property, ok := p.parseProperty(p.saveExprCommentsHere(), js_ast.PropertyField, propertyOpts{}, &selfErrors); ok { + properties = append(properties, property) + } + } + + if p.lexer.Token != js_lexer.TComma { + break + } + if p.lexer.HasNewlineBefore { + isSingleLine = false + } + p.lexer.Next() + if p.lexer.HasNewlineBefore { + isSingleLine = false + } + } + + if p.lexer.HasNewlineBefore { + isSingleLine = false + } + closeBraceLoc := p.saveExprCommentsHere() + p.lexer.Expect(js_lexer.TCloseBrace) + p.allowIn = oldAllowIn + + if p.willNeedBindingPattern() { + // Is this a binding pattern? + } else if errors == nil { + // Is this an expression? 
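+ // There is no outer error collector, so report the deferred errors for this literal now.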
+ p.logExprErrors(&selfErrors) + } else { + // In this case, we can't distinguish between the two yet + selfErrors.mergeInto(errors) + } + + return js_ast.Expr{Loc: loc, Data: &js_ast.EObject{ + Properties: properties, + CommaAfterSpread: commaAfterSpread, + IsSingleLine: isSingleLine, + CloseBraceLoc: closeBraceLoc, + }} + + case js_lexer.TLessThan: + // This is a very complicated and highly ambiguous area of TypeScript + // syntax. Many similar-looking things are overloaded. + // + // TS: + // + // A type cast: + // (x) + // <[]>(x) + // (x) + // (x) + // + // An arrow function with type parameters: + // (x) => {} + // (x) => {} + // (x) => {} + // (x) => {} + // (x) => {} + // (x) => {} + // + // A syntax error: + // <>() => {} + // + // TSX: + // + // A JSX element: + // <>() => {} + // (x) => {} + // + // (x) => {} + // (x) => {} + // + // (x) => {} + // + // An arrow function with type parameters: + // (x) => {} + // (x) => {} + // (x) => {} + // (x) => {} + // (x) + // (x) => {} + // + // A syntax error: + // <[]>(x) + // (x) + // <>() => {} + // (x) => {} + + if p.options.ts.Parse && p.options.jsx.Parse && p.isTSArrowFnJSX() { + p.skipTypeScriptTypeParameters(allowConstModifier) + p.lexer.Expect(js_lexer.TOpenParen) + return p.parseParenExpr(loc, level, parenExprOpts{forceArrowFn: true}) + } + + // Print a friendly error message when parsing JSX as JavaScript + if !p.options.jsx.Parse && !p.options.ts.Parse { + var how string + switch logger.API { + case logger.CLIAPI: + how = " You can use \"--loader:.js=jsx\" to do that." + case logger.JSAPI: + how = " You can use \"loader: { '.js': 'jsx' }\" to do that." + case logger.GoAPI: + how = " You can use 'Loader: map[string]api.Loader{\".js\": api.LoaderJSX}' to do that." + } + p.log.AddErrorWithNotes(&p.tracker, p.lexer.Range(), "The JSX syntax extension is not currently enabled", []logger.MsgData{{ + Text: "The esbuild loader for this file is currently set to \"js\" but it must be set to \"jsx\" to be able to parse JSX syntax." + how}}) + p.options.jsx.Parse = true + } + + if p.options.jsx.Parse { + // Use NextInsideJSXElement() instead of Next() so we parse "<<" as "<" + p.lexer.NextInsideJSXElement() + element := p.parseJSXElement(loc) + + // The call to parseJSXElement() above doesn't consume the last + // TGreaterThan because the caller knows what Next() function to call. + // Use Next() instead of NextInsideJSXElement() here since the next + // token is an expression. + p.lexer.Next() + return element + } + + if p.options.ts.Parse { + // This is either an old-style type cast or a generic lambda function + + // TypeScript 4.5 introduced the ".mts" and ".cts" extensions that forbid + // the use of an expression starting with "<" that would be ambiguous + // when the file is in JSX mode. 
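+ // For example, a cast written as "<T>x" could also be the start of a JSX element named "T".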
+ if p.options.ts.NoAmbiguousLessThan && !p.isTSArrowFnJSX() { + p.log.AddError(&p.tracker, p.lexer.Range(), + "This syntax is not allowed in files with the \".mts\" or \".cts\" extension") + } + + // "(x)" + // "(x) => {}" + if result := p.trySkipTypeScriptTypeParametersThenOpenParenWithBacktracking(); result != didNotSkipAnything { + p.lexer.Expect(js_lexer.TOpenParen) + return p.parseParenExpr(loc, level, parenExprOpts{ + forceArrowFn: result == definitelyTypeParameters, + }) + } + + // "x" + p.lexer.Next() + p.skipTypeScriptType(js_ast.LLowest) + p.lexer.ExpectGreaterThan(false /* isInsideJSXElement */) + value := p.parsePrefix(level, errors, flags) + return value + } + + p.lexer.Unexpected() + return js_ast.Expr{} + + case js_lexer.TImport: + p.lexer.Next() + return p.parseImportExpr(loc, level) + + default: + p.lexer.Unexpected() + return js_ast.Expr{} + } +} + +func (p *parser) parseYieldExpr(loc logger.Loc) js_ast.Expr { + // Parse a yield-from expression, which yields from an iterator + isStar := p.lexer.Token == js_lexer.TAsterisk + if isStar && !p.lexer.HasNewlineBefore { + p.lexer.Next() + } + + var valueOrNil js_ast.Expr + + // The yield expression only has a value in certain cases + if isStar { + valueOrNil = p.parseExpr(js_ast.LYield) + } else { + switch p.lexer.Token { + case js_lexer.TCloseBrace, js_lexer.TCloseBracket, js_lexer.TCloseParen, + js_lexer.TColon, js_lexer.TComma, js_lexer.TSemicolon: + + default: + if !p.lexer.HasNewlineBefore { + valueOrNil = p.parseExpr(js_ast.LYield) + } + } + } + + return js_ast.Expr{Loc: loc, Data: &js_ast.EYield{ValueOrNil: valueOrNil, IsStar: isStar}} +} + +func (p *parser) willNeedBindingPattern() bool { + switch p.lexer.Token { + case js_lexer.TEquals: + // "[a] = b;" + return true + + case js_lexer.TIn: + // "for ([a] in b) {}" + return !p.allowIn + + case js_lexer.TIdentifier: + // "for ([a] of b) {}" + return !p.allowIn && p.lexer.IsContextualKeyword("of") + + default: + return false + } +} + +// Note: The caller has already parsed the "import" keyword +func (p *parser) parseImportExpr(loc logger.Loc, level js_ast.L) js_ast.Expr { + // Parse an "import.meta" expression + if p.lexer.Token == js_lexer.TDot { + p.lexer.Next() + if !p.lexer.IsContextualKeyword("meta") { + p.lexer.ExpectedString("\"meta\"") + } + p.esmImportMeta = logger.Range{Loc: loc, Len: p.lexer.Range().End() - loc.Start} + p.lexer.Next() + return js_ast.Expr{Loc: loc, Data: &js_ast.EImportMeta{RangeLen: p.esmImportMeta.Len}} + } + + if level > js_ast.LCall { + r := js_lexer.RangeOfIdentifier(p.source, loc) + p.log.AddError(&p.tracker, r, "Cannot use an \"import\" expression here without parentheses:") + } + + // Allow "in" inside call arguments + oldAllowIn := p.allowIn + p.allowIn = true + + p.lexer.Expect(js_lexer.TOpenParen) + + value := p.parseExpr(js_ast.LComma) + var optionsOrNil js_ast.Expr + + if p.lexer.Token == js_lexer.TComma { + // "import('./foo.json', )" + p.lexer.Next() + + if p.lexer.Token != js_lexer.TCloseParen { + // "import('./foo.json', { assert: { type: 'json' } })" + optionsOrNil = p.parseExpr(js_ast.LComma) + + if p.lexer.Token == js_lexer.TComma { + // "import('./foo.json', { assert: { type: 'json' } }, )" + p.lexer.Next() + } + } + } + + closeParenLoc := p.saveExprCommentsHere() + p.lexer.Expect(js_lexer.TCloseParen) + + p.allowIn = oldAllowIn + return js_ast.Expr{Loc: loc, Data: &js_ast.EImportCall{ + Expr: value, + OptionsOrNil: optionsOrNil, + CloseParenLoc: closeParenLoc, + }} +} + +func (p *parser) parseExprOrBindings(level js_ast.L, 
errors *deferredErrors) js_ast.Expr { + return p.parseExprCommon(level, errors, 0) +} + +func (p *parser) parseExpr(level js_ast.L) js_ast.Expr { + return p.parseExprCommon(level, nil, 0) +} + +func (p *parser) parseExprWithFlags(level js_ast.L, flags exprFlag) js_ast.Expr { + return p.parseExprCommon(level, nil, flags) +} + +func (p *parser) parseExprCommon(level js_ast.L, errors *deferredErrors, flags exprFlag) js_ast.Expr { + lexerCommentFlags := p.lexer.HasCommentBefore + expr := p.parsePrefix(level, errors, flags) + + if (lexerCommentFlags&(js_lexer.PureCommentBefore|js_lexer.NoSideEffectsCommentBefore)) != 0 && !p.options.ignoreDCEAnnotations { + if (lexerCommentFlags & js_lexer.NoSideEffectsCommentBefore) != 0 { + switch e := expr.Data.(type) { + case *js_ast.EArrow: + e.HasNoSideEffectsComment = true + case *js_ast.EFunction: + e.Fn.HasNoSideEffectsComment = true + } + } + + // There is no formal spec for "__PURE__" comments but from reverse- + // engineering, it looks like they apply to the next CallExpression or + // NewExpression. So in "/* @__PURE__ */ a().b() + c()" the comment applies + // to the expression "a().b()". + if (lexerCommentFlags&js_lexer.PureCommentBefore) != 0 && level < js_ast.LCall { + expr = p.parseSuffix(expr, js_ast.LCall-1, errors, flags) + switch e := expr.Data.(type) { + case *js_ast.ECall: + e.CanBeUnwrappedIfUnused = true + case *js_ast.ENew: + e.CanBeUnwrappedIfUnused = true + } + } + } + + return p.parseSuffix(expr, level, errors, flags) +} + +func (p *parser) parseSuffix(left js_ast.Expr, level js_ast.L, errors *deferredErrors, flags exprFlag) js_ast.Expr { + optionalChain := js_ast.OptionalChainNone + + for { + if p.lexer.Loc() == p.afterArrowBodyLoc { + for { + switch p.lexer.Token { + case js_lexer.TComma: + if level >= js_ast.LComma { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpComma, Left: left, Right: p.parseExpr(js_ast.LComma)}} + + default: + return left + } + } + } + + // Stop now if this token is forbidden to follow a TypeScript "as" cast + if p.lexer.Loc() == p.forbidSuffixAfterAsLoc { + return left + } + + // Reset the optional chain flag by default. That way we won't accidentally + // treat "c.d" as OptionalChainContinue in "a?.b + c.d". 
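+ // Suffixes that extend a member or call chain restore this flag below; other tokens leave it cleared.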
+ oldOptionalChain := optionalChain + optionalChain = js_ast.OptionalChainNone + + switch p.lexer.Token { + case js_lexer.TDot: + p.lexer.Next() + + if p.lexer.Token == js_lexer.TPrivateIdentifier && p.allowPrivateIdentifiers { + // "a.#b" + // "a?.b.#c" + if _, ok := left.Data.(*js_ast.ESuper); ok { + p.lexer.Expected(js_lexer.TIdentifier) + } + name := p.lexer.Identifier + nameLoc := p.lexer.Loc() + p.reportPrivateNameUsage(name.String) + p.lexer.Next() + ref := p.storeNameInRef(name) + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EIndex{ + Target: left, + Index: js_ast.Expr{Loc: nameLoc, Data: &js_ast.EPrivateIdentifier{Ref: ref}}, + OptionalChain: oldOptionalChain, + }} + } else { + // "a.b" + // "a?.b.c" + if !p.lexer.IsIdentifierOrKeyword() { + p.lexer.Expect(js_lexer.TIdentifier) + } + name := p.lexer.Identifier + nameLoc := p.lexer.Loc() + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: p.dotOrMangledPropParse(left, name, nameLoc, oldOptionalChain, wasOriginallyDot)} + } + + optionalChain = oldOptionalChain + + case js_lexer.TQuestionDot: + p.lexer.Next() + optionalStart := js_ast.OptionalChainStart + + // Remove unnecessary optional chains + if p.options.minifySyntax { + if isNullOrUndefined, _, ok := js_ast.ToNullOrUndefinedWithSideEffects(left.Data); ok && !isNullOrUndefined { + optionalStart = js_ast.OptionalChainNone + } + } + + switch p.lexer.Token { + case js_lexer.TOpenBracket: + // "a?.[b]" + p.lexer.Next() + + // Allow "in" inside the brackets + oldAllowIn := p.allowIn + p.allowIn = true + + index := p.parseExpr(js_ast.LLowest) + + p.allowIn = oldAllowIn + + closeBracketLoc := p.saveExprCommentsHere() + p.lexer.Expect(js_lexer.TCloseBracket) + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EIndex{ + Target: left, + Index: index, + OptionalChain: optionalStart, + CloseBracketLoc: closeBracketLoc, + }} + + case js_lexer.TOpenParen: + // "a?.()" + if level >= js_ast.LCall { + return left + } + kind := js_ast.NormalCall + if js_ast.IsPropertyAccess(left) { + kind = js_ast.TargetWasOriginallyPropertyAccess + } + args, closeParenLoc, isMultiLine := p.parseCallArgs() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.ECall{ + Target: left, + Args: args, + CloseParenLoc: closeParenLoc, + OptionalChain: optionalStart, + IsMultiLine: isMultiLine, + Kind: kind, + }} + + case js_lexer.TLessThan, js_lexer.TLessThanLessThan: + // "a?.()" + // "a?.<() => T>()" + if !p.options.ts.Parse { + p.lexer.Expected(js_lexer.TIdentifier) + } + p.skipTypeScriptTypeArguments(skipTypeScriptTypeArgumentsOpts{}) + if p.lexer.Token != js_lexer.TOpenParen { + p.lexer.Expected(js_lexer.TOpenParen) + } + if level >= js_ast.LCall { + return left + } + kind := js_ast.NormalCall + if js_ast.IsPropertyAccess(left) { + kind = js_ast.TargetWasOriginallyPropertyAccess + } + args, closeParenLoc, isMultiLine := p.parseCallArgs() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.ECall{ + Target: left, + Args: args, + CloseParenLoc: closeParenLoc, + OptionalChain: optionalStart, + IsMultiLine: isMultiLine, + Kind: kind, + }} + + default: + if p.lexer.Token == js_lexer.TPrivateIdentifier && p.allowPrivateIdentifiers { + // "a?.#b" + name := p.lexer.Identifier + nameLoc := p.lexer.Loc() + p.reportPrivateNameUsage(name.String) + p.lexer.Next() + ref := p.storeNameInRef(name) + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EIndex{ + Target: left, + Index: js_ast.Expr{Loc: nameLoc, Data: &js_ast.EPrivateIdentifier{Ref: ref}}, + OptionalChain: optionalStart, + }} + } else { + // "a?.b" + if 
!p.lexer.IsIdentifierOrKeyword() { + p.lexer.Expect(js_lexer.TIdentifier) + } + name := p.lexer.Identifier + nameLoc := p.lexer.Loc() + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: p.dotOrMangledPropParse(left, name, nameLoc, optionalStart, wasOriginallyDot)} + } + } + + // Only continue if we have started + if optionalStart == js_ast.OptionalChainStart { + optionalChain = js_ast.OptionalChainContinue + } + + case js_lexer.TNoSubstitutionTemplateLiteral: + if oldOptionalChain != js_ast.OptionalChainNone { + p.log.AddError(&p.tracker, p.lexer.Range(), "Template literals cannot have an optional chain as a tag") + } + headLoc := p.lexer.Loc() + headCooked, headRaw := p.lexer.CookedAndRawTemplateContents() + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.ETemplate{ + TagOrNil: left, + HeadLoc: headLoc, + HeadCooked: headCooked, + HeadRaw: headRaw, + TagWasOriginallyPropertyAccess: js_ast.IsPropertyAccess(left), + }} + + case js_lexer.TTemplateHead: + if oldOptionalChain != js_ast.OptionalChainNone { + p.log.AddError(&p.tracker, p.lexer.Range(), "Template literals cannot have an optional chain as a tag") + } + headLoc := p.lexer.Loc() + headCooked, headRaw := p.lexer.CookedAndRawTemplateContents() + parts, _ := p.parseTemplateParts(true /* includeRaw */) + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.ETemplate{ + TagOrNil: left, + HeadLoc: headLoc, + HeadCooked: headCooked, + HeadRaw: headRaw, + Parts: parts, + TagWasOriginallyPropertyAccess: js_ast.IsPropertyAccess(left), + }} + + case js_lexer.TOpenBracket: + // When parsing a decorator, ignore EIndex expressions since they may be + // part of a computed property: + // + // class Foo { + // @foo ['computed']() {} + // } + // + // This matches the behavior of the TypeScript compiler. + if (flags & exprFlagDecorator) != 0 { + return left + } + + p.lexer.Next() + + // Allow "in" inside the brackets + oldAllowIn := p.allowIn + p.allowIn = true + + index := p.parseExpr(js_ast.LLowest) + + p.allowIn = oldAllowIn + + closeBracketLoc := p.saveExprCommentsHere() + p.lexer.Expect(js_lexer.TCloseBracket) + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EIndex{ + Target: left, + Index: index, + OptionalChain: oldOptionalChain, + CloseBracketLoc: closeBracketLoc, + }} + optionalChain = oldOptionalChain + + case js_lexer.TOpenParen: + if level >= js_ast.LCall { + return left + } + kind := js_ast.NormalCall + if js_ast.IsPropertyAccess(left) { + kind = js_ast.TargetWasOriginallyPropertyAccess + } + args, closeParenLoc, isMultiLine := p.parseCallArgs() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.ECall{ + Target: left, + Args: args, + CloseParenLoc: closeParenLoc, + OptionalChain: oldOptionalChain, + IsMultiLine: isMultiLine, + Kind: kind, + }} + optionalChain = oldOptionalChain + + case js_lexer.TQuestion: + if level >= js_ast.LConditional { + return left + } + p.lexer.Next() + + // Stop now if we're parsing one of these: + // "(a?) => {}" + // "(a?: b) => {}" + // "(a?, b?) => {}" + if p.options.ts.Parse && left.Loc == p.latestArrowArgLoc && (p.lexer.Token == js_lexer.TColon || + p.lexer.Token == js_lexer.TCloseParen || p.lexer.Token == js_lexer.TComma) { + if errors == nil { + p.lexer.Unexpected() + } + errors.invalidExprAfterQuestion = p.lexer.Range() + return left + } + + // Allow "in" in between "?" 
and ":" + oldAllowIn := p.allowIn + p.allowIn = true + + yes := p.parseExpr(js_ast.LComma) + + p.allowIn = oldAllowIn + + p.lexer.Expect(js_lexer.TColon) + no := p.parseExpr(js_ast.LComma) + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EIf{Test: left, Yes: yes, No: no}} + + case js_lexer.TExclamation: + // Skip over TypeScript non-null assertions + if p.lexer.HasNewlineBefore { + return left + } + if !p.options.ts.Parse { + p.lexer.Unexpected() + } + p.lexer.Next() + optionalChain = oldOptionalChain + + case js_lexer.TMinusMinus: + if p.lexer.HasNewlineBefore || level >= js_ast.LPostfix { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EUnary{Op: js_ast.UnOpPostDec, Value: left}} + + case js_lexer.TPlusPlus: + if p.lexer.HasNewlineBefore || level >= js_ast.LPostfix { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EUnary{Op: js_ast.UnOpPostInc, Value: left}} + + case js_lexer.TComma: + if level >= js_ast.LComma { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpComma, Left: left, Right: p.parseExpr(js_ast.LComma)}} + + case js_lexer.TPlus: + if level >= js_ast.LAdd { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpAdd, Left: left, Right: p.parseExpr(js_ast.LAdd)}} + + case js_lexer.TPlusEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpAddAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TMinus: + if level >= js_ast.LAdd { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpSub, Left: left, Right: p.parseExpr(js_ast.LAdd)}} + + case js_lexer.TMinusEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpSubAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TAsterisk: + if level >= js_ast.LMultiply { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpMul, Left: left, Right: p.parseExpr(js_ast.LMultiply)}} + + case js_lexer.TAsteriskAsterisk: + if level >= js_ast.LExponentiation { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpPow, Left: left, Right: p.parseExpr(js_ast.LExponentiation - 1)}} + + case js_lexer.TAsteriskAsteriskEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpPowAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TAsteriskEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpMulAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TPercent: + if level >= js_ast.LMultiply { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpRem, Left: left, Right: p.parseExpr(js_ast.LMultiply)}} + + case js_lexer.TPercentEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpRemAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TSlash: + if level >= js_ast.LMultiply { + 
return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpDiv, Left: left, Right: p.parseExpr(js_ast.LMultiply)}} + + case js_lexer.TSlashEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpDivAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TEqualsEquals: + if level >= js_ast.LEquals { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpLooseEq, Left: left, Right: p.parseExpr(js_ast.LEquals)}} + + case js_lexer.TExclamationEquals: + if level >= js_ast.LEquals { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpLooseNe, Left: left, Right: p.parseExpr(js_ast.LEquals)}} + + case js_lexer.TEqualsEqualsEquals: + if level >= js_ast.LEquals { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpStrictEq, Left: left, Right: p.parseExpr(js_ast.LEquals)}} + + case js_lexer.TExclamationEqualsEquals: + if level >= js_ast.LEquals { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpStrictNe, Left: left, Right: p.parseExpr(js_ast.LEquals)}} + + case js_lexer.TLessThan: + // TypeScript allows type arguments to be specified with angle brackets + // inside an expression. Unlike in other languages, this unfortunately + // appears to require backtracking to parse. + if p.options.ts.Parse && p.trySkipTypeArgumentsInExpressionWithBacktracking() { + optionalChain = oldOptionalChain + continue + } + + if level >= js_ast.LCompare { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpLt, Left: left, Right: p.parseExpr(js_ast.LCompare)}} + + case js_lexer.TLessThanEquals: + if level >= js_ast.LCompare { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpLe, Left: left, Right: p.parseExpr(js_ast.LCompare)}} + + case js_lexer.TGreaterThan: + if level >= js_ast.LCompare { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpGt, Left: left, Right: p.parseExpr(js_ast.LCompare)}} + + case js_lexer.TGreaterThanEquals: + if level >= js_ast.LCompare { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpGe, Left: left, Right: p.parseExpr(js_ast.LCompare)}} + + case js_lexer.TLessThanLessThan: + // TypeScript allows type arguments to be specified with angle brackets + // inside an expression. Unlike in other languages, this unfortunately + // appears to require backtracking to parse. 
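+ // For example, "f<<T>() => T>(x)" passes the generic function type
+ // "<T>() => T" as a type argument in TypeScript, while the same "<<" token
+ // would otherwise just be the left-shift operator, as in "a << b". Only
+ // speculatively skipping a type argument list and then checking what follows
+ // can tell these apart, hence the backtracking attempt below.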
+ if p.options.ts.Parse && p.trySkipTypeArgumentsInExpressionWithBacktracking() { + optionalChain = oldOptionalChain + continue + } + + if level >= js_ast.LShift { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpShl, Left: left, Right: p.parseExpr(js_ast.LShift)}} + + case js_lexer.TLessThanLessThanEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpShlAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TGreaterThanGreaterThan: + if level >= js_ast.LShift { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpShr, Left: left, Right: p.parseExpr(js_ast.LShift)}} + + case js_lexer.TGreaterThanGreaterThanEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpShrAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TGreaterThanGreaterThanGreaterThan: + if level >= js_ast.LShift { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpUShr, Left: left, Right: p.parseExpr(js_ast.LShift)}} + + case js_lexer.TGreaterThanGreaterThanGreaterThanEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpUShrAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TQuestionQuestion: + if level >= js_ast.LNullishCoalescing { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpNullishCoalescing, Left: left, Right: p.parseExpr(js_ast.LNullishCoalescing)}} + + case js_lexer.TQuestionQuestionEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpNullishCoalescingAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TBarBar: + if level >= js_ast.LLogicalOr { + return left + } + + // Prevent "||" inside "??" from the right + if level == js_ast.LNullishCoalescing { + p.logNullishCoalescingErrorPrecedenceError("||") + } + + p.lexer.Next() + right := p.parseExpr(js_ast.LLogicalOr) + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpLogicalOr, Left: left, Right: right}} + + // Prevent "||" inside "??" from the left + if level < js_ast.LNullishCoalescing { + left = p.parseSuffix(left, js_ast.LNullishCoalescing+1, nil, flags) + if p.lexer.Token == js_lexer.TQuestionQuestion { + p.logNullishCoalescingErrorPrecedenceError("||") + } + } + + case js_lexer.TBarBarEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpLogicalOrAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TAmpersandAmpersand: + if level >= js_ast.LLogicalAnd { + return left + } + + // Prevent "&&" inside "??" from the right + if level == js_ast.LNullishCoalescing { + p.logNullishCoalescingErrorPrecedenceError("&&") + } + + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpLogicalAnd, Left: left, Right: p.parseExpr(js_ast.LLogicalAnd)}} + + // Prevent "&&" inside "??" 
from the left + if level < js_ast.LNullishCoalescing { + left = p.parseSuffix(left, js_ast.LNullishCoalescing+1, nil, flags) + if p.lexer.Token == js_lexer.TQuestionQuestion { + p.logNullishCoalescingErrorPrecedenceError("&&") + } + } + + case js_lexer.TAmpersandAmpersandEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpLogicalAndAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TBar: + if level >= js_ast.LBitwiseOr { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpBitwiseOr, Left: left, Right: p.parseExpr(js_ast.LBitwiseOr)}} + + case js_lexer.TBarEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpBitwiseOrAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TAmpersand: + if level >= js_ast.LBitwiseAnd { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpBitwiseAnd, Left: left, Right: p.parseExpr(js_ast.LBitwiseAnd)}} + + case js_lexer.TAmpersandEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpBitwiseAndAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TCaret: + if level >= js_ast.LBitwiseXor { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpBitwiseXor, Left: left, Right: p.parseExpr(js_ast.LBitwiseXor)}} + + case js_lexer.TCaretEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpBitwiseXorAssign, Left: left, Right: p.parseExpr(js_ast.LAssign - 1)}} + + case js_lexer.TEquals: + if level >= js_ast.LAssign { + return left + } + p.lexer.Next() + left = js_ast.Assign(left, p.parseExpr(js_ast.LAssign-1)) + + case js_lexer.TIn: + if level >= js_ast.LCompare || !p.allowIn { + return left + } + + // Warn about "!a in b" instead of "!(a in b)" + kind := logger.Warning + if p.suppressWarningsAboutWeirdCode { + kind = logger.Debug + } + if e, ok := left.Data.(*js_ast.EUnary); ok && e.Op == js_ast.UnOpNot { + r := logger.Range{Loc: left.Loc, Len: p.source.LocBeforeWhitespace(p.lexer.Loc()).Start - left.Loc.Start} + data := p.tracker.MsgData(r, "Suspicious use of the \"!\" operator inside the \"in\" operator") + data.Location.Suggestion = fmt.Sprintf("(%s)", p.source.TextForRange(r)) + p.log.AddMsgID(logger.MsgID_JS_SuspiciousBooleanNot, logger.Msg{ + Kind: kind, + Data: data, + Notes: []logger.MsgData{{Text: "The code \"!x in y\" is parsed as \"(!x) in y\". " + + "You need to insert parentheses to get \"!(x in y)\" instead."}}, + }) + } + + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpIn, Left: left, Right: p.parseExpr(js_ast.LCompare)}} + + case js_lexer.TInstanceof: + if level >= js_ast.LCompare { + return left + } + + // Warn about "!a instanceof b" instead of "!(a instanceof b)". Here's an + // example of code with this problem: https://github.com/mrdoob/three.js/pull/11182. 
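+ // For example, "!user instanceof Person" is parsed as
+ // "(!user) instanceof Person", which asks whether a boolean is an instance of
+ // "Person" and therefore always evaluates to false; the intended check is
+ // almost certainly "!(user instanceof Person)", which is what the note
+ // attached below suggests.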
+ kind := logger.Warning + if p.suppressWarningsAboutWeirdCode { + kind = logger.Debug + } + if e, ok := left.Data.(*js_ast.EUnary); ok && e.Op == js_ast.UnOpNot { + r := logger.Range{Loc: left.Loc, Len: p.source.LocBeforeWhitespace(p.lexer.Loc()).Start - left.Loc.Start} + data := p.tracker.MsgData(r, "Suspicious use of the \"!\" operator inside the \"instanceof\" operator") + data.Location.Suggestion = fmt.Sprintf("(%s)", p.source.TextForRange(r)) + p.log.AddMsgID(logger.MsgID_JS_SuspiciousBooleanNot, logger.Msg{ + Kind: kind, + Data: data, + Notes: []logger.MsgData{{Text: "The code \"!x instanceof y\" is parsed as \"(!x) instanceof y\". " + + "You need to insert parentheses to get \"!(x instanceof y)\" instead."}}, + }) + } + + p.lexer.Next() + left = js_ast.Expr{Loc: left.Loc, Data: &js_ast.EBinary{Op: js_ast.BinOpInstanceof, Left: left, Right: p.parseExpr(js_ast.LCompare)}} + + default: + // Handle the TypeScript "as"/"satisfies" operator + if p.options.ts.Parse && level < js_ast.LCompare && !p.lexer.HasNewlineBefore && (p.lexer.IsContextualKeyword("as") || p.lexer.IsContextualKeyword("satisfies")) { + p.lexer.Next() + p.skipTypeScriptType(js_ast.LLowest) + + // These tokens are not allowed to follow a cast expression. This isn't + // an outright error because it may be on a new line, in which case it's + // the start of a new expression when it's after a cast: + // + // x = y as z + // (something); + // + switch p.lexer.Token { + case js_lexer.TPlusPlus, js_lexer.TMinusMinus, js_lexer.TNoSubstitutionTemplateLiteral, + js_lexer.TTemplateHead, js_lexer.TOpenParen, js_lexer.TOpenBracket, js_lexer.TQuestionDot: + p.forbidSuffixAfterAsLoc = p.lexer.Loc() + return left + } + if p.lexer.Token.IsAssign() { + p.forbidSuffixAfterAsLoc = p.lexer.Loc() + return left + } + continue + } + + return left + } + } +} + +func (p *parser) parseExprOrLetOrUsingStmt(opts parseStmtOpts) (js_ast.Expr, js_ast.Stmt, []js_ast.Decl) { + couldBeLet := false + couldBeUsing := false + couldBeAwaitUsing := false + tokenRange := p.lexer.Range() + + if p.lexer.Token == js_lexer.TIdentifier { + raw := p.lexer.Raw() + couldBeLet = raw == "let" + couldBeUsing = raw == "using" + couldBeAwaitUsing = raw == "await" && p.fnOrArrowDataParse.await == allowExpr + } + + if !couldBeLet && !couldBeUsing && !couldBeAwaitUsing { + var flags exprFlag + if opts.isForLoopInit { + flags |= exprFlagForLoopInit + } + if opts.isForAwaitLoopInit { + flags |= exprFlagForAwaitLoopInit + } + return p.parseExprCommon(js_ast.LLowest, nil, flags), js_ast.Stmt{}, nil + } + + name := p.lexer.Identifier + p.lexer.Next() + + if couldBeLet { + isLet := opts.isExport + switch p.lexer.Token { + case js_lexer.TIdentifier, js_lexer.TOpenBracket, js_lexer.TOpenBrace: + if opts.lexicalDecl == lexicalDeclAllowAll || !p.lexer.HasNewlineBefore || p.lexer.Token == js_lexer.TOpenBracket { + isLet = true + } + } + if isLet { + // Handle a "let" declaration + if opts.lexicalDecl != lexicalDeclAllowAll { + p.forbidLexicalDecl(tokenRange.Loc) + } + p.markSyntaxFeature(compat.ConstAndLet, tokenRange) + decls := p.parseAndDeclareDecls(ast.SymbolOther, opts) + return js_ast.Expr{}, js_ast.Stmt{Loc: tokenRange.Loc, Data: &js_ast.SLocal{ + Kind: js_ast.LocalLet, + Decls: decls, + IsExport: opts.isExport, + }}, decls + } + } else if couldBeUsing && p.lexer.Token == js_lexer.TIdentifier && !p.lexer.HasNewlineBefore && (!opts.isForLoopInit || p.lexer.Raw() != "of") { + // Handle a "using" declaration + if opts.lexicalDecl != lexicalDeclAllowAll { + 
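+ // Like "let" above, "using" is a lexical declaration, so it is rejected in
+ // positions that only allow a plain statement; for example,
+ // "if (ok) using db = open()" is reported as an error here, just as
+ // "if (ok) let db = open()" would be.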
p.forbidLexicalDecl(tokenRange.Loc) + } + opts.isUsingStmt = true + decls := p.parseAndDeclareDecls(ast.SymbolConst, opts) + if !opts.isForLoopInit { + p.requireInitializers(js_ast.LocalUsing, decls) + } + return js_ast.Expr{}, js_ast.Stmt{Loc: tokenRange.Loc, Data: &js_ast.SLocal{ + Kind: js_ast.LocalUsing, + Decls: decls, + IsExport: opts.isExport, + }}, decls + } else if couldBeAwaitUsing { + // Handle an "await using" declaration + if p.fnOrArrowDataParse.isTopLevel { + p.topLevelAwaitKeyword = tokenRange + } + var value js_ast.Expr + if p.lexer.Token == js_lexer.TIdentifier && p.lexer.Raw() == "using" { + usingLoc := p.saveExprCommentsHere() + usingRange := p.lexer.Range() + p.lexer.Next() + if p.lexer.Token == js_lexer.TIdentifier && !p.lexer.HasNewlineBefore { + // It's an "await using" declaration if we get here + if opts.lexicalDecl != lexicalDeclAllowAll { + p.forbidLexicalDecl(usingRange.Loc) + } + opts.isUsingStmt = true + decls := p.parseAndDeclareDecls(ast.SymbolConst, opts) + if !opts.isForLoopInit { + p.requireInitializers(js_ast.LocalAwaitUsing, decls) + } + return js_ast.Expr{}, js_ast.Stmt{Loc: tokenRange.Loc, Data: &js_ast.SLocal{ + Kind: js_ast.LocalAwaitUsing, + Decls: decls, + IsExport: opts.isExport, + }}, decls + } + value = js_ast.Expr{Loc: usingLoc, Data: &js_ast.EIdentifier{Ref: p.storeNameInRef(js_lexer.MaybeSubstring{String: "using"})}} + } else { + value = p.parseExpr(js_ast.LPrefix) + } + if p.lexer.Token == js_lexer.TAsteriskAsterisk { + p.lexer.Unexpected() + } + value = p.parseSuffix(value, js_ast.LPrefix, nil, 0) + expr := js_ast.Expr{Loc: tokenRange.Loc, Data: &js_ast.EAwait{Value: value}} + return p.parseSuffix(expr, js_ast.LLowest, nil, 0), js_ast.Stmt{}, nil + } + + // Parse the remainder of this expression that starts with an identifier + expr := js_ast.Expr{Loc: tokenRange.Loc, Data: &js_ast.EIdentifier{Ref: p.storeNameInRef(name)}} + return p.parseSuffix(expr, js_ast.LLowest, nil, 0), js_ast.Stmt{}, nil +} + +func (p *parser) parseCallArgs() (args []js_ast.Expr, closeParenLoc logger.Loc, isMultiLine bool) { + // Allow "in" inside call arguments + oldAllowIn := p.allowIn + p.allowIn = true + + p.lexer.Expect(js_lexer.TOpenParen) + + for p.lexer.Token != js_lexer.TCloseParen { + if p.lexer.HasNewlineBefore { + isMultiLine = true + } + loc := p.lexer.Loc() + isSpread := p.lexer.Token == js_lexer.TDotDotDot + if isSpread { + p.markSyntaxFeature(compat.RestArgument, p.lexer.Range()) + p.lexer.Next() + } + arg := p.parseExpr(js_ast.LComma) + if isSpread { + arg = js_ast.Expr{Loc: loc, Data: &js_ast.ESpread{Value: arg}} + } + args = append(args, arg) + if p.lexer.Token != js_lexer.TComma { + break + } + if p.lexer.HasNewlineBefore { + isMultiLine = true + } + p.lexer.Next() + } + + if p.lexer.HasNewlineBefore { + isMultiLine = true + } + closeParenLoc = p.saveExprCommentsHere() + p.lexer.Expect(js_lexer.TCloseParen) + p.allowIn = oldAllowIn + return +} + +func (p *parser) parseJSXNamespacedName() (logger.Range, js_lexer.MaybeSubstring) { + nameRange := p.lexer.Range() + name := p.lexer.Identifier + p.lexer.ExpectInsideJSXElement(js_lexer.TIdentifier) + + // Parse JSX namespaces. These are not supported by React or TypeScript + // but someone using JSX syntax in more obscure ways may find a use for + // them. A namespaced name is just always turned into a string so you + // can't use this feature to reference JavaScript identifiers. 
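+ // For example, in "<svg:circle xlink:href='#dot'/>" both "svg:circle" and
+ // "xlink:href" stay plain strings in the output; the "svg" part never
+ // resolves to a JavaScript identifier that happens to be in scope.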
+ if p.lexer.Token == js_lexer.TColon { + // Parse the colon + nameRange.Len = p.lexer.Range().End() - nameRange.Loc.Start + ns := name.String + ":" + p.lexer.NextInsideJSXElement() + + // Parse the second identifier + if p.lexer.Token == js_lexer.TIdentifier { + nameRange.Len = p.lexer.Range().End() - nameRange.Loc.Start + ns += p.lexer.Identifier.String + p.lexer.NextInsideJSXElement() + } else { + p.log.AddError(&p.tracker, logger.Range{Loc: logger.Loc{Start: nameRange.End()}}, + fmt.Sprintf("Expected identifier after %q in namespaced JSX name", ns)) + panic(js_lexer.LexerPanic{}) + } + return nameRange, js_lexer.MaybeSubstring{String: ns} + } + + return nameRange, name +} + +func tagOrFragmentHelpText(tag string) string { + if tag == "" { + return "fragment tag" + } + return fmt.Sprintf("%q tag", tag) +} + +func (p *parser) parseJSXTag() (logger.Range, string, js_ast.Expr) { + loc := p.lexer.Loc() + + // A missing tag is a fragment + if p.lexer.Token == js_lexer.TGreaterThan { + return logger.Range{Loc: loc, Len: 0}, "", js_ast.Expr{} + } + + // The tag is an identifier + tagRange, tagName := p.parseJSXNamespacedName() + + // Certain identifiers are strings + if strings.ContainsAny(tagName.String, "-:") || (p.lexer.Token != js_lexer.TDot && tagName.String[0] >= 'a' && tagName.String[0] <= 'z') { + return tagRange, tagName.String, js_ast.Expr{Loc: loc, Data: &js_ast.EString{Value: helpers.StringToUTF16(tagName.String)}} + } + + // Otherwise, this is an identifier + tag := js_ast.Expr{Loc: loc, Data: &js_ast.EIdentifier{Ref: p.storeNameInRef(tagName)}} + + // Parse a member expression chain + chain := tagName.String + for p.lexer.Token == js_lexer.TDot { + p.lexer.NextInsideJSXElement() + memberRange := p.lexer.Range() + member := p.lexer.Identifier + p.lexer.ExpectInsideJSXElement(js_lexer.TIdentifier) + + // Dashes are not allowed in member expression chains + index := strings.IndexByte(member.String, '-') + if index >= 0 { + p.log.AddError(&p.tracker, logger.Range{Loc: logger.Loc{Start: memberRange.Loc.Start + int32(index)}}, + "Unexpected \"-\"") + panic(js_lexer.LexerPanic{}) + } + + chain += "." + member.String + tag = js_ast.Expr{Loc: loc, Data: p.dotOrMangledPropParse(tag, member, memberRange.Loc, js_ast.OptionalChainNone, wasOriginallyDot)} + tagRange.Len = memberRange.Loc.Start + memberRange.Len - tagRange.Loc.Start + } + + return tagRange, chain, tag +} + +func (p *parser) parseJSXElement(loc logger.Loc) js_ast.Expr { + // Keep track of the location of the first JSX element for error messages + if p.firstJSXElementLoc.Start == -1 { + p.firstJSXElementLoc = loc + } + + // Parse the tag + startRange, startText, startTagOrNil := p.parseJSXTag() + + // The tag may have TypeScript type arguments: "/>" + if p.options.ts.Parse { + // Pass a flag to the type argument skipper because we need to call + // js_lexer.NextInsideJSXElement() after we hit the closing ">". 
The next + // token after the ">" might be an attribute name with a dash in it + // like this: " data-disabled/>" + p.skipTypeScriptTypeArguments(skipTypeScriptTypeArgumentsOpts{isInsideJSXElement: true}) + } + + // Parse attributes + var previousStringWithBackslashLoc logger.Loc + properties := []js_ast.Property{} + isSingleLine := true + if startTagOrNil.Data != nil { + parseAttributes: + for { + if p.lexer.HasNewlineBefore { + isSingleLine = false + } + + switch p.lexer.Token { + case js_lexer.TIdentifier: + // Parse the key + keyRange, keyName := p.parseJSXNamespacedName() + var key js_ast.Expr + if p.isMangledProp(keyName.String) && !strings.ContainsRune(keyName.String, ':') { + key = js_ast.Expr{Loc: keyRange.Loc, Data: &js_ast.ENameOfSymbol{Ref: p.storeNameInRef(keyName)}} + } else { + key = js_ast.Expr{Loc: keyRange.Loc, Data: &js_ast.EString{Value: helpers.StringToUTF16(keyName.String)}} + } + + // Parse the value + var value js_ast.Expr + var flags js_ast.PropertyFlags + if p.lexer.Token != js_lexer.TEquals { + // Implicitly true value + flags |= js_ast.PropertyWasShorthand + value = js_ast.Expr{Loc: logger.Loc{Start: keyRange.Loc.Start + keyRange.Len}, Data: &js_ast.EBoolean{Value: true}} + } else { + // Use NextInsideJSXElement() not Next() so we can parse a JSX-style string literal + p.lexer.NextInsideJSXElement() + if p.lexer.Token == js_lexer.TStringLiteral { + stringLoc := p.lexer.Loc() + if p.lexer.PreviousBackslashQuoteInJSX.Loc.Start > stringLoc.Start { + previousStringWithBackslashLoc = stringLoc + } + if p.options.jsx.Preserve { + value = js_ast.Expr{Loc: stringLoc, Data: &js_ast.EJSXText{Raw: p.lexer.Raw()}} + } else { + value = js_ast.Expr{Loc: stringLoc, Data: &js_ast.EString{Value: p.lexer.StringLiteral()}} + } + p.lexer.NextInsideJSXElement() + } else if p.lexer.Token == js_lexer.TLessThan { + // This may be removed in the future: https://github.com/facebook/jsx/issues/53 + loc := p.lexer.Loc() + p.lexer.NextInsideJSXElement() + flags |= js_ast.PropertyWasShorthand + value = p.parseJSXElement(loc) + + // The call to parseJSXElement() above doesn't consume the last + // TGreaterThan because the caller knows what Next() function to call. + // Use NextJSXElementChild() here since the next token is inside a JSX + // element. + p.lexer.NextInsideJSXElement() + } else { + // Use Expect() not ExpectInsideJSXElement() so we can parse expression tokens + p.lexer.Expect(js_lexer.TOpenBrace) + value = p.parseExpr(js_ast.LLowest) + p.lexer.ExpectInsideJSXElement(js_lexer.TCloseBrace) + } + } + + // Add a property + properties = append(properties, js_ast.Property{ + Loc: keyRange.Loc, + Key: key, + ValueOrNil: value, + Flags: flags, + }) + + case js_lexer.TOpenBrace: + // Use Next() not ExpectInsideJSXElement() so we can parse "..." 
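+ // This is the JSX spread-attribute form, as in "<div {...props} />": the
+ // expression after "..." is parsed below and recorded as a PropertySpread
+ // entry on the element.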
+ p.lexer.Next() + dotLoc := p.saveExprCommentsHere() + p.lexer.Expect(js_lexer.TDotDotDot) + value := p.parseExpr(js_ast.LComma) + properties = append(properties, js_ast.Property{ + Kind: js_ast.PropertySpread, + Loc: dotLoc, + ValueOrNil: value, + }) + + // Use NextInsideJSXElement() not Next() so we can parse ">>" as ">" + p.lexer.NextInsideJSXElement() + + default: + break parseAttributes + } + } + + // Check for and warn about duplicate attributes + if len(properties) > 1 && !p.suppressWarningsAboutWeirdCode { + keys := make(map[string]logger.Loc) + for _, property := range properties { + if property.Kind != js_ast.PropertySpread { + if str, ok := property.Key.Data.(*js_ast.EString); ok { + key := helpers.UTF16ToString(str.Value) + if prevLoc, ok := keys[key]; ok { + r := js_lexer.RangeOfIdentifier(p.source, property.Key.Loc) + p.log.AddIDWithNotes(logger.MsgID_JS_DuplicateObjectKey, logger.Warning, &p.tracker, r, + fmt.Sprintf("Duplicate %q attribute in JSX element", key), + []logger.MsgData{p.tracker.MsgData(js_lexer.RangeOfIdentifier(p.source, prevLoc), + fmt.Sprintf("The original %q attribute is here:", key))}) + } + keys[key] = property.Key.Loc + } + } + } + } + } + + // People sometimes try to use the output of "JSON.stringify()" as a JSX + // attribute when automatically-generating JSX code. Doing so is incorrect + // because JSX strings work like XML instead of like JS (since JSX is XML-in- + // JS). Specifically, using a backslash before a quote does not cause it to + // be escaped: + // + // JSX ends the "content" attribute here and sets "content" to 'some so-called \\' + // v + //