Commit e8ac551

update
1 parent 769030c commit e8ac551

2 files changed (+45, -20 lines)


crates/biome_markdown_parser/src/lexer/mod.rs

+7 -12

@@ -3,6 +3,7 @@ mod tests;

 use biome_markdown_syntax::MarkdownSyntaxKind;
 use biome_markdown_syntax::MarkdownSyntaxKind::*;
+use biome_markdown_syntax::T;
 use biome_parser::diagnostic::ParseDiagnostic;
 use biome_parser::lexer::{
     LexContext, Lexer, LexerCheckpoint, LexerWithCheckpoint, ReLexer, TokenFlags,
@@ -187,21 +188,15 @@ impl<'src> MarkdownLexer<'src> {
     fn consume_header(&mut self) -> MarkdownSyntaxKind {
         self.assert_at_char_boundary();

-        let mut level = 0;
-        while matches!(self.current_byte(), Some(b'#')) {
+        // Just consume a single hash character and return its token
+        if matches!(self.current_byte(), Some(b'#')) {
             self.advance(1);
-            level += 1;
+            return T![#];
         }

-        match level {
-            1 => MD_HEADER1,
-            2 => MD_HEADER2,
-            3 => MD_HEADER3,
-            4 => MD_HEADER4,
-            5 => MD_HEADER5,
-            6 => MD_HEADER6,
-            _ => ERROR_TOKEN,
-        }
+        // This shouldn't be reached if this function is called correctly
+        // but handle the error case anyway
+        self.consume_textual()
     }

     fn text_position(&self) -> TextSize {
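
With the MD_HEADER1 through MD_HEADER6 kinds no longer produced by the lexer, the heading level presumably has to be reconstructed by a later stage from the run of consecutive `#` tokens. A minimal sketch of that counting step, assuming it now lives in the parser; the function below is illustrative only and not part of this commit:

    /// Illustrative only: map a run of consecutive `#` tokens to an ATX
    /// heading level. CommonMark allows at most six hashes, which mirrors
    /// the `_ => ERROR_TOKEN` arm that was removed from the lexer.
    fn heading_level(hash_count: usize) -> Option<u8> {
        match hash_count {
            1..=6 => Some(hash_count as u8),
            // Zero hashes, or more than six: not a heading.
            _ => None,
        }
    }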

crates/biome_markdown_parser/src/lexer/tests.rs

+38 -8

@@ -25,6 +25,8 @@ macro_rules! assert_lex {
        tokens.push((lexer.current(), lexer.current_range()));
    }

+   // TODO: remove this debug print
+   println!("tokens: {:#?}", tokens);

    $(
        assert_eq!(
@@ -145,7 +147,7 @@ fn whitespace() {
 fn heading_level_1() {
     assert_lex! {
         "# Heading 1",
-        MD_HEADER1:1,
+        HASH:1,
         WHITESPACE:1,
         MD_TEXTUAL_LITERAL:9,
     }
@@ -155,7 +157,7 @@ fn heading_level_1() {
 fn heading_level_1_with_newline() {
     assert_lex! {
         "# Heading 1\n",
-        MD_HEADER1:1,
+        HASH:1,
         WHITESPACE:1,
         MD_TEXTUAL_LITERAL:9,
         NEWLINE:1,
@@ -166,7 +168,8 @@ fn heading_level_1_with_newline() {
 fn heading_level_2() {
     assert_lex! {
         "## Heading 2",
-        MD_HEADER2:2,
+        HASH:1,
+        HASH:1,
         WHITESPACE:1,
         MD_TEXTUAL_LITERAL:9,
     }
@@ -176,7 +179,9 @@ fn heading_level_2() {
 fn heading_level_3() {
     assert_lex! {
         "### Heading 3",
-        MD_HEADER3:3,
+        HASH:1,
+        HASH:1,
+        HASH:1,
         WHITESPACE:1,
         MD_TEXTUAL_LITERAL:9,
     }
@@ -186,7 +191,10 @@ fn heading_level_3() {
 fn heading_level_4() {
     assert_lex! {
         "#### Heading 4",
-        MD_HEADER4:4,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
         WHITESPACE:1,
         MD_TEXTUAL_LITERAL:9,
     }
@@ -196,7 +204,11 @@ fn heading_level_4() {
 fn heading_level_5() {
     assert_lex! {
         "##### Heading 5",
-        MD_HEADER5:5,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
         WHITESPACE:1,
         MD_TEXTUAL_LITERAL:9,
     }
@@ -206,7 +218,12 @@ fn heading_level_5() {
 fn heading_level_6() {
     assert_lex! {
         "###### Heading 6",
-        MD_HEADER6:6,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
         WHITESPACE:1,
         MD_TEXTUAL_LITERAL:9,
     }
@@ -217,7 +234,20 @@ fn heading_level_6() {
 fn not_a_heading() {
     assert_lex! {
         "############## not-heading",
-        ERROR_TOKEN:14,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
+        HASH:1,
         WHITESPACE:1,
         MD_TEXTUAL_LITERAL:3,
         ERROR_TOKEN:1,
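
In these expectations each `KIND:length` pair asserts one token of that kind spanning that many bytes, so a level-N heading now expects N consecutive HASH:1 entries before the WHITESPACE and MD_TEXTUAL_LITERAL tokens. A hedged sketch of a helper that would build such an expected sequence instead of spelling it out by hand; the helper and its names are illustrative assumptions, not part of the assert_lex macro:

    // Illustrative only: expected (kind, byte length) pairs for an ATX
    // heading input such as "### Heading 3" under the new single-hash lexing.
    fn expected_heading_tokens(level: usize, text_len: usize) -> Vec<(&'static str, usize)> {
        let mut tokens = vec![("HASH", 1); level];         // one HASH token per '#'
        tokens.push(("WHITESPACE", 1));                     // the space after the hashes
        tokens.push(("MD_TEXTUAL_LITERAL", text_len));      // the heading text
        tokens
    }

For "### Heading 3" this yields three HASH:1 entries, WHITESPACE:1, and MD_TEXTUAL_LITERAL:9, matching the updated heading_level_3 test above.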
