Make the parser drive the lexer
neuroevolutus authored and tekknolagi committed Jan 27, 2025
1 parent 00845e9 commit c4f975b
Showing 1 changed file with 6 additions and 5 deletions.
11 changes: 6 additions & 5 deletions scrapscript.py
@@ -18,7 +18,7 @@
 from dataclasses import dataclass
 from enum import auto
 from types import ModuleType
-from typing import Any, Callable, Dict, Iterator, Mapping, Optional, Set, Tuple, Union
+from typing import Any, Callable, Dict, Generator, Iterator, Mapping, Optional, Set, Tuple, Union
 
 readline: Optional[ModuleType]
 try:
@@ -218,6 +218,10 @@ def make_token(self, cls: type, *args: Any) -> Token:
 
         return result
 
+    def read_tokens(self) -> Generator[Token, None, None]:
+        while (token := self.read_token()) and not isinstance(token, EOF):
+            yield token
+
     def read_token(self) -> Token:
         # Consume all whitespace
         while self.has_input():
@@ -407,10 +411,7 @@ def test_next_on_empty_peekable_raises_stop_iteration(self) -> None:
 
 def tokenize(x: str) -> Peekable:
     lexer = Lexer(x)
-    tokens = []
-    while (token := lexer.read_token()) and not isinstance(token, EOF):
-        tokens.append(token)
-    return Peekable(iter(tokens))
+    return Peekable(lexer.read_tokens())
 
 
 @dataclass(frozen=True)
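The net effect: `tokenize` no longer materializes the full token list up front. The parser pulls tokens on demand through the `Peekable` wrapping the generator, and the generator body does not run until the first token is requested. Below is a minimal self-contained sketch of the same pattern; `WordLexer` and `Name` here are simplified stand-ins for illustration, not scrapscript's actual lexer.

    from typing import Generator


    class Token:
        pass


    class EOF(Token):
        pass


    class Name(Token):
        def __init__(self, value: str) -> None:
            self.value = value


    class WordLexer:
        """Stand-in lexer: every whitespace-separated word becomes a Name."""

        def __init__(self, text: str) -> None:
            self.words = text.split()
            self.pos = 0

        def read_token(self) -> Token:
            if self.pos >= len(self.words):
                return EOF()
            word = self.words[self.pos]
            self.pos += 1
            print(f"lexing {word!r}")  # visible side effect to show laziness
            return Name(word)

        def read_tokens(self) -> Generator[Token, None, None]:
            # Same shape as the method added in this commit: yield until EOF.
            while (token := self.read_token()) and not isinstance(token, EOF):
                yield token


    tokens = WordLexer("a b c").read_tokens()
    # Nothing has been lexed yet: calling the generator function only
    # creates the generator; its body runs lazily.
    first = next(tokens)   # prints: lexing 'a'
    second = next(tokens)  # prints: lexing 'b'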
