from typing import Type, Any, Generator, Mapping, Callable

from typeit import TypeConstructor
from typeit.tokenizer import iter_tokens, Token


# Per-token renderers: each receives a token instance and returns the
# corresponding fragment of a GraphQL selection set.
translate_begin_type = lambda x: f'{x.python_name} {{'
translate_begin_type_inner = lambda x: '{'
translate_end_type = lambda x: '}'
translate_begin_attribute = lambda x: f'{x.wire_name}'
translate_end_attribute = lambda x: ' '


# Maps token classes to the renderer used for instances of that class.
translation_map: Mapping[Type[Any], Callable[[Any], str]] = {
    Token.BeginType: translate_begin_type,
    Token.EndType: translate_end_type,
    Token.BeginAttribute: translate_begin_attribute,
    Token.EndAttribute: translate_end_attribute,
}


def translate_tokens_to_graphql(typ: Type[Any]) -> Generator[str, None, None]:
    """ For GraphQL queries BeginType is translated with the type name only once -
    for the topmost type; every nested type opens with a plain brace.
    """
    query_type_began = False
    for token in iter_tokens(typ, typer=TypeConstructor):
        # Dispatch on the token's class; the for/else raises if no renderer matches.
        for token_type, do_translate in translation_map.items():
            if isinstance(token, token_type):
                if token_type is Token.BeginType:
                    if query_type_began:
                        yield translate_begin_type_inner(token)
                    else:
                        query_type_began = True
                        yield do_translate(token)
                else:
                    yield do_translate(token)
                break
        else:
            raise ValueError(f'Unhandled token: {token}')


def translate(typ: Type[Any]) -> str:
    return ''.join(translate_tokens_to_graphql(typ))
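

# A minimal usage sketch, not part of the original module: it assumes the typeit
# tokenizer can walk a NamedTuple hierarchy the same way TypeConstructor does,
# and the exact output depends on the token order produced by ``iter_tokens``
# (roughly ``User { name address { street city } }`` for the hypothetical types below).
if __name__ == '__main__':
    from typing import NamedTuple

    class Address(NamedTuple):
        street: str
        city: str

    class User(NamedTuple):
        name: str
        address: Address

    print(translate(User))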