L3.parse
# Parse L3 source text into the syntax-tree types defined in `.syntax`.

from collections.abc import Sequence
from functools import lru_cache
from pathlib import Path

from lark import Lark, Token, Transformer
from lark.visitors import v_args  # pyright: ignore[reportUnknownVariableType]

from .syntax import (
    Abstract,
    Allocate,
    Apply,
    Begin,
    Branch,
    Identifier,
    Immediate,
    Let,
    LetRec,
    Load,
    Primitive,
    Program,
    Reference,
    Store,
    Term,
)


class AstTransformer(Transformer[Token, Program | Term]):
    """Bottom-up transformer from a Lark parse tree to L3 AST nodes.

    Each method handles the grammar rule of the same name; Lark calls it
    with the rule's (already transformed) children and substitutes the
    returned value for the tree node.
    """

    @v_args(inline=True)
    def program(
        self,
        _program: Token,
        parameters: Sequence[Identifier],
        body: Term,
    ) -> Program:
        """Build the top-level program node; the keyword token is dropped."""
        return Program(
            parameters=parameters,
            body=body,
        )

    def parameters(
        self,
        parameters: Sequence[Token],
    ) -> Sequence[Identifier]:
        """Convert parameter-name tokens to plain identifier strings."""
        return [str(p) for p in parameters]

    @v_args(inline=True)
    def term(
        self,
        term: Term,
    ) -> Term:
        """Pass a single wrapped term through unchanged."""
        return term

    @v_args(inline=True)
    def let(
        self,
        _let: Token,
        bindings: Sequence[tuple[Identifier, Term]],
        body: Term,
    ) -> Term:
        """Non-recursive binding form."""
        return Let(
            bindings=bindings,
            body=body,
        )

    @v_args(inline=True)
    def letrec(
        self,
        _letrec: Token,
        bindings: Sequence[tuple[Identifier, Term]],
        body: Term,
    ) -> Term:
        """Recursive binding form."""
        return LetRec(
            bindings=bindings,
            body=body,
        )

    def bindings(
        self,
        bindings: Sequence[tuple[Identifier, Term]],
    ) -> Sequence[tuple[Identifier, Term]]:
        """Collect already-built (name, value) binding pairs."""
        return bindings

    @v_args(inline=True)
    def binding(
        self,
        name: Token,
        value: Term,
    ) -> tuple[Identifier, Term]:
        """Pair a bound name with its defining term."""
        return str(name), value

    @v_args(inline=True)
    def reference(
        self,
        name: Token,
    ) -> Term:
        """Variable reference."""
        return Reference(name=str(name))

    @v_args(inline=True)
    def abstract(
        self,
        _lambda: Token,
        parameters: Sequence[Identifier],
        body: Term,
    ) -> Term:
        """Lambda abstraction."""
        return Abstract(
            parameters=parameters,
            body=body,
        )

    @v_args(inline=True)
    def apply(
        self,
        target: Term,
        *arguments: Term,
    ) -> Term:
        """Application of a target term to zero or more argument terms."""
        return Apply(
            target=target,
            arguments=list(arguments),
        )

    @v_args(inline=True)
    def immediate(
        self,
        value: Token,
    ) -> Term:
        """Integer literal."""
        return Immediate(value=int(value))

    @v_args(inline=True)
    def primitive(
        self,
        operator: Token,
        left: Term,
        right: Term,
    ) -> Term:
        """Binary primitive operation."""
        return Primitive(
            operator=str(operator),  # type: ignore
            left=left,
            right=right,
        )

    @v_args(inline=True)
    def branch(
        self,
        _if: Token,
        operator: Token,
        left: Term,
        right: Term,
        consequent: Term,
        otherwise: Term,
    ) -> Term:
        """Two-way conditional on a binary comparison."""
        return Branch(
            operator=str(operator),  # type: ignore
            left=left,
            right=right,
            consequent=consequent,
            otherwise=otherwise,
        )

    @v_args(inline=True)
    def allocate(
        self,
        _allocate: Token,
        count: Immediate,
    ) -> Term:
        """Allocation of a literal (Immediate) number of cells."""
        return Allocate(
            count=count.value,
        )

    @v_args(inline=True)
    def load(
        self,
        _load: Token,
        base: Term,
        index: Immediate,
    ) -> Term:
        """Read from `base` at a literal index."""
        return Load(
            base=base,
            index=index.value,
        )

    @v_args(inline=True)
    def store(
        self,
        _store: Token,
        base: Term,
        index: Immediate,
        value: Term,
    ) -> Term:
        """Write `value` into `base` at a literal index."""
        return Store(
            base=base,
            index=index.value,
            value=value,
        )

    def begin(
        self,
        args: Sequence[Token | Term],
    ) -> Term:
        """Sequencing form: evaluate all terms, yield the last one's value.

        Not inlined because the child count varies; keyword tokens (like
        the BEGIN token) are filtered out, keeping only the term children.
        """
        terms = [arg for arg in args if not isinstance(arg, Token)]
        if len(terms) == 0:
            raise ValueError("begin requires at least one term")
        return Begin(
            effects=terms[:-1],
            value=terms[-1],
        )


@lru_cache(maxsize=None)
def _parser(start: str) -> Lark:
    """Build a Lark parser for the L3 grammar, once per start symbol.

    Reading the grammar file and constructing the parser is relatively
    expensive, so the result is cached across parse calls. The grammar
    file is package data and assumed not to change at runtime.
    """
    grammar = Path(__file__).with_name("L3.lark").read_text()
    return Lark(grammar, start=start)


def _parse(source: str, start: str) -> Program | Term:
    """Parse `source` from the given start symbol and transform to an AST."""
    tree = _parser(start).parse(source)  # pyright: ignore[reportUnknownMemberType]
    return AstTransformer().transform(tree)


def parse_term(source: str) -> Term:
    """Parse `source` as a single L3 term."""
    return _parse(source, "term")  # pyright: ignore[reportReturnType]


def parse_program(source: str) -> Program:
    """Parse `source` as a complete L3 program."""
    return _parse(source, "program")  # pyright: ignore[reportReturnType]
class AstTransformer(Transformer[Token, Program | Term]):
    """Lark transformer producing L3 AST nodes, one callback per rule.

    Lark rewrites the tree depth-first, so by the time a callback fires
    its children have already been converted to AST values.
    """

    @v_args(inline=True)
    def program(self, _program: Token, parameters: Sequence[Identifier], body: Term) -> Program:
        """Assemble the root program node, discarding the keyword token."""
        return Program(parameters=parameters, body=body)

    def parameters(self, parameters: Sequence[Token]) -> Sequence[Identifier]:
        """Turn name tokens into identifier strings."""
        return list(map(str, parameters))

    @v_args(inline=True)
    def term(self, term: Term) -> Term:
        """Unwrap a nested term node."""
        return term

    @v_args(inline=True)
    def let(self, _let: Token, bindings: Sequence[tuple[Identifier, Term]], body: Term) -> Term:
        """Non-recursive let bindings."""
        return Let(bindings=bindings, body=body)

    @v_args(inline=True)
    def letrec(self, _letrec: Token, bindings: Sequence[tuple[Identifier, Term]], body: Term) -> Term:
        """Recursive let bindings."""
        return LetRec(bindings=bindings, body=body)

    def bindings(self, bindings: Sequence[tuple[Identifier, Term]]) -> Sequence[tuple[Identifier, Term]]:
        """Already-converted binding pairs pass through untouched."""
        return bindings

    @v_args(inline=True)
    def binding(self, name: Token, value: Term) -> tuple[Identifier, Term]:
        """One (identifier, definition) pair."""
        return (str(name), value)

    @v_args(inline=True)
    def reference(self, name: Token) -> Term:
        """A use of a bound name."""
        return Reference(name=str(name))

    @v_args(inline=True)
    def abstract(self, _lambda: Token, parameters: Sequence[Identifier], body: Term) -> Term:
        """A lambda abstraction."""
        return Abstract(parameters=parameters, body=body)

    @v_args(inline=True)
    def apply(self, target: Term, *arguments: Term) -> Term:
        """Application of `target` to any number of argument terms."""
        return Apply(target=target, arguments=[*arguments])

    @v_args(inline=True)
    def immediate(self, value: Token) -> Term:
        """An integer literal."""
        return Immediate(value=int(value))

    @v_args(inline=True)
    def primitive(self, operator: Token, left: Term, right: Term) -> Term:
        """A binary primitive operation."""
        op = str(operator)
        return Primitive(operator=op, left=left, right=right)  # type: ignore

    @v_args(inline=True)
    def branch(
        self,
        _if: Token,
        operator: Token,
        left: Term,
        right: Term,
        consequent: Term,
        otherwise: Term,
    ) -> Term:
        """A conditional comparing `left` and `right` with `operator`."""
        op = str(operator)
        return Branch(  # type: ignore
            operator=op,
            left=left,
            right=right,
            consequent=consequent,
            otherwise=otherwise,
        )

    @v_args(inline=True)
    def allocate(self, _allocate: Token, count: Immediate) -> Term:
        """Allocation of a literal number of cells."""
        return Allocate(count=count.value)

    @v_args(inline=True)
    def load(self, _load: Token, base: Term, index: Immediate) -> Term:
        """Read from `base` at a literal index."""
        return Load(base=base, index=index.value)

    @v_args(inline=True)
    def store(self, _store: Token, base: Term, index: Immediate, value: Term) -> Term:
        """Write `value` into `base` at a literal index."""
        return Store(base=base, index=index.value, value=value)

    def begin(self, args: Sequence[Token | Term]) -> Term:
        """Sequence of terms; the final term supplies the result value."""
        # Keyword tokens (such as BEGIN) are not terms; keep only Terms.
        terms = [child for child in args if not isinstance(child, Token)]
        if not terms:
            raise ValueError("begin requires at least one term")
        *effects, result = terms
        return Begin(effects=effects, value=result)
Transformers work bottom-up (or depth-first), starting with visiting the leaves and working their way up until ending at the root of the tree.
For each node visited, the transformer will call the appropriate method (callbacks), according to the
node's data, and use the returned value to replace the node, thereby creating a new tree structure.
Transformers can be used to implement map & reduce patterns. Because nodes are reduced from leaf to root, at any point the callbacks may assume the children have already been transformed (if applicable).
If the transformer cannot find a method with the right name, it will instead call __default__, which by
default creates a copy of the node.
To discard a node, return Discard (lark.visitors.Discard).
A ``Transformer`` can do anything a ``Visitor`` can do, but because it reconstructs the tree,
it is slightly less efficient.
A transformer without methods essentially performs a non-memoized partial deepcopy.
All these classes implement the transformer interface:
- ``Transformer`` — Recursively transforms the tree. This is the one you probably want.
- ``Transformer_InPlace`` — Non-recursive. Changes the tree in-place instead of returning new instances.
- ``Transformer_InPlaceRecursive`` — Recursive. Changes the tree in-place instead of returning new instances.
Parameters:
visit_tokens (bool, optional): Should the transformer visit tokens in addition to rules.
Setting this to False is slightly faster. Defaults to True.
(For processing ignored tokens, use the lexer_callbacks options)
141 @v_args(inline=True) 142 def branch( 143 self, 144 _if: Token, 145 operator: Token, 146 left: Term, 147 right: Term, 148 consequent: Term, 149 otherwise: Term, 150 ) -> Term: 151 return Branch( 152 operator=str(operator), # type: ignore 153 left=left, 154 right=right, 155 consequent=consequent, 156 otherwise=otherwise, 157 )
195 def begin( 196 self, 197 args: Sequence[Token | Term], 198 ) -> Term: 199 # Filter out Token objects (like BEGIN token), keep only Terms 200 terms = [arg for arg in args if not isinstance(arg, Token)] 201 if len(terms) == 0: 202 raise ValueError("begin requires at least one term") 203 return Begin( 204 effects=terms[:-1], 205 value=terms[-1], 206 )
def parse_program(source: str) -> Program:
    """Parse `source` text as a complete L3 program."""
    grammar_path = Path(__file__).with_name("L3.lark")
    parser = Lark(grammar_path.read_text(), start="program")
    tree = parser.parse(source)  # pyright: ignore[reportUnknownMemberType]
    return AstTransformer().transform(tree)  # pyright: ignore[reportReturnType]