rustpython-ast-0.2.0/.cargo_vcs_info.json
{
  "git": {
    "sha1": "c7faae9b22ce31a3ba1f2cc1cd3ad759b54ce100"
  },
  "path_in_vcs": "compiler/ast"
}

rustpython-ast-0.2.0/Cargo.toml
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2021"
name = "rustpython-ast"
version = "0.2.0"
authors = ["RustPython Team"]
description = "AST definitions for RustPython"
license = "MIT"
repository = "https://github.com/RustPython/RustPython"

[dependencies.num-bigint]
version = "0.4.3"

[dependencies.rustpython-common]
version = "0.2.0"
optional = true

[dependencies.rustpython-compiler-core]
version = "0.2.0"

[features]
constant-optimization = ["fold"]
default = [
    "constant-optimization",
    "fold",
]
fold = []
unparse = ["rustpython-common"]

rustpython-ast-0.2.0/Cargo.toml.orig
[package]
name = "rustpython-ast"
version = "0.2.0"
description = "AST definitions for RustPython"
authors = ["RustPython Team"]
edition = "2021"
repository = "https://github.com/RustPython/RustPython"
license = "MIT"

[features]
default = ["constant-optimization", "fold"]
constant-optimization = ["fold"]
fold = []
unparse = ["rustpython-common"]

[dependencies]
num-bigint = "0.4.3"
rustpython-compiler-core = { path = "../core", version = "0.2.0" }
rustpython-common = { path = "../../common", version = "0.2.0", optional = true }

rustpython-ast-0.2.0/Python.asdl
-- ASDL's 4 builtin types are:
-- identifier, int, string, constant

module Python
{
    mod = Module(stmt* body, type_ignore* type_ignores)
        | Interactive(stmt* body)
        | Expression(expr body)
        | FunctionType(expr* argtypes, expr returns)

    stmt = FunctionDef(identifier name, arguments args,
                       stmt* body, expr* decorator_list, expr? returns,
                       string? type_comment)
         | AsyncFunctionDef(identifier name, arguments args,
                            stmt* body, expr* decorator_list, expr? returns,
                            string? type_comment)
         | ClassDef(identifier name,
                    expr* bases,
                    keyword* keywords,
                    stmt* body,
                    expr* decorator_list)
         | Return(expr? value)

         | Delete(expr* targets)
         | Assign(expr* targets, expr value, string? type_comment)
         | AugAssign(expr target, operator op, expr value)
         -- 'simple' indicates that we annotate simple name without parens
         | AnnAssign(expr target, expr annotation, expr? value, int simple)

         -- use 'orelse' because else is a keyword in target languages
         | For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
         | AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)
         | While(expr test, stmt* body, stmt* orelse)
         | If(expr test, stmt* body, stmt* orelse)
         | With(withitem* items, stmt* body, string? type_comment)
         | AsyncWith(withitem* items, stmt* body, string? type_comment)

         | Match(expr subject, match_case* cases)

         | Raise(expr? exc, expr? cause)
         | Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)
         | Assert(expr test, expr? msg)

         | Import(alias* names)
         | ImportFrom(identifier? module, alias* names, int? level)

         | Global(identifier* names)
         | Nonlocal(identifier* names)
         | Expr(expr value)
         | Pass | Break | Continue

         -- col_offset is the byte offset in the utf8 string the parser uses
         attributes (int lineno, int col_offset, int? end_lineno, int? end_col_offset)

         -- BoolOp() can use left & right?
    expr = BoolOp(boolop op, expr* values)
         | NamedExpr(expr target, expr value)
         | BinOp(expr left, operator op, expr right)
         | UnaryOp(unaryop op, expr operand)
         | Lambda(arguments args, expr body)
         | IfExp(expr test, expr body, expr orelse)
         | Dict(expr* keys, expr* values)
         | Set(expr* elts)
         | ListComp(expr elt, comprehension* generators)
         | SetComp(expr elt, comprehension* generators)
         | DictComp(expr key, expr value, comprehension* generators)
         | GeneratorExp(expr elt, comprehension* generators)
         -- the grammar constrains where yield expressions can occur
         | Await(expr value)
         | Yield(expr? value)
         | YieldFrom(expr value)
         -- need sequences for compare to distinguish between
         -- x < 4 < 3 and (x < 4) < 3
         | Compare(expr left, cmpop* ops, expr* comparators)
         | Call(expr func, expr* args, keyword* keywords)
         | FormattedValue(expr value, int conversion, expr? format_spec)
         | JoinedStr(expr* values)
         | Constant(constant value, string? kind)

         -- the following expression can appear in assignment context
         | Attribute(expr value, identifier attr, expr_context ctx)
         | Subscript(expr value, expr slice, expr_context ctx)
         | Starred(expr value, expr_context ctx)
         | Name(identifier id, expr_context ctx)
         | List(expr* elts, expr_context ctx)
         | Tuple(expr* elts, expr_context ctx)

         -- can appear only in Subscript
         | Slice(expr? lower, expr? upper, expr? step)

         -- col_offset is the byte offset in the utf8 string the parser uses
         attributes (int lineno, int col_offset, int? end_lineno, int? end_col_offset)

    expr_context = Load | Store | Del

    boolop = And | Or

    operator = Add | Sub | Mult | MatMult | Div | Mod | Pow | LShift
             | RShift | BitOr | BitXor | BitAnd | FloorDiv

    unaryop = Invert | Not | UAdd | USub

    cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn

    comprehension = (expr target, expr iter, expr* ifs, int is_async)

    excepthandler = ExceptHandler(expr? type, identifier? name, stmt* body)
                    attributes (int lineno, int col_offset, int? end_lineno, int? end_col_offset)

    arguments = (arg* posonlyargs, arg* args, arg? vararg, arg* kwonlyargs,
                 expr* kw_defaults, arg? kwarg, expr* defaults)

    arg = (identifier arg, expr? annotation, string? type_comment)
          attributes (int lineno, int col_offset, int? end_lineno, int? end_col_offset)

    -- keyword arguments supplied to call (NULL identifier for **kwargs)
    keyword = (identifier? arg, expr value)
              attributes (int lineno, int col_offset, int? end_lineno, int? end_col_offset)

    -- import name with optional 'as' alias.
    alias = (identifier name, identifier? asname)
            attributes (int lineno, int col_offset, int? end_lineno, int? end_col_offset)

    withitem = (expr context_expr, expr? optional_vars)

    match_case = (pattern pattern, expr? guard, stmt* body)

    pattern = MatchValue(expr value)
            | MatchSingleton(constant value)
            | MatchSequence(pattern* patterns)
            | MatchMapping(expr* keys, pattern* patterns, identifier? rest)
            | MatchClass(expr cls, pattern* patterns, identifier* kwd_attrs, pattern* kwd_patterns)

            | MatchStar(identifier? name)
            -- The optional "rest" MatchMapping parameter handles capturing extra mapping keys
            | MatchAs(pattern? pattern, identifier? name)
            | MatchOr(pattern* patterns)

            attributes (int lineno, int col_offset, int end_lineno, int end_col_offset)

    type_ignore = TypeIgnore(int lineno, string tag)
}
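The ASDL grammar above is the input that asdl.py (the parser) and asdl_rs.py (the Rust emitter) below turn into src/ast_gen.rs. As a minimal, hypothetical sketch of that mapping — the int_literal helper and its parameters are illustrative, not part of the crate; num-bigint is the same dependency declared in Cargo.toml above — the `Constant(constant value, string? kind)` constructor of the `expr` sum becomes the `ExprKind::Constant` variant, wrapped in `Located` because `expr` declares location attributes:

// Illustrative sketch only, assuming the re-exports visible in ast_gen.rs
// (Constant, Expr, ExprKind, Located, Location) are available at the crate root.
use num_bigint::BigInt;
use rustpython_ast::{Constant, Expr, ExprKind, Located, Location};

// Hypothetical helper: build an `Expr` (with the default `custom: ()` payload)
// representing an integer literal, mirroring `Constant(constant value, string? kind)`.
fn int_literal(value: i32, location: Location, end_location: Location) -> Expr {
    Located::new(
        location,
        end_location,
        ExprKind::Constant {
            value: Constant::Int(BigInt::from(value)),
            kind: None,
        },
    )
}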
rustpython-ast-0.2.0/asdl.py
#-------------------------------------------------------------------------------
# Parser for ASDL [1] definition files. Reads in an ASDL description and parses
# it into an AST that describes it.
#
# The EBNF we're parsing here: Figure 1 of the paper [1]. Extended to support
# modules and attributes after a product. Words starting with Capital letters
# are terminals. Literal tokens are in "double quotes". Others are
# non-terminals. Id is either TokenId or ConstructorId.
#
# module        ::= "module" Id "{" [definitions] "}"
# definitions   ::= { TypeId "=" type }
# type          ::= product | sum
# product       ::= fields ["attributes" fields]
# fields        ::= "(" { field, "," } field ")"
# field         ::= TypeId ["?" | "*"] [Id]
# sum           ::= constructor { "|" constructor } ["attributes" fields]
# constructor   ::= ConstructorId [fields]
#
# [1] "The Zephyr Abstract Syntax Description Language" by Wang, et. al. See
#     http://asdl.sourceforge.net/
#-------------------------------------------------------------------------------
from collections import namedtuple
import re

__all__ = [
    'builtin_types', 'parse', 'AST', 'Module', 'Type', 'Constructor',
    'Field', 'Sum', 'Product', 'VisitorBase', 'Check', 'check']

# The following classes define nodes into which the ASDL description is parsed.
# Note: this is a "meta-AST". ASDL files (such as Python.asdl) describe the AST
# structure used by a programming language. But ASDL files themselves need to be
# parsed. This module parses ASDL files and uses a simple AST to represent them.
# See the EBNF at the top of the file to understand the logical connection
# between the various node types.

builtin_types = {'identifier', 'string', 'int', 'constant'}

class AST:
    def __repr__(self):
        raise NotImplementedError

class Module(AST):
    def __init__(self, name, dfns):
        self.name = name
        self.dfns = dfns
        self.types = {type.name: type.value for type in dfns}

    def __repr__(self):
        return 'Module({0.name}, {0.dfns})'.format(self)

class Type(AST):
    def __init__(self, name, value):
        self.name = name
        self.value = value

    def __repr__(self):
        return 'Type({0.name}, {0.value})'.format(self)

class Constructor(AST):
    def __init__(self, name, fields=None):
        self.name = name
        self.fields = fields or []

    def __repr__(self):
        return 'Constructor({0.name}, {0.fields})'.format(self)

class Field(AST):
    def __init__(self, type, name=None, seq=False, opt=False):
        self.type = type
        self.name = name
        self.seq = seq
        self.opt = opt

    def __str__(self):
        if self.seq:
            extra = "*"
        elif self.opt:
            extra = "?"
else: extra = "" return "{}{} {}".format(self.type, extra, self.name) def __repr__(self): if self.seq: extra = ", seq=True" elif self.opt: extra = ", opt=True" else: extra = "" if self.name is None: return 'Field({0.type}{1})'.format(self, extra) else: return 'Field({0.type}, {0.name}{1})'.format(self, extra) class Sum(AST): def __init__(self, types, attributes=None): self.types = types self.attributes = attributes or [] def __repr__(self): if self.attributes: return 'Sum({0.types}, {0.attributes})'.format(self) else: return 'Sum({0.types})'.format(self) class Product(AST): def __init__(self, fields, attributes=None): self.fields = fields self.attributes = attributes or [] def __repr__(self): if self.attributes: return 'Product({0.fields}, {0.attributes})'.format(self) else: return 'Product({0.fields})'.format(self) # A generic visitor for the meta-AST that describes ASDL. This can be used by # emitters. Note that this visitor does not provide a generic visit method, so a # subclass needs to define visit methods from visitModule to as deep as the # interesting node. # We also define a Check visitor that makes sure the parsed ASDL is well-formed. class VisitorBase(object): """Generic tree visitor for ASTs.""" def __init__(self): self.cache = {} def visit(self, obj, *args): klass = obj.__class__ meth = self.cache.get(klass) if meth is None: methname = "visit" + klass.__name__ meth = getattr(self, methname, None) self.cache[klass] = meth if meth: try: meth(obj, *args) except Exception as e: print("Error visiting %r: %s" % (obj, e)) raise class Check(VisitorBase): """A visitor that checks a parsed ASDL tree for correctness. Errors are printed and accumulated. """ def __init__(self): super(Check, self).__init__() self.cons = {} self.errors = 0 self.types = {} def visitModule(self, mod): for dfn in mod.dfns: self.visit(dfn) def visitType(self, type): self.visit(type.value, str(type.name)) def visitSum(self, sum, name): for t in sum.types: self.visit(t, name) def visitConstructor(self, cons, name): key = str(cons.name) conflict = self.cons.get(key) if conflict is None: self.cons[key] = name else: print('Redefinition of constructor {}'.format(key)) print('Defined in {} and {}'.format(conflict, name)) self.errors += 1 for f in cons.fields: self.visit(f, key) def visitField(self, field, name): key = str(field.type) l = self.types.setdefault(key, []) l.append(name) def visitProduct(self, prod, name): for f in prod.fields: self.visit(f, name) def check(mod): """Check the parsed ASDL tree for correctness. Return True if success. For failure, the errors are printed out and False is returned. """ v = Check() v.visit(mod) for t in v.types: if t not in mod.types and not t in builtin_types: v.errors += 1 uses = ", ".join(v.types[t]) print('Undefined type {}, used in {}'.format(t, uses)) return not v.errors # The ASDL parser itself comes next. The only interesting external interface # here is the top-level parse function. def parse(filename): """Parse ASDL from the given file and return a Module node describing it.""" with open(filename, encoding="utf-8") as f: parser = ASDLParser() return parser.parse(f.read()) # Types for describing tokens in an ASDL specification. 
class TokenKind: """TokenKind is provides a scope for enumerated token kinds.""" (ConstructorId, TypeId, Equals, Comma, Question, Pipe, Asterisk, LParen, RParen, LBrace, RBrace) = range(11) operator_table = { '=': Equals, ',': Comma, '?': Question, '|': Pipe, '(': LParen, ')': RParen, '*': Asterisk, '{': LBrace, '}': RBrace} Token = namedtuple('Token', 'kind value lineno') class ASDLSyntaxError(Exception): def __init__(self, msg, lineno=None): self.msg = msg self.lineno = lineno or '' def __str__(self): return 'Syntax error on line {0.lineno}: {0.msg}'.format(self) def tokenize_asdl(buf): """Tokenize the given buffer. Yield Token objects.""" for lineno, line in enumerate(buf.splitlines(), 1): for m in re.finditer(r'\s*(\w+|--.*|.)', line.strip()): c = m.group(1) if c[0].isalpha(): # Some kind of identifier if c[0].isupper(): yield Token(TokenKind.ConstructorId, c, lineno) else: yield Token(TokenKind.TypeId, c, lineno) elif c[:2] == '--': # Comment break else: # Operators try: op_kind = TokenKind.operator_table[c] except KeyError: raise ASDLSyntaxError('Invalid operator %s' % c, lineno) yield Token(op_kind, c, lineno) class ASDLParser: """Parser for ASDL files. Create, then call the parse method on a buffer containing ASDL. This is a simple recursive descent parser that uses tokenize_asdl for the lexing. """ def __init__(self): self._tokenizer = None self.cur_token = None def parse(self, buf): """Parse the ASDL in the buffer and return an AST with a Module root. """ self._tokenizer = tokenize_asdl(buf) self._advance() return self._parse_module() def _parse_module(self): if self._at_keyword('module'): self._advance() else: raise ASDLSyntaxError( 'Expected "module" (found {})'.format(self.cur_token.value), self.cur_token.lineno) name = self._match(self._id_kinds) self._match(TokenKind.LBrace) defs = self._parse_definitions() self._match(TokenKind.RBrace) return Module(name, defs) def _parse_definitions(self): defs = [] while self.cur_token.kind == TokenKind.TypeId: typename = self._advance() self._match(TokenKind.Equals) type = self._parse_type() defs.append(Type(typename, type)) return defs def _parse_type(self): if self.cur_token.kind == TokenKind.LParen: # If we see a (, it's a product return self._parse_product() else: # Otherwise it's a sum. 
Look for ConstructorId sumlist = [Constructor(self._match(TokenKind.ConstructorId), self._parse_optional_fields())] while self.cur_token.kind == TokenKind.Pipe: # More constructors self._advance() sumlist.append(Constructor( self._match(TokenKind.ConstructorId), self._parse_optional_fields())) return Sum(sumlist, self._parse_optional_attributes()) def _parse_product(self): return Product(self._parse_fields(), self._parse_optional_attributes()) def _parse_fields(self): fields = [] self._match(TokenKind.LParen) while self.cur_token.kind == TokenKind.TypeId: typename = self._advance() is_seq, is_opt = self._parse_optional_field_quantifier() id = (self._advance() if self.cur_token.kind in self._id_kinds else None) fields.append(Field(typename, id, seq=is_seq, opt=is_opt)) if self.cur_token.kind == TokenKind.RParen: break elif self.cur_token.kind == TokenKind.Comma: self._advance() self._match(TokenKind.RParen) return fields def _parse_optional_fields(self): if self.cur_token.kind == TokenKind.LParen: return self._parse_fields() else: return None def _parse_optional_attributes(self): if self._at_keyword('attributes'): self._advance() return self._parse_fields() else: return None def _parse_optional_field_quantifier(self): is_seq, is_opt = False, False if self.cur_token.kind == TokenKind.Asterisk: is_seq = True self._advance() elif self.cur_token.kind == TokenKind.Question: is_opt = True self._advance() return is_seq, is_opt def _advance(self): """ Return the value of the current token and read the next one into self.cur_token. """ cur_val = None if self.cur_token is None else self.cur_token.value try: self.cur_token = next(self._tokenizer) except StopIteration: self.cur_token = None return cur_val _id_kinds = (TokenKind.ConstructorId, TokenKind.TypeId) def _match(self, kind): """The 'match' primitive of RD parsers. * Verifies that the current token is of the given kind (kind can be a tuple, in which the kind must match one of its members). * Returns the value of the current token * Reads in the next token """ if (isinstance(kind, tuple) and self.cur_token.kind in kind or self.cur_token.kind == kind ): value = self.cur_token.value self._advance() return value else: raise ASDLSyntaxError( 'Unmatched {} (found {})'.format(kind, self.cur_token.kind), self.cur_token.lineno) def _at_keyword(self, keyword): return (self.cur_token.kind == TokenKind.TypeId and self.cur_token.value == keyword) rustpython-ast-0.2.0/asdl_rs.py000075500000000000000000000607021046102023000146600ustar 00000000000000#! /usr/bin/env python """Generate Rust code from an ASDL description.""" import sys import json import textwrap from argparse import ArgumentParser from pathlib import Path import asdl TABSIZE = 4 AUTOGEN_MESSAGE = "// File automatically generated by {}.\n" builtin_type_mapping = { "identifier": "Ident", "string": "String", "int": "usize", "constant": "Constant", } assert builtin_type_mapping.keys() == asdl.builtin_types def get_rust_type(name): """Return a string for the C name of the type. This function special cases the default types provided by asdl. """ if name in asdl.builtin_types: return builtin_type_mapping[name] elif name.islower(): return "".join(part.capitalize() for part in name.split("_")) else: return name def is_simple(sum): """Return True if a sum is a simple. A sum is simple if its types have no fields, e.g. 
unaryop = Invert | Not | UAdd | USub """ for t in sum.types: if t.fields: return False return True def asdl_of(name, obj): if isinstance(obj, asdl.Product) or isinstance(obj, asdl.Constructor): fields = ", ".join(map(str, obj.fields)) if fields: fields = "({})".format(fields) return "{}{}".format(name, fields) else: if is_simple(obj): types = " | ".join(type.name for type in obj.types) else: sep = "\n{}| ".format(" " * (len(name) + 1)) types = sep.join(asdl_of(type.name, type) for type in obj.types) return "{} = {}".format(name, types) class EmitVisitor(asdl.VisitorBase): """Visit that emits lines""" def __init__(self, file): self.file = file self.identifiers = set() super(EmitVisitor, self).__init__() def emit_identifier(self, name): name = str(name) if name in self.identifiers: return self.emit("_Py_IDENTIFIER(%s);" % name, 0) self.identifiers.add(name) def emit(self, line, depth): if line: line = (" " * TABSIZE * depth) + line self.file.write(line + "\n") class TypeInfo: def __init__(self, name): self.name = name self.has_userdata = None self.children = set() self.boxed = False self.product = False def __repr__(self): return f"" def determine_userdata(self, typeinfo, stack): if self.name in stack: return None stack.add(self.name) for child, child_seq in self.children: if child in asdl.builtin_types: continue childinfo = typeinfo[child] child_has_userdata = childinfo.determine_userdata(typeinfo, stack) if self.has_userdata is None and child_has_userdata is True: self.has_userdata = True stack.remove(self.name) return self.has_userdata class FindUserdataTypesVisitor(asdl.VisitorBase): def __init__(self, typeinfo): self.typeinfo = typeinfo super().__init__() def visitModule(self, mod): for dfn in mod.dfns: self.visit(dfn) stack = set() for info in self.typeinfo.values(): info.determine_userdata(self.typeinfo, stack) def visitType(self, type): self.typeinfo[type.name] = TypeInfo(type.name) self.visit(type.value, type.name) def visitSum(self, sum, name): info = self.typeinfo[name] if is_simple(sum): info.has_userdata = False else: if len(sum.types) > 1: info.boxed = True if sum.attributes: # attributes means Located, which has the `custom: U` field info.has_userdata = True for variant in sum.types: self.add_children(name, variant.fields) def visitProduct(self, product, name): info = self.typeinfo[name] if product.attributes: # attributes means Located, which has the `custom: U` field info.has_userdata = True if len(product.fields) > 2: info.boxed = True info.product = True self.add_children(name, product.fields) def add_children(self, name, fields): self.typeinfo[name].children.update((field.type, field.seq) for field in fields) def rust_field(field_name): if field_name == "type": return "type_" else: return field_name class TypeInfoEmitVisitor(EmitVisitor): def __init__(self, file, typeinfo): self.typeinfo = typeinfo super().__init__(file) def has_userdata(self, typ): return self.typeinfo[typ].has_userdata def get_generics(self, typ, *generics): if self.has_userdata(typ): return [f"<{g}>" for g in generics] else: return ["" for g in generics] class StructVisitor(TypeInfoEmitVisitor): """Visitor to generate typedefs for AST.""" def visitModule(self, mod): for dfn in mod.dfns: self.visit(dfn) def visitType(self, type, depth=0): self.visit(type.value, type.name, depth) def visitSum(self, sum, name, depth): if is_simple(sum): self.simple_sum(sum, name, depth) else: self.sum_with_constructors(sum, name, depth) def emit_attrs(self, depth): self.emit("#[derive(Clone, Debug, PartialEq)]", depth) 
def simple_sum(self, sum, name, depth): rustname = get_rust_type(name) self.emit_attrs(depth) self.emit(f"pub enum {rustname} {{", depth) for variant in sum.types: self.emit(f"{variant.name},", depth + 1) self.emit("}", depth) self.emit("", depth) def sum_with_constructors(self, sum, name, depth): typeinfo = self.typeinfo[name] generics, generics_applied = self.get_generics(name, "U = ()", "U") enumname = rustname = get_rust_type(name) # all the attributes right now are for location, so if it has attrs we # can just wrap it in Located<> if sum.attributes: enumname = rustname + "Kind" self.emit_attrs(depth) self.emit(f"pub enum {enumname}{generics} {{", depth) for t in sum.types: self.visit(t, typeinfo, depth + 1) self.emit("}", depth) if sum.attributes: self.emit( f"pub type {rustname} = Located<{enumname}{generics_applied}, U>;", depth, ) self.emit("", depth) def visitConstructor(self, cons, parent, depth): if cons.fields: self.emit(f"{cons.name} {{", depth) for f in cons.fields: self.visit(f, parent, "", depth + 1) self.emit("},", depth) else: self.emit(f"{cons.name},", depth) def visitField(self, field, parent, vis, depth): typ = get_rust_type(field.type) fieldtype = self.typeinfo.get(field.type) if fieldtype and fieldtype.has_userdata: typ = f"{typ}" # don't box if we're doing Vec, but do box if we're doing Vec>> if fieldtype and fieldtype.boxed and (not (parent.product or field.seq) or field.opt): typ = f"Box<{typ}>" if field.opt: typ = f"Option<{typ}>" if field.seq: typ = f"Vec<{typ}>" name = rust_field(field.name) self.emit(f"{vis}{name}: {typ},", depth) def visitProduct(self, product, name, depth): typeinfo = self.typeinfo[name] generics, generics_applied = self.get_generics(name, "U = ()", "U") dataname = rustname = get_rust_type(name) if product.attributes: dataname = rustname + "Data" self.emit_attrs(depth) has_expr = any(f.type != "identifier" for f in product.fields) if has_expr: datadef = f"{dataname}{generics}" else: datadef = dataname self.emit(f"pub struct {datadef} {{", depth) for f in product.fields: self.visit(f, typeinfo, "pub ", depth + 1) self.emit("}", depth) if product.attributes: # attributes should just be location info if not has_expr: generics_applied = "" self.emit( f"pub type {rustname} = Located<{dataname}{generics_applied}, U>;", depth, ) self.emit("", depth) class FoldTraitDefVisitor(TypeInfoEmitVisitor): def visitModule(self, mod, depth): self.emit("pub trait Fold {", depth) self.emit("type TargetU;", depth + 1) self.emit("type Error;", depth + 1) self.emit( "fn map_user(&mut self, user: U) -> Result;", depth + 2, ) for dfn in mod.dfns: self.visit(dfn, depth + 2) self.emit("}", depth) def visitType(self, type, depth): name = type.name apply_u, apply_target_u = self.get_generics(name, "U", "Self::TargetU") enumname = get_rust_type(name) self.emit( f"fn fold_{name}(&mut self, node: {enumname}{apply_u}) -> Result<{enumname}{apply_target_u}, Self::Error> {{", depth, ) self.emit(f"fold_{name}(self, node)", depth + 1) self.emit("}", depth) class FoldImplVisitor(TypeInfoEmitVisitor): def visitModule(self, mod, depth): self.emit( "fn fold_located + ?Sized, T, MT>(folder: &mut F, node: Located, f: impl FnOnce(&mut F, T) -> Result) -> Result, F::Error> {", depth, ) self.emit( "Ok(Located { custom: folder.map_user(node.custom)?, location: node.location, end_location: node.end_location, node: f(folder, node.node)? 
})", depth + 1, ) self.emit("}", depth) for dfn in mod.dfns: self.visit(dfn, depth) def visitType(self, type, depth=0): self.visit(type.value, type.name, depth) def visitSum(self, sum, name, depth): apply_t, apply_u, apply_target_u = self.get_generics( name, "T", "U", "F::TargetU" ) enumname = get_rust_type(name) is_located = bool(sum.attributes) self.emit(f"impl Foldable for {enumname}{apply_t} {{", depth) self.emit(f"type Mapped = {enumname}{apply_u};", depth + 1) self.emit( "fn fold + ?Sized>(self, folder: &mut F) -> Result {", depth + 1, ) self.emit(f"folder.fold_{name}(self)", depth + 2) self.emit("}", depth + 1) self.emit("}", depth) self.emit( f"pub fn fold_{name} + ?Sized>(#[allow(unused)] folder: &mut F, node: {enumname}{apply_u}) -> Result<{enumname}{apply_target_u}, F::Error> {{", depth, ) if is_located: self.emit("fold_located(folder, node, |folder, node| {", depth) enumname += "Kind" self.emit("match node {", depth + 1) for cons in sum.types: fields_pattern = self.make_pattern(cons.fields) self.emit( f"{enumname}::{cons.name} {{ {fields_pattern} }} => {{", depth + 2 ) self.gen_construction(f"{enumname}::{cons.name}", cons.fields, depth + 3) self.emit("}", depth + 2) self.emit("}", depth + 1) if is_located: self.emit("})", depth) self.emit("}", depth) def visitProduct(self, product, name, depth): apply_t, apply_u, apply_target_u = self.get_generics( name, "T", "U", "F::TargetU" ) structname = get_rust_type(name) is_located = bool(product.attributes) self.emit(f"impl Foldable for {structname}{apply_t} {{", depth) self.emit(f"type Mapped = {structname}{apply_u};", depth + 1) self.emit( "fn fold + ?Sized>(self, folder: &mut F) -> Result {", depth + 1, ) self.emit(f"folder.fold_{name}(self)", depth + 2) self.emit("}", depth + 1) self.emit("}", depth) self.emit( f"pub fn fold_{name} + ?Sized>(#[allow(unused)] folder: &mut F, node: {structname}{apply_u}) -> Result<{structname}{apply_target_u}, F::Error> {{", depth, ) if is_located: self.emit("fold_located(folder, node, |folder, node| {", depth) structname += "Data" fields_pattern = self.make_pattern(product.fields) self.emit(f"let {structname} {{ {fields_pattern} }} = node;", depth + 1) self.gen_construction(structname, product.fields, depth + 1) if is_located: self.emit("})", depth) self.emit("}", depth) def make_pattern(self, fields): return ",".join(rust_field(f.name) for f in fields) def gen_construction(self, cons_path, fields, depth): self.emit(f"Ok({cons_path} {{", depth) for field in fields: name = rust_field(field.name) self.emit(f"{name}: Foldable::fold({name}, folder)?,", depth + 1) self.emit("})", depth) class FoldModuleVisitor(TypeInfoEmitVisitor): def visitModule(self, mod): depth = 0 self.emit('#[cfg(feature = "fold")]', depth) self.emit("pub mod fold {", depth) self.emit("use super::*;", depth + 1) self.emit("use crate::fold_helpers::Foldable;", depth + 1) FoldTraitDefVisitor(self.file, self.typeinfo).visit(mod, depth + 1) FoldImplVisitor(self.file, self.typeinfo).visit(mod, depth + 1) self.emit("}", depth) class ClassDefVisitor(EmitVisitor): def visitModule(self, mod): for dfn in mod.dfns: self.visit(dfn) def visitType(self, type, depth=0): self.visit(type.value, type.name, depth) def visitSum(self, sum, name, depth): structname = "NodeKind" + get_rust_type(name) self.emit( f'#[pyclass(module = "_ast", name = {json.dumps(name)}, base = "AstNode")]', depth, ) self.emit(f"struct {structname};", depth) self.emit("#[pyclass(flags(HAS_DICT, BASETYPE))]", depth) self.emit(f"impl {structname} {{}}", depth) for cons in 
sum.types: self.visit(cons, sum.attributes, structname, depth) def visitConstructor(self, cons, attrs, base, depth): self.gen_classdef(cons.name, cons.fields, attrs, depth, base) def visitProduct(self, product, name, depth): self.gen_classdef(name, product.fields, product.attributes, depth) def gen_classdef(self, name, fields, attrs, depth, base="AstNode"): structname = "Node" + get_rust_type(name) self.emit( f'#[pyclass(module = "_ast", name = {json.dumps(name)}, base = {json.dumps(base)})]', depth, ) self.emit(f"struct {structname};", depth) self.emit("#[pyclass(flags(HAS_DICT, BASETYPE))]", depth) self.emit(f"impl {structname} {{", depth) self.emit(f"#[extend_class]", depth + 1) self.emit( "fn extend_class_with_fields(ctx: &Context, class: &'static Py) {", depth + 1, ) fields = ",".join( f"ctx.new_str(ascii!({json.dumps(f.name)})).into()" for f in fields ) self.emit( f"class.set_attr(identifier!(ctx, _fields), ctx.new_tuple(vec![{fields}]).into());", depth + 2, ) attrs = ",".join( f"ctx.new_str(ascii!({json.dumps(attr.name)})).into()" for attr in attrs ) self.emit( f"class.set_attr(identifier!(ctx, _attributes), ctx.new_list(vec![{attrs}]).into());", depth + 2, ) self.emit("}", depth + 1) self.emit("}", depth) class ExtendModuleVisitor(EmitVisitor): def visitModule(self, mod): depth = 0 self.emit( "pub fn extend_module_nodes(vm: &VirtualMachine, module: &PyObject) {", depth, ) self.emit("extend_module!(vm, module, {", depth + 1) for dfn in mod.dfns: self.visit(dfn, depth + 2) self.emit("})", depth + 1) self.emit("}", depth) def visitType(self, type, depth): self.visit(type.value, type.name, depth) def visitSum(self, sum, name, depth): rust_name = get_rust_type(name) self.emit( f"{json.dumps(name)} => NodeKind{rust_name}::make_class(&vm.ctx),", depth ) for cons in sum.types: self.visit(cons, depth) def visitConstructor(self, cons, depth): self.gen_extension(cons.name, depth) def visitProduct(self, product, name, depth): self.gen_extension(name, depth) def gen_extension(self, name, depth): rust_name = get_rust_type(name) self.emit(f"{json.dumps(name)} => Node{rust_name}::make_class(&vm.ctx),", depth) class TraitImplVisitor(EmitVisitor): def visitModule(self, mod): for dfn in mod.dfns: self.visit(dfn) def visitType(self, type, depth=0): self.visit(type.value, type.name, depth) def visitSum(self, sum, name, depth): enumname = get_rust_type(name) if sum.attributes: enumname += "Kind" self.emit(f"impl NamedNode for ast::{enumname} {{", depth) self.emit(f"const NAME: &'static str = {json.dumps(name)};", depth + 1) self.emit("}", depth) self.emit(f"impl Node for ast::{enumname} {{", depth) self.emit( "fn ast_to_object(self, _vm: &VirtualMachine) -> PyObjectRef {", depth + 1 ) self.emit("match self {", depth + 2) for variant in sum.types: self.constructor_to_object(variant, enumname, depth + 3) self.emit("}", depth + 2) self.emit("}", depth + 1) self.emit( "fn ast_from_object(_vm: &VirtualMachine, _object: PyObjectRef) -> PyResult {", depth + 1, ) self.gen_sum_fromobj(sum, name, enumname, depth + 2) self.emit("}", depth + 1) self.emit("}", depth) def constructor_to_object(self, cons, enumname, depth): fields_pattern = self.make_pattern(cons.fields) self.emit(f"ast::{enumname}::{cons.name} {{ {fields_pattern} }} => {{", depth) self.make_node(cons.name, cons.fields, depth + 1) self.emit("}", depth) def visitProduct(self, product, name, depth): structname = get_rust_type(name) if product.attributes: structname += "Data" self.emit(f"impl NamedNode for ast::{structname} {{", depth) self.emit(f"const 
NAME: &'static str = {json.dumps(name)};", depth + 1) self.emit("}", depth) self.emit(f"impl Node for ast::{structname} {{", depth) self.emit( "fn ast_to_object(self, _vm: &VirtualMachine) -> PyObjectRef {", depth + 1 ) fields_pattern = self.make_pattern(product.fields) self.emit(f"let ast::{structname} {{ {fields_pattern} }} = self;", depth + 2) self.make_node(name, product.fields, depth + 2) self.emit("}", depth + 1) self.emit( "fn ast_from_object(_vm: &VirtualMachine, _object: PyObjectRef) -> PyResult {", depth + 1, ) self.gen_product_fromobj(product, name, structname, depth + 2) self.emit("}", depth + 1) self.emit("}", depth) def make_node(self, variant, fields, depth): rust_variant = get_rust_type(variant) self.emit( f"let _node = AstNode.into_ref_with_type(_vm, Node{rust_variant}::static_type().to_owned()).unwrap();", depth, ) if fields: self.emit("let _dict = _node.as_object().dict().unwrap();", depth) for f in fields: self.emit( f"_dict.set_item({json.dumps(f.name)}, {rust_field(f.name)}.ast_to_object(_vm), _vm).unwrap();", depth, ) self.emit("_node.into()", depth) def make_pattern(self, fields): return ",".join(rust_field(f.name) for f in fields) def gen_sum_fromobj(self, sum, sumname, enumname, depth): if sum.attributes: self.extract_location(sumname, depth) self.emit("let _cls = _object.class();", depth) self.emit("Ok(", depth) for cons in sum.types: self.emit(f"if _cls.is(Node{cons.name}::static_type()) {{", depth) self.gen_construction(f"{enumname}::{cons.name}", cons, sumname, depth + 1) self.emit("} else", depth) self.emit("{", depth) msg = f'format!("expected some sort of {sumname}, but got {{}}",_object.repr(_vm)?)' self.emit(f"return Err(_vm.new_type_error({msg}));", depth + 1) self.emit("})", depth) def gen_product_fromobj(self, product, prodname, structname, depth): if product.attributes: self.extract_location(prodname, depth) self.emit("Ok(", depth) self.gen_construction(structname, product, prodname, depth + 1) self.emit(")", depth) def gen_construction(self, cons_path, cons, name, depth): self.emit(f"ast::{cons_path} {{", depth) for field in cons.fields: self.emit( f"{rust_field(field.name)}: {self.decode_field(field, name)},", depth + 1, ) self.emit("}", depth) def extract_location(self, typename, depth): row = self.decode_field(asdl.Field("int", "lineno"), typename) column = self.decode_field(asdl.Field("int", "col_offset"), typename) self.emit(f"let _location = ast::Location::new({row}, {column});", depth) def decode_field(self, field, typename): name = json.dumps(field.name) if field.opt and not field.seq: return f"get_node_field_opt(_vm, &_object, {name})?.map(|obj| Node::ast_from_object(_vm, obj)).transpose()?" else: return f"Node::ast_from_object(_vm, get_node_field(_vm, &_object, {name}, {json.dumps(typename)})?)?" 
class ChainOfVisitors: def __init__(self, *visitors): self.visitors = visitors def visit(self, object): for v in self.visitors: v.visit(object) v.emit("", 0) def write_ast_def(mod, typeinfo, f): f.write( textwrap.dedent( """ #![allow(clippy::derive_partial_eq_without_eq)] pub use crate::constant::*; pub use crate::Location; type Ident = String; \n """ ) ) StructVisitor(f, typeinfo).emit_attrs(0) f.write( textwrap.dedent( """ pub struct Located { pub location: Location, pub end_location: Option, pub custom: U, pub node: T, } impl Located { pub fn new(location: Location, end_location: Location, node: T) -> Self { Self { location, end_location: Some(end_location), custom: (), node } } } \n """.lstrip() ) ) c = ChainOfVisitors(StructVisitor(f, typeinfo), FoldModuleVisitor(f, typeinfo)) c.visit(mod) def write_ast_mod(mod, f): f.write( textwrap.dedent( """ #![allow(clippy::all)] use super::*; use crate::common::ascii; """ ) ) c = ChainOfVisitors(ClassDefVisitor(f), TraitImplVisitor(f), ExtendModuleVisitor(f)) c.visit(mod) def main(input_filename, ast_mod_filename, ast_def_filename, dump_module=False): auto_gen_msg = AUTOGEN_MESSAGE.format("/".join(Path(__file__).parts[-2:])) mod = asdl.parse(input_filename) if dump_module: print("Parsed Module:") print(mod) if not asdl.check(mod): sys.exit(1) typeinfo = {} FindUserdataTypesVisitor(typeinfo).visit(mod) with ast_def_filename.open("w") as def_file, ast_mod_filename.open("w") as mod_file: def_file.write(auto_gen_msg) write_ast_def(mod, typeinfo, def_file) mod_file.write(auto_gen_msg) write_ast_mod(mod, mod_file) print(f"{ast_def_filename}, {ast_mod_filename} regenerated.") if __name__ == "__main__": parser = ArgumentParser() parser.add_argument("input_file", type=Path) parser.add_argument("-M", "--mod-file", type=Path, required=True) parser.add_argument("-D", "--def-file", type=Path, required=True) parser.add_argument("-d", "--dump-module", action="store_true") args = parser.parse_args() main(args.input_file, args.mod_file, args.def_file, args.dump_module) rustpython-ast-0.2.0/src/ast_gen.rs000064400000000000000000001210521046102023000154250ustar 00000000000000// File automatically generated by ast/asdl_rs.py. 
#![allow(clippy::derive_partial_eq_without_eq)] pub use crate::constant::*; pub use crate::Location; type Ident = String; #[derive(Clone, Debug, PartialEq)] pub struct Located { pub location: Location, pub end_location: Option, pub custom: U, pub node: T, } impl Located { pub fn new(location: Location, end_location: Location, node: T) -> Self { Self { location, end_location: Some(end_location), custom: (), node, } } } #[derive(Clone, Debug, PartialEq)] pub enum Mod { Module { body: Vec>, type_ignores: Vec, }, Interactive { body: Vec>, }, Expression { body: Box>, }, FunctionType { argtypes: Vec>, returns: Box>, }, } #[derive(Clone, Debug, PartialEq)] pub enum StmtKind { FunctionDef { name: Ident, args: Box>, body: Vec>, decorator_list: Vec>, returns: Option>>, type_comment: Option, }, AsyncFunctionDef { name: Ident, args: Box>, body: Vec>, decorator_list: Vec>, returns: Option>>, type_comment: Option, }, ClassDef { name: Ident, bases: Vec>, keywords: Vec>, body: Vec>, decorator_list: Vec>, }, Return { value: Option>>, }, Delete { targets: Vec>, }, Assign { targets: Vec>, value: Box>, type_comment: Option, }, AugAssign { target: Box>, op: Operator, value: Box>, }, AnnAssign { target: Box>, annotation: Box>, value: Option>>, simple: usize, }, For { target: Box>, iter: Box>, body: Vec>, orelse: Vec>, type_comment: Option, }, AsyncFor { target: Box>, iter: Box>, body: Vec>, orelse: Vec>, type_comment: Option, }, While { test: Box>, body: Vec>, orelse: Vec>, }, If { test: Box>, body: Vec>, orelse: Vec>, }, With { items: Vec>, body: Vec>, type_comment: Option, }, AsyncWith { items: Vec>, body: Vec>, type_comment: Option, }, Match { subject: Box>, cases: Vec>, }, Raise { exc: Option>>, cause: Option>>, }, Try { body: Vec>, handlers: Vec>, orelse: Vec>, finalbody: Vec>, }, Assert { test: Box>, msg: Option>>, }, Import { names: Vec>, }, ImportFrom { module: Option, names: Vec>, level: Option, }, Global { names: Vec, }, Nonlocal { names: Vec, }, Expr { value: Box>, }, Pass, Break, Continue, } pub type Stmt = Located, U>; #[derive(Clone, Debug, PartialEq)] pub enum ExprKind { BoolOp { op: Boolop, values: Vec>, }, NamedExpr { target: Box>, value: Box>, }, BinOp { left: Box>, op: Operator, right: Box>, }, UnaryOp { op: Unaryop, operand: Box>, }, Lambda { args: Box>, body: Box>, }, IfExp { test: Box>, body: Box>, orelse: Box>, }, Dict { keys: Vec>, values: Vec>, }, Set { elts: Vec>, }, ListComp { elt: Box>, generators: Vec>, }, SetComp { elt: Box>, generators: Vec>, }, DictComp { key: Box>, value: Box>, generators: Vec>, }, GeneratorExp { elt: Box>, generators: Vec>, }, Await { value: Box>, }, Yield { value: Option>>, }, YieldFrom { value: Box>, }, Compare { left: Box>, ops: Vec, comparators: Vec>, }, Call { func: Box>, args: Vec>, keywords: Vec>, }, FormattedValue { value: Box>, conversion: usize, format_spec: Option>>, }, JoinedStr { values: Vec>, }, Constant { value: Constant, kind: Option, }, Attribute { value: Box>, attr: Ident, ctx: ExprContext, }, Subscript { value: Box>, slice: Box>, ctx: ExprContext, }, Starred { value: Box>, ctx: ExprContext, }, Name { id: Ident, ctx: ExprContext, }, List { elts: Vec>, ctx: ExprContext, }, Tuple { elts: Vec>, ctx: ExprContext, }, Slice { lower: Option>>, upper: Option>>, step: Option>>, }, } pub type Expr = Located, U>; #[derive(Clone, Debug, PartialEq)] pub enum ExprContext { Load, Store, Del, } #[derive(Clone, Debug, PartialEq)] pub enum Boolop { And, Or, } #[derive(Clone, Debug, PartialEq)] pub enum Operator { Add, Sub, Mult, MatMult, Div, Mod, Pow, LShift, 
RShift, BitOr, BitXor, BitAnd, FloorDiv, } #[derive(Clone, Debug, PartialEq)] pub enum Unaryop { Invert, Not, UAdd, USub, } #[derive(Clone, Debug, PartialEq)] pub enum Cmpop { Eq, NotEq, Lt, LtE, Gt, GtE, Is, IsNot, In, NotIn, } #[derive(Clone, Debug, PartialEq)] pub struct Comprehension { pub target: Expr, pub iter: Expr, pub ifs: Vec>, pub is_async: usize, } #[derive(Clone, Debug, PartialEq)] pub enum ExcepthandlerKind { ExceptHandler { type_: Option>>, name: Option, body: Vec>, }, } pub type Excepthandler = Located, U>; #[derive(Clone, Debug, PartialEq)] pub struct Arguments { pub posonlyargs: Vec>, pub args: Vec>, pub vararg: Option>>, pub kwonlyargs: Vec>, pub kw_defaults: Vec>, pub kwarg: Option>>, pub defaults: Vec>, } #[derive(Clone, Debug, PartialEq)] pub struct ArgData { pub arg: Ident, pub annotation: Option>>, pub type_comment: Option, } pub type Arg = Located, U>; #[derive(Clone, Debug, PartialEq)] pub struct KeywordData { pub arg: Option, pub value: Expr, } pub type Keyword = Located, U>; #[derive(Clone, Debug, PartialEq)] pub struct AliasData { pub name: Ident, pub asname: Option, } pub type Alias = Located; #[derive(Clone, Debug, PartialEq)] pub struct Withitem { pub context_expr: Expr, pub optional_vars: Option>>, } #[derive(Clone, Debug, PartialEq)] pub struct MatchCase { pub pattern: Pattern, pub guard: Option>>, pub body: Vec>, } #[derive(Clone, Debug, PartialEq)] pub enum PatternKind { MatchValue { value: Box>, }, MatchSingleton { value: Constant, }, MatchSequence { patterns: Vec>, }, MatchMapping { keys: Vec>, patterns: Vec>, rest: Option, }, MatchClass { cls: Box>, patterns: Vec>, kwd_attrs: Vec, kwd_patterns: Vec>, }, MatchStar { name: Option, }, MatchAs { pattern: Option>>, name: Option, }, MatchOr { patterns: Vec>, }, } pub type Pattern = Located, U>; #[derive(Clone, Debug, PartialEq)] pub enum TypeIgnore { TypeIgnore { lineno: usize, tag: String }, } #[cfg(feature = "fold")] pub mod fold { use super::*; use crate::fold_helpers::Foldable; pub trait Fold { type TargetU; type Error; fn map_user(&mut self, user: U) -> Result; fn fold_mod(&mut self, node: Mod) -> Result, Self::Error> { fold_mod(self, node) } fn fold_stmt(&mut self, node: Stmt) -> Result, Self::Error> { fold_stmt(self, node) } fn fold_expr(&mut self, node: Expr) -> Result, Self::Error> { fold_expr(self, node) } fn fold_expr_context(&mut self, node: ExprContext) -> Result { fold_expr_context(self, node) } fn fold_boolop(&mut self, node: Boolop) -> Result { fold_boolop(self, node) } fn fold_operator(&mut self, node: Operator) -> Result { fold_operator(self, node) } fn fold_unaryop(&mut self, node: Unaryop) -> Result { fold_unaryop(self, node) } fn fold_cmpop(&mut self, node: Cmpop) -> Result { fold_cmpop(self, node) } fn fold_comprehension( &mut self, node: Comprehension, ) -> Result, Self::Error> { fold_comprehension(self, node) } fn fold_excepthandler( &mut self, node: Excepthandler, ) -> Result, Self::Error> { fold_excepthandler(self, node) } fn fold_arguments( &mut self, node: Arguments, ) -> Result, Self::Error> { fold_arguments(self, node) } fn fold_arg(&mut self, node: Arg) -> Result, Self::Error> { fold_arg(self, node) } fn fold_keyword( &mut self, node: Keyword, ) -> Result, Self::Error> { fold_keyword(self, node) } fn fold_alias(&mut self, node: Alias) -> Result, Self::Error> { fold_alias(self, node) } fn fold_withitem( &mut self, node: Withitem, ) -> Result, Self::Error> { fold_withitem(self, node) } fn fold_match_case( &mut self, node: MatchCase, ) -> Result, Self::Error> { 
fold_match_case(self, node) } fn fold_pattern( &mut self, node: Pattern, ) -> Result, Self::Error> { fold_pattern(self, node) } fn fold_type_ignore(&mut self, node: TypeIgnore) -> Result { fold_type_ignore(self, node) } } fn fold_located + ?Sized, T, MT>( folder: &mut F, node: Located, f: impl FnOnce(&mut F, T) -> Result, ) -> Result, F::Error> { Ok(Located { custom: folder.map_user(node.custom)?, location: node.location, end_location: node.end_location, node: f(folder, node.node)?, }) } impl Foldable for Mod { type Mapped = Mod; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_mod(self) } } pub fn fold_mod + ?Sized>( #[allow(unused)] folder: &mut F, node: Mod, ) -> Result, F::Error> { match node { Mod::Module { body, type_ignores } => Ok(Mod::Module { body: Foldable::fold(body, folder)?, type_ignores: Foldable::fold(type_ignores, folder)?, }), Mod::Interactive { body } => Ok(Mod::Interactive { body: Foldable::fold(body, folder)?, }), Mod::Expression { body } => Ok(Mod::Expression { body: Foldable::fold(body, folder)?, }), Mod::FunctionType { argtypes, returns } => Ok(Mod::FunctionType { argtypes: Foldable::fold(argtypes, folder)?, returns: Foldable::fold(returns, folder)?, }), } } impl Foldable for Stmt { type Mapped = Stmt; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_stmt(self) } } pub fn fold_stmt + ?Sized>( #[allow(unused)] folder: &mut F, node: Stmt, ) -> Result, F::Error> { fold_located(folder, node, |folder, node| match node { StmtKind::FunctionDef { name, args, body, decorator_list, returns, type_comment, } => Ok(StmtKind::FunctionDef { name: Foldable::fold(name, folder)?, args: Foldable::fold(args, folder)?, body: Foldable::fold(body, folder)?, decorator_list: Foldable::fold(decorator_list, folder)?, returns: Foldable::fold(returns, folder)?, type_comment: Foldable::fold(type_comment, folder)?, }), StmtKind::AsyncFunctionDef { name, args, body, decorator_list, returns, type_comment, } => Ok(StmtKind::AsyncFunctionDef { name: Foldable::fold(name, folder)?, args: Foldable::fold(args, folder)?, body: Foldable::fold(body, folder)?, decorator_list: Foldable::fold(decorator_list, folder)?, returns: Foldable::fold(returns, folder)?, type_comment: Foldable::fold(type_comment, folder)?, }), StmtKind::ClassDef { name, bases, keywords, body, decorator_list, } => Ok(StmtKind::ClassDef { name: Foldable::fold(name, folder)?, bases: Foldable::fold(bases, folder)?, keywords: Foldable::fold(keywords, folder)?, body: Foldable::fold(body, folder)?, decorator_list: Foldable::fold(decorator_list, folder)?, }), StmtKind::Return { value } => Ok(StmtKind::Return { value: Foldable::fold(value, folder)?, }), StmtKind::Delete { targets } => Ok(StmtKind::Delete { targets: Foldable::fold(targets, folder)?, }), StmtKind::Assign { targets, value, type_comment, } => Ok(StmtKind::Assign { targets: Foldable::fold(targets, folder)?, value: Foldable::fold(value, folder)?, type_comment: Foldable::fold(type_comment, folder)?, }), StmtKind::AugAssign { target, op, value } => Ok(StmtKind::AugAssign { target: Foldable::fold(target, folder)?, op: Foldable::fold(op, folder)?, value: Foldable::fold(value, folder)?, }), StmtKind::AnnAssign { target, annotation, value, simple, } => Ok(StmtKind::AnnAssign { target: Foldable::fold(target, folder)?, annotation: Foldable::fold(annotation, folder)?, value: Foldable::fold(value, folder)?, simple: Foldable::fold(simple, folder)?, }), StmtKind::For { target, iter, body, orelse, type_comment, } => Ok(StmtKind::For { target: 
Foldable::fold(target, folder)?, iter: Foldable::fold(iter, folder)?, body: Foldable::fold(body, folder)?, orelse: Foldable::fold(orelse, folder)?, type_comment: Foldable::fold(type_comment, folder)?, }), StmtKind::AsyncFor { target, iter, body, orelse, type_comment, } => Ok(StmtKind::AsyncFor { target: Foldable::fold(target, folder)?, iter: Foldable::fold(iter, folder)?, body: Foldable::fold(body, folder)?, orelse: Foldable::fold(orelse, folder)?, type_comment: Foldable::fold(type_comment, folder)?, }), StmtKind::While { test, body, orelse } => Ok(StmtKind::While { test: Foldable::fold(test, folder)?, body: Foldable::fold(body, folder)?, orelse: Foldable::fold(orelse, folder)?, }), StmtKind::If { test, body, orelse } => Ok(StmtKind::If { test: Foldable::fold(test, folder)?, body: Foldable::fold(body, folder)?, orelse: Foldable::fold(orelse, folder)?, }), StmtKind::With { items, body, type_comment, } => Ok(StmtKind::With { items: Foldable::fold(items, folder)?, body: Foldable::fold(body, folder)?, type_comment: Foldable::fold(type_comment, folder)?, }), StmtKind::AsyncWith { items, body, type_comment, } => Ok(StmtKind::AsyncWith { items: Foldable::fold(items, folder)?, body: Foldable::fold(body, folder)?, type_comment: Foldable::fold(type_comment, folder)?, }), StmtKind::Match { subject, cases } => Ok(StmtKind::Match { subject: Foldable::fold(subject, folder)?, cases: Foldable::fold(cases, folder)?, }), StmtKind::Raise { exc, cause } => Ok(StmtKind::Raise { exc: Foldable::fold(exc, folder)?, cause: Foldable::fold(cause, folder)?, }), StmtKind::Try { body, handlers, orelse, finalbody, } => Ok(StmtKind::Try { body: Foldable::fold(body, folder)?, handlers: Foldable::fold(handlers, folder)?, orelse: Foldable::fold(orelse, folder)?, finalbody: Foldable::fold(finalbody, folder)?, }), StmtKind::Assert { test, msg } => Ok(StmtKind::Assert { test: Foldable::fold(test, folder)?, msg: Foldable::fold(msg, folder)?, }), StmtKind::Import { names } => Ok(StmtKind::Import { names: Foldable::fold(names, folder)?, }), StmtKind::ImportFrom { module, names, level, } => Ok(StmtKind::ImportFrom { module: Foldable::fold(module, folder)?, names: Foldable::fold(names, folder)?, level: Foldable::fold(level, folder)?, }), StmtKind::Global { names } => Ok(StmtKind::Global { names: Foldable::fold(names, folder)?, }), StmtKind::Nonlocal { names } => Ok(StmtKind::Nonlocal { names: Foldable::fold(names, folder)?, }), StmtKind::Expr { value } => Ok(StmtKind::Expr { value: Foldable::fold(value, folder)?, }), StmtKind::Pass {} => Ok(StmtKind::Pass {}), StmtKind::Break {} => Ok(StmtKind::Break {}), StmtKind::Continue {} => Ok(StmtKind::Continue {}), }) } impl Foldable for Expr { type Mapped = Expr; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_expr(self) } } pub fn fold_expr + ?Sized>( #[allow(unused)] folder: &mut F, node: Expr, ) -> Result, F::Error> { fold_located(folder, node, |folder, node| match node { ExprKind::BoolOp { op, values } => Ok(ExprKind::BoolOp { op: Foldable::fold(op, folder)?, values: Foldable::fold(values, folder)?, }), ExprKind::NamedExpr { target, value } => Ok(ExprKind::NamedExpr { target: Foldable::fold(target, folder)?, value: Foldable::fold(value, folder)?, }), ExprKind::BinOp { left, op, right } => Ok(ExprKind::BinOp { left: Foldable::fold(left, folder)?, op: Foldable::fold(op, folder)?, right: Foldable::fold(right, folder)?, }), ExprKind::UnaryOp { op, operand } => Ok(ExprKind::UnaryOp { op: Foldable::fold(op, folder)?, operand: Foldable::fold(operand, folder)?, }), 
ExprKind::Lambda { args, body } => Ok(ExprKind::Lambda { args: Foldable::fold(args, folder)?, body: Foldable::fold(body, folder)?, }), ExprKind::IfExp { test, body, orelse } => Ok(ExprKind::IfExp { test: Foldable::fold(test, folder)?, body: Foldable::fold(body, folder)?, orelse: Foldable::fold(orelse, folder)?, }), ExprKind::Dict { keys, values } => Ok(ExprKind::Dict { keys: Foldable::fold(keys, folder)?, values: Foldable::fold(values, folder)?, }), ExprKind::Set { elts } => Ok(ExprKind::Set { elts: Foldable::fold(elts, folder)?, }), ExprKind::ListComp { elt, generators } => Ok(ExprKind::ListComp { elt: Foldable::fold(elt, folder)?, generators: Foldable::fold(generators, folder)?, }), ExprKind::SetComp { elt, generators } => Ok(ExprKind::SetComp { elt: Foldable::fold(elt, folder)?, generators: Foldable::fold(generators, folder)?, }), ExprKind::DictComp { key, value, generators, } => Ok(ExprKind::DictComp { key: Foldable::fold(key, folder)?, value: Foldable::fold(value, folder)?, generators: Foldable::fold(generators, folder)?, }), ExprKind::GeneratorExp { elt, generators } => Ok(ExprKind::GeneratorExp { elt: Foldable::fold(elt, folder)?, generators: Foldable::fold(generators, folder)?, }), ExprKind::Await { value } => Ok(ExprKind::Await { value: Foldable::fold(value, folder)?, }), ExprKind::Yield { value } => Ok(ExprKind::Yield { value: Foldable::fold(value, folder)?, }), ExprKind::YieldFrom { value } => Ok(ExprKind::YieldFrom { value: Foldable::fold(value, folder)?, }), ExprKind::Compare { left, ops, comparators, } => Ok(ExprKind::Compare { left: Foldable::fold(left, folder)?, ops: Foldable::fold(ops, folder)?, comparators: Foldable::fold(comparators, folder)?, }), ExprKind::Call { func, args, keywords, } => Ok(ExprKind::Call { func: Foldable::fold(func, folder)?, args: Foldable::fold(args, folder)?, keywords: Foldable::fold(keywords, folder)?, }), ExprKind::FormattedValue { value, conversion, format_spec, } => Ok(ExprKind::FormattedValue { value: Foldable::fold(value, folder)?, conversion: Foldable::fold(conversion, folder)?, format_spec: Foldable::fold(format_spec, folder)?, }), ExprKind::JoinedStr { values } => Ok(ExprKind::JoinedStr { values: Foldable::fold(values, folder)?, }), ExprKind::Constant { value, kind } => Ok(ExprKind::Constant { value: Foldable::fold(value, folder)?, kind: Foldable::fold(kind, folder)?, }), ExprKind::Attribute { value, attr, ctx } => Ok(ExprKind::Attribute { value: Foldable::fold(value, folder)?, attr: Foldable::fold(attr, folder)?, ctx: Foldable::fold(ctx, folder)?, }), ExprKind::Subscript { value, slice, ctx } => Ok(ExprKind::Subscript { value: Foldable::fold(value, folder)?, slice: Foldable::fold(slice, folder)?, ctx: Foldable::fold(ctx, folder)?, }), ExprKind::Starred { value, ctx } => Ok(ExprKind::Starred { value: Foldable::fold(value, folder)?, ctx: Foldable::fold(ctx, folder)?, }), ExprKind::Name { id, ctx } => Ok(ExprKind::Name { id: Foldable::fold(id, folder)?, ctx: Foldable::fold(ctx, folder)?, }), ExprKind::List { elts, ctx } => Ok(ExprKind::List { elts: Foldable::fold(elts, folder)?, ctx: Foldable::fold(ctx, folder)?, }), ExprKind::Tuple { elts, ctx } => Ok(ExprKind::Tuple { elts: Foldable::fold(elts, folder)?, ctx: Foldable::fold(ctx, folder)?, }), ExprKind::Slice { lower, upper, step } => Ok(ExprKind::Slice { lower: Foldable::fold(lower, folder)?, upper: Foldable::fold(upper, folder)?, step: Foldable::fold(step, folder)?, }), }) } impl Foldable for ExprContext { type Mapped = ExprContext; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { 
folder.fold_expr_context(self) } } pub fn fold_expr_context + ?Sized>( #[allow(unused)] folder: &mut F, node: ExprContext, ) -> Result { match node { ExprContext::Load {} => Ok(ExprContext::Load {}), ExprContext::Store {} => Ok(ExprContext::Store {}), ExprContext::Del {} => Ok(ExprContext::Del {}), } } impl Foldable for Boolop { type Mapped = Boolop; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_boolop(self) } } pub fn fold_boolop + ?Sized>( #[allow(unused)] folder: &mut F, node: Boolop, ) -> Result { match node { Boolop::And {} => Ok(Boolop::And {}), Boolop::Or {} => Ok(Boolop::Or {}), } } impl Foldable for Operator { type Mapped = Operator; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_operator(self) } } pub fn fold_operator + ?Sized>( #[allow(unused)] folder: &mut F, node: Operator, ) -> Result { match node { Operator::Add {} => Ok(Operator::Add {}), Operator::Sub {} => Ok(Operator::Sub {}), Operator::Mult {} => Ok(Operator::Mult {}), Operator::MatMult {} => Ok(Operator::MatMult {}), Operator::Div {} => Ok(Operator::Div {}), Operator::Mod {} => Ok(Operator::Mod {}), Operator::Pow {} => Ok(Operator::Pow {}), Operator::LShift {} => Ok(Operator::LShift {}), Operator::RShift {} => Ok(Operator::RShift {}), Operator::BitOr {} => Ok(Operator::BitOr {}), Operator::BitXor {} => Ok(Operator::BitXor {}), Operator::BitAnd {} => Ok(Operator::BitAnd {}), Operator::FloorDiv {} => Ok(Operator::FloorDiv {}), } } impl Foldable for Unaryop { type Mapped = Unaryop; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_unaryop(self) } } pub fn fold_unaryop + ?Sized>( #[allow(unused)] folder: &mut F, node: Unaryop, ) -> Result { match node { Unaryop::Invert {} => Ok(Unaryop::Invert {}), Unaryop::Not {} => Ok(Unaryop::Not {}), Unaryop::UAdd {} => Ok(Unaryop::UAdd {}), Unaryop::USub {} => Ok(Unaryop::USub {}), } } impl Foldable for Cmpop { type Mapped = Cmpop; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_cmpop(self) } } pub fn fold_cmpop + ?Sized>( #[allow(unused)] folder: &mut F, node: Cmpop, ) -> Result { match node { Cmpop::Eq {} => Ok(Cmpop::Eq {}), Cmpop::NotEq {} => Ok(Cmpop::NotEq {}), Cmpop::Lt {} => Ok(Cmpop::Lt {}), Cmpop::LtE {} => Ok(Cmpop::LtE {}), Cmpop::Gt {} => Ok(Cmpop::Gt {}), Cmpop::GtE {} => Ok(Cmpop::GtE {}), Cmpop::Is {} => Ok(Cmpop::Is {}), Cmpop::IsNot {} => Ok(Cmpop::IsNot {}), Cmpop::In {} => Ok(Cmpop::In {}), Cmpop::NotIn {} => Ok(Cmpop::NotIn {}), } } impl Foldable for Comprehension { type Mapped = Comprehension; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_comprehension(self) } } pub fn fold_comprehension + ?Sized>( #[allow(unused)] folder: &mut F, node: Comprehension, ) -> Result, F::Error> { let Comprehension { target, iter, ifs, is_async, } = node; Ok(Comprehension { target: Foldable::fold(target, folder)?, iter: Foldable::fold(iter, folder)?, ifs: Foldable::fold(ifs, folder)?, is_async: Foldable::fold(is_async, folder)?, }) } impl Foldable for Excepthandler { type Mapped = Excepthandler; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_excepthandler(self) } } pub fn fold_excepthandler + ?Sized>( #[allow(unused)] folder: &mut F, node: Excepthandler, ) -> Result, F::Error> { fold_located(folder, node, |folder, node| match node { ExcepthandlerKind::ExceptHandler { type_, name, body } => { Ok(ExcepthandlerKind::ExceptHandler { type_: Foldable::fold(type_, folder)?, name: Foldable::fold(name, folder)?, body: Foldable::fold(body, folder)?, }) } }) } impl Foldable for 
Arguments { type Mapped = Arguments; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_arguments(self) } } pub fn fold_arguments + ?Sized>( #[allow(unused)] folder: &mut F, node: Arguments, ) -> Result, F::Error> { let Arguments { posonlyargs, args, vararg, kwonlyargs, kw_defaults, kwarg, defaults, } = node; Ok(Arguments { posonlyargs: Foldable::fold(posonlyargs, folder)?, args: Foldable::fold(args, folder)?, vararg: Foldable::fold(vararg, folder)?, kwonlyargs: Foldable::fold(kwonlyargs, folder)?, kw_defaults: Foldable::fold(kw_defaults, folder)?, kwarg: Foldable::fold(kwarg, folder)?, defaults: Foldable::fold(defaults, folder)?, }) } impl Foldable for Arg { type Mapped = Arg; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_arg(self) } } pub fn fold_arg + ?Sized>( #[allow(unused)] folder: &mut F, node: Arg, ) -> Result, F::Error> { fold_located(folder, node, |folder, node| { let ArgData { arg, annotation, type_comment, } = node; Ok(ArgData { arg: Foldable::fold(arg, folder)?, annotation: Foldable::fold(annotation, folder)?, type_comment: Foldable::fold(type_comment, folder)?, }) }) } impl Foldable for Keyword { type Mapped = Keyword; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_keyword(self) } } pub fn fold_keyword + ?Sized>( #[allow(unused)] folder: &mut F, node: Keyword, ) -> Result, F::Error> { fold_located(folder, node, |folder, node| { let KeywordData { arg, value } = node; Ok(KeywordData { arg: Foldable::fold(arg, folder)?, value: Foldable::fold(value, folder)?, }) }) } impl Foldable for Alias { type Mapped = Alias; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_alias(self) } } pub fn fold_alias + ?Sized>( #[allow(unused)] folder: &mut F, node: Alias, ) -> Result, F::Error> { fold_located(folder, node, |folder, node| { let AliasData { name, asname } = node; Ok(AliasData { name: Foldable::fold(name, folder)?, asname: Foldable::fold(asname, folder)?, }) }) } impl Foldable for Withitem { type Mapped = Withitem; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_withitem(self) } } pub fn fold_withitem + ?Sized>( #[allow(unused)] folder: &mut F, node: Withitem, ) -> Result, F::Error> { let Withitem { context_expr, optional_vars, } = node; Ok(Withitem { context_expr: Foldable::fold(context_expr, folder)?, optional_vars: Foldable::fold(optional_vars, folder)?, }) } impl Foldable for MatchCase { type Mapped = MatchCase; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_match_case(self) } } pub fn fold_match_case + ?Sized>( #[allow(unused)] folder: &mut F, node: MatchCase, ) -> Result, F::Error> { let MatchCase { pattern, guard, body, } = node; Ok(MatchCase { pattern: Foldable::fold(pattern, folder)?, guard: Foldable::fold(guard, folder)?, body: Foldable::fold(body, folder)?, }) } impl Foldable for Pattern { type Mapped = Pattern; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_pattern(self) } } pub fn fold_pattern + ?Sized>( #[allow(unused)] folder: &mut F, node: Pattern, ) -> Result, F::Error> { fold_located(folder, node, |folder, node| match node { PatternKind::MatchValue { value } => Ok(PatternKind::MatchValue { value: Foldable::fold(value, folder)?, }), PatternKind::MatchSingleton { value } => Ok(PatternKind::MatchSingleton { value: Foldable::fold(value, folder)?, }), PatternKind::MatchSequence { patterns } => Ok(PatternKind::MatchSequence { patterns: Foldable::fold(patterns, folder)?, }), PatternKind::MatchMapping { keys, patterns, rest, } => 
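            // Added commentary (not part of the generated source): `Pattern`
            // is a located node, so `fold_pattern` goes through `fold_located`,
            // which presumably remaps the user annotation via `Fold::map_user`
            // and preserves the source span, and only the `PatternKind` payload
            // is rebuilt arm by arm here. Plain record types such as Arguments,
            // Comprehension, Withitem and MatchCase carry no location wrapper
            // and are rebuilt field by field directly.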
Ok(PatternKind::MatchMapping { keys: Foldable::fold(keys, folder)?, patterns: Foldable::fold(patterns, folder)?, rest: Foldable::fold(rest, folder)?, }), PatternKind::MatchClass { cls, patterns, kwd_attrs, kwd_patterns, } => Ok(PatternKind::MatchClass { cls: Foldable::fold(cls, folder)?, patterns: Foldable::fold(patterns, folder)?, kwd_attrs: Foldable::fold(kwd_attrs, folder)?, kwd_patterns: Foldable::fold(kwd_patterns, folder)?, }), PatternKind::MatchStar { name } => Ok(PatternKind::MatchStar { name: Foldable::fold(name, folder)?, }), PatternKind::MatchAs { pattern, name } => Ok(PatternKind::MatchAs { pattern: Foldable::fold(pattern, folder)?, name: Foldable::fold(name, folder)?, }), PatternKind::MatchOr { patterns } => Ok(PatternKind::MatchOr { patterns: Foldable::fold(patterns, folder)?, }), }) } impl Foldable for TypeIgnore { type Mapped = TypeIgnore; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { folder.fold_type_ignore(self) } } pub fn fold_type_ignore + ?Sized>( #[allow(unused)] folder: &mut F, node: TypeIgnore, ) -> Result { match node { TypeIgnore::TypeIgnore { lineno, tag } => Ok(TypeIgnore::TypeIgnore { lineno: Foldable::fold(lineno, folder)?, tag: Foldable::fold(tag, folder)?, }), } } } rustpython-ast-0.2.0/src/constant.rs000064400000000000000000000170411046102023000156400ustar 00000000000000use num_bigint::BigInt; pub use rustpython_compiler_core::ConversionFlag; #[derive(Clone, Debug, PartialEq)] pub enum Constant { None, Bool(bool), Str(String), Bytes(Vec), Int(BigInt), Tuple(Vec), Float(f64), Complex { real: f64, imag: f64 }, Ellipsis, } impl From for Constant { fn from(s: String) -> Constant { Self::Str(s) } } impl From> for Constant { fn from(b: Vec) -> Constant { Self::Bytes(b) } } impl From for Constant { fn from(b: bool) -> Constant { Self::Bool(b) } } impl From for Constant { fn from(i: BigInt) -> Constant { Self::Int(i) } } #[cfg(feature = "rustpython-common")] impl std::fmt::Display for Constant { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Constant::None => f.pad("None"), Constant::Bool(b) => f.pad(if *b { "True" } else { "False" }), Constant::Str(s) => rustpython_common::str::repr(s).fmt(f), Constant::Bytes(b) => f.pad(&rustpython_common::bytes::repr(b)), Constant::Int(i) => i.fmt(f), Constant::Tuple(tup) => { if let [elt] = &**tup { write!(f, "({elt},)") } else { f.write_str("(")?; for (i, elt) in tup.iter().enumerate() { if i != 0 { f.write_str(", ")?; } elt.fmt(f)?; } f.write_str(")") } } Constant::Float(fp) => f.pad(&rustpython_common::float_ops::to_string(*fp)), Constant::Complex { real, imag } => { if *real == 0.0 { write!(f, "{imag}j") } else { write!(f, "({real}{imag:+}j)") } } Constant::Ellipsis => f.pad("..."), } } } #[cfg(feature = "constant-optimization")] #[non_exhaustive] #[derive(Default)] pub struct ConstantOptimizer {} #[cfg(feature = "constant-optimization")] impl ConstantOptimizer { #[inline] pub fn new() -> Self { Self {} } } #[cfg(feature = "constant-optimization")] impl crate::fold::Fold for ConstantOptimizer { type TargetU = U; type Error = std::convert::Infallible; #[inline] fn map_user(&mut self, user: U) -> Result { Ok(user) } fn fold_expr(&mut self, node: crate::Expr) -> Result, Self::Error> { match node.node { crate::ExprKind::Tuple { elts, ctx } => { let elts = elts .into_iter() .map(|x| self.fold_expr(x)) .collect::, _>>()?; let expr = if elts .iter() .all(|e| matches!(e.node, crate::ExprKind::Constant { .. 
})) { let tuple = elts .into_iter() .map(|e| match e.node { crate::ExprKind::Constant { value, .. } => value, _ => unreachable!(), }) .collect(); crate::ExprKind::Constant { value: Constant::Tuple(tuple), kind: None, } } else { crate::ExprKind::Tuple { elts, ctx } }; Ok(crate::Expr { node: expr, custom: node.custom, location: node.location, end_location: node.end_location, }) } _ => crate::fold::fold_expr(self, node), } } } #[cfg(test)] mod tests { #[cfg(feature = "constant-optimization")] #[test] fn test_constant_opt() { use super::*; use crate::fold::Fold; use crate::*; let start = Default::default(); let end = None; let custom = (); let ast = Located { location: start, end_location: end, custom, node: ExprKind::Tuple { ctx: ExprContext::Load, elts: vec![ Located { location: start, end_location: end, custom, node: ExprKind::Constant { value: BigInt::from(1).into(), kind: None, }, }, Located { location: start, end_location: end, custom, node: ExprKind::Constant { value: BigInt::from(2).into(), kind: None, }, }, Located { location: start, end_location: end, custom, node: ExprKind::Tuple { ctx: ExprContext::Load, elts: vec![ Located { location: start, end_location: end, custom, node: ExprKind::Constant { value: BigInt::from(3).into(), kind: None, }, }, Located { location: start, end_location: end, custom, node: ExprKind::Constant { value: BigInt::from(4).into(), kind: None, }, }, Located { location: start, end_location: end, custom, node: ExprKind::Constant { value: BigInt::from(5).into(), kind: None, }, }, ], }, }, ], }, }; let new_ast = ConstantOptimizer::new() .fold_expr(ast) .unwrap_or_else(|e| match e {}); assert_eq!( new_ast, Located { location: start, end_location: end, custom, node: ExprKind::Constant { value: Constant::Tuple(vec![ BigInt::from(1).into(), BigInt::from(2).into(), Constant::Tuple(vec![ BigInt::from(3).into(), BigInt::from(4).into(), BigInt::from(5).into(), ]) ]), kind: None }, } ); } } rustpython-ast-0.2.0/src/fold_helpers.rs000064400000000000000000000030131046102023000164470ustar 00000000000000use crate::{constant, fold::Fold}; pub(crate) trait Foldable { type Mapped; fn fold + ?Sized>( self, folder: &mut F, ) -> Result; } impl Foldable for Vec where X: Foldable, { type Mapped = Vec; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { self.into_iter().map(|x| x.fold(folder)).collect() } } impl Foldable for Option where X: Foldable, { type Mapped = Option; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { self.map(|x| x.fold(folder)).transpose() } } impl Foldable for Box where X: Foldable, { type Mapped = Box; fn fold + ?Sized>( self, folder: &mut F, ) -> Result { (*self).fold(folder).map(Box::new) } } macro_rules! simple_fold { ($($t:ty),+$(,)?) => { $(impl $crate::fold_helpers::Foldable for $t { type Mapped = Self; #[inline] fn fold + ?Sized>( self, _folder: &mut F, ) -> Result { Ok(self) } })+ }; } simple_fold!(usize, String, bool, constant::Constant); rustpython-ast-0.2.0/src/impls.rs000064400000000000000000000046711046102023000151400ustar 00000000000000use crate::{Constant, ExprKind}; impl ExprKind { /// Returns a short name for the node suitable for use in error messages. pub fn name(&self) -> &'static str { match self { ExprKind::BoolOp { .. } | ExprKind::BinOp { .. } | ExprKind::UnaryOp { .. } => { "operator" } ExprKind::Subscript { .. } => "subscript", ExprKind::Await { .. } => "await expression", ExprKind::Yield { .. } | ExprKind::YieldFrom { .. } => "yield expression", ExprKind::Compare { .. } => "comparison", ExprKind::Attribute { .. 
} => "attribute", ExprKind::Call { .. } => "function call", ExprKind::Constant { value, .. } => match value { Constant::Str(_) | Constant::Int(_) | Constant::Float(_) | Constant::Complex { .. } | Constant::Bytes(_) => "literal", Constant::Tuple(_) => "tuple", Constant::Bool(b) => { if *b { "True" } else { "False" } } Constant::None => "None", Constant::Ellipsis => "ellipsis", }, ExprKind::List { .. } => "list", ExprKind::Tuple { .. } => "tuple", ExprKind::Dict { .. } => "dict display", ExprKind::Set { .. } => "set display", ExprKind::ListComp { .. } => "list comprehension", ExprKind::DictComp { .. } => "dict comprehension", ExprKind::SetComp { .. } => "set comprehension", ExprKind::GeneratorExp { .. } => "generator expression", ExprKind::Starred { .. } => "starred", ExprKind::Slice { .. } => "slice", ExprKind::JoinedStr { values } => { if values .iter() .any(|e| matches!(e.node, ExprKind::JoinedStr { .. })) { "f-string expression" } else { "literal" } } ExprKind::FormattedValue { .. } => "f-string expression", ExprKind::Name { .. } => "name", ExprKind::Lambda { .. } => "lambda", ExprKind::IfExp { .. } => "conditional expression", ExprKind::NamedExpr { .. } => "named expression", } } } rustpython-ast-0.2.0/src/lib.rs000064400000000000000000000003431046102023000145520ustar 00000000000000mod ast_gen; mod constant; #[cfg(feature = "fold")] mod fold_helpers; mod impls; #[cfg(feature = "unparse")] mod unparse; pub use ast_gen::*; pub use rustpython_compiler_core::Location; pub type Suite = Vec>; rustpython-ast-0.2.0/src/unparse.rs000064400000000000000000000451611046102023000154700ustar 00000000000000use crate::{ Arg, Arguments, Boolop, Cmpop, Comprehension, Constant, ConversionFlag, Expr, ExprKind, Operator, }; use std::fmt; mod precedence { macro_rules! precedence { ($($op:ident,)*) => { precedence!(@0, $($op,)*); }; (@$i:expr, $op1:ident, $($op:ident,)*) => { pub const $op1: u8 = $i; precedence!(@$i + 1, $($op,)*); }; (@$i:expr,) => {}; } precedence!( TUPLE, TEST, OR, AND, NOT, CMP, // "EXPR" = BOR, BXOR, BAND, SHIFT, ARITH, TERM, FACTOR, POWER, AWAIT, ATOM, ); pub const EXPR: u8 = BOR; } #[repr(transparent)] struct Unparser<'a> { f: fmt::Formatter<'a>, } impl<'a> Unparser<'a> { fn new<'b>(f: &'b mut fmt::Formatter<'a>) -> &'b mut Unparser<'a> { unsafe { &mut *(f as *mut fmt::Formatter<'a> as *mut Unparser<'a>) } } fn p(&mut self, s: &str) -> fmt::Result { self.f.write_str(s) } fn p_if(&mut self, cond: bool, s: &str) -> fmt::Result { if cond { self.f.write_str(s)?; } Ok(()) } fn p_delim(&mut self, first: &mut bool, s: &str) -> fmt::Result { self.p_if(!std::mem::take(first), s) } fn write_fmt(&mut self, f: fmt::Arguments<'_>) -> fmt::Result { self.f.write_fmt(f) } fn unparse_expr(&mut self, ast: &Expr, level: u8) -> fmt::Result { macro_rules! opprec { ($opty:ident, $x:expr, $enu:path, $($var:ident($op:literal, $prec:ident)),*$(,)?) => { match $x { $(<$enu>::$var => (opprec!(@space $opty, $op), precedence::$prec),)* } }; (@space bin, $op:literal) => { concat!(" ", $op, " ") }; (@space un, $op:literal) => { $op }; } macro_rules! 
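    // Added commentary (not part of the original source): the `group_if!`
    // macro defined next wraps whatever its body prints in a pair of
    // parentheses whenever the caller's precedence `level` is higher than the
    // node's own precedence, much like CPython's ast.unparse decides when to
    // parenthesize sub-expressions.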
group_if { ($lvl:expr, $body:block) => {{ let group = level > $lvl; self.p_if(group, "(")?; let ret = $body; self.p_if(group, ")")?; ret }}; } match &ast.node { ExprKind::BoolOp { op, values } => { let (op, prec) = opprec!(bin, op, Boolop, And("and", AND), Or("or", OR)); group_if!(prec, { let mut first = true; for val in values { self.p_delim(&mut first, op)?; self.unparse_expr(val, prec + 1)?; } }) } ExprKind::NamedExpr { target, value } => { group_if!(precedence::TUPLE, { self.unparse_expr(target, precedence::ATOM)?; self.p(" := ")?; self.unparse_expr(value, precedence::ATOM)?; }) } ExprKind::BinOp { left, op, right } => { let rassoc = matches!(op, Operator::Pow); let (op, prec) = opprec!( bin, op, Operator, Add("+", ARITH), Sub("-", ARITH), Mult("*", TERM), MatMult("@", TERM), Div("/", TERM), Mod("%", TERM), Pow("**", POWER), LShift("<<", SHIFT), RShift(">>", SHIFT), BitOr("|", BOR), BitXor("^", BXOR), BitAnd("&", BAND), FloorDiv("//", TERM), ); group_if!(prec, { self.unparse_expr(left, prec + rassoc as u8)?; self.p(op)?; self.unparse_expr(right, prec + !rassoc as u8)?; }) } ExprKind::UnaryOp { op, operand } => { let (op, prec) = opprec!( un, op, crate::Unaryop, Invert("~", FACTOR), Not("not ", NOT), UAdd("+", FACTOR), USub("-", FACTOR) ); group_if!(prec, { self.p(op)?; self.unparse_expr(operand, prec)?; }) } ExprKind::Lambda { args, body } => { group_if!(precedence::TEST, { let npos = args.args.len() + args.posonlyargs.len(); self.p(if npos > 0 { "lambda " } else { "lambda" })?; self.unparse_args(args)?; write!(self, ": {}", **body)?; }) } ExprKind::IfExp { test, body, orelse } => { group_if!(precedence::TEST, { self.unparse_expr(body, precedence::TEST + 1)?; self.p(" if ")?; self.unparse_expr(test, precedence::TEST + 1)?; self.p(" else ")?; self.unparse_expr(orelse, precedence::TEST)?; }) } ExprKind::Dict { keys, values } => { self.p("{")?; let mut first = true; let (packed, unpacked) = values.split_at(keys.len()); for (k, v) in keys.iter().zip(packed) { self.p_delim(&mut first, ", ")?; write!(self, "{}: {}", *k, *v)?; } for d in unpacked { self.p_delim(&mut first, ", ")?; write!(self, "**{}", *d)?; } self.p("}")?; } ExprKind::Set { elts } => { self.p("{")?; let mut first = true; for v in elts { self.p_delim(&mut first, ", ")?; self.unparse_expr(v, precedence::TEST)?; } self.p("}")?; } ExprKind::ListComp { elt, generators } => { self.p("[")?; self.unparse_expr(elt, precedence::TEST)?; self.unparse_comp(generators)?; self.p("]")?; } ExprKind::SetComp { elt, generators } => { self.p("{")?; self.unparse_expr(elt, precedence::TEST)?; self.unparse_comp(generators)?; self.p("}")?; } ExprKind::DictComp { key, value, generators, } => { self.p("{")?; self.unparse_expr(key, precedence::TEST)?; self.p(": ")?; self.unparse_expr(value, precedence::TEST)?; self.unparse_comp(generators)?; self.p("}")?; } ExprKind::GeneratorExp { elt, generators } => { self.p("(")?; self.unparse_expr(elt, precedence::TEST)?; self.unparse_comp(generators)?; self.p(")")?; } ExprKind::Await { value } => { group_if!(precedence::AWAIT, { self.p("await ")?; self.unparse_expr(value, precedence::ATOM)?; }) } ExprKind::Yield { value } => { if let Some(value) = value { write!(self, "(yield {})", **value)?; } else { self.p("(yield)")?; } } ExprKind::YieldFrom { value } => { write!(self, "(yield from {})", **value)?; } ExprKind::Compare { left, ops, comparators, } => { group_if!(precedence::CMP, { let new_lvl = precedence::CMP + 1; self.unparse_expr(left, new_lvl)?; for (op, cmp) in ops.iter().zip(comparators) { let op = match 
op { Cmpop::Eq => " == ", Cmpop::NotEq => " != ", Cmpop::Lt => " < ", Cmpop::LtE => " <= ", Cmpop::Gt => " > ", Cmpop::GtE => " >= ", Cmpop::Is => " is ", Cmpop::IsNot => " is not ", Cmpop::In => " in ", Cmpop::NotIn => " not in ", }; self.p(op)?; self.unparse_expr(cmp, new_lvl)?; } }) } ExprKind::Call { func, args, keywords, } => { self.unparse_expr(func, precedence::ATOM)?; self.p("(")?; if let ( [Expr { node: ExprKind::GeneratorExp { elt, generators }, .. }], [], ) = (&**args, &**keywords) { // make sure a single genexp doesn't get double parens self.unparse_expr(elt, precedence::TEST)?; self.unparse_comp(generators)?; } else { let mut first = true; for arg in args { self.p_delim(&mut first, ", ")?; self.unparse_expr(arg, precedence::TEST)?; } for kw in keywords { self.p_delim(&mut first, ", ")?; if let Some(arg) = &kw.node.arg { self.p(arg)?; self.p("=")?; } else { self.p("**")?; } self.unparse_expr(&kw.node.value, precedence::TEST)?; } } self.p(")")?; } ExprKind::FormattedValue { value, conversion, format_spec, } => self.unparse_formatted(value, *conversion, format_spec.as_deref())?, ExprKind::JoinedStr { values } => self.unparse_joinedstr(values, false)?, ExprKind::Constant { value, kind } => { if let Some(kind) = kind { self.p(kind)?; } assert_eq!(f64::MAX_10_EXP, 308); let inf_str = "1e309"; match value { Constant::Float(f) if f.is_infinite() => self.p(inf_str)?, Constant::Complex { real, imag } if real.is_infinite() || imag.is_infinite() => { self.p(&value.to_string().replace("inf", inf_str))? } _ => fmt::Display::fmt(value, &mut self.f)?, } } ExprKind::Attribute { value, attr, .. } => { self.unparse_expr(value, precedence::ATOM)?; let period = if let ExprKind::Constant { value: Constant::Int(_), .. } = &value.node { " ." } else { "." }; self.p(period)?; self.p(attr)?; } ExprKind::Subscript { value, slice, .. } => { self.unparse_expr(value, precedence::ATOM)?; let mut lvl = precedence::TUPLE; if let ExprKind::Tuple { elts, .. } = &slice.node { if elts .iter() .any(|expr| matches!(expr.node, ExprKind::Starred { .. })) { lvl += 1 } } self.p("[")?; self.unparse_expr(slice, lvl)?; self.p("]")?; } ExprKind::Starred { value, .. } => { self.p("*")?; self.unparse_expr(value, precedence::EXPR)?; } ExprKind::Name { id, .. } => self.p(id)?, ExprKind::List { elts, .. } => { self.p("[")?; let mut first = true; for elt in elts { self.p_delim(&mut first, ", ")?; self.unparse_expr(elt, precedence::TEST)?; } self.p("]")?; } ExprKind::Tuple { elts, .. 
} => { if elts.is_empty() { self.p("()")?; } else { group_if!(precedence::TUPLE, { let mut first = true; for elt in elts { self.p_delim(&mut first, ", ")?; self.unparse_expr(elt, precedence::TEST)?; } self.p_if(elts.len() == 1, ",")?; }) } } ExprKind::Slice { lower, upper, step } => { if let Some(lower) = lower { self.unparse_expr(lower, precedence::TEST)?; } self.p(":")?; if let Some(upper) = upper { self.unparse_expr(upper, precedence::TEST)?; } if let Some(step) = step { self.p(":")?; self.unparse_expr(step, precedence::TEST)?; } } } Ok(()) } fn unparse_args(&mut self, args: &Arguments) -> fmt::Result { let mut first = true; let defaults_start = args.posonlyargs.len() + args.args.len() - args.defaults.len(); for (i, arg) in args.posonlyargs.iter().chain(&args.args).enumerate() { self.p_delim(&mut first, ", ")?; self.unparse_arg(arg)?; if let Some(i) = i.checked_sub(defaults_start) { write!(self, "={}", &args.defaults[i])?; } self.p_if(i + 1 == args.posonlyargs.len(), ", /")?; } if args.vararg.is_some() || !args.kwonlyargs.is_empty() { self.p_delim(&mut first, ", ")?; self.p("*")?; } if let Some(vararg) = &args.vararg { self.unparse_arg(vararg)?; } let defaults_start = args.kwonlyargs.len() - args.kw_defaults.len(); for (i, kwarg) in args.kwonlyargs.iter().enumerate() { self.p_delim(&mut first, ", ")?; self.unparse_arg(kwarg)?; if let Some(default) = i .checked_sub(defaults_start) .and_then(|i| args.kw_defaults.get(i)) { write!(self, "={default}")?; } } if let Some(kwarg) = &args.kwarg { self.p_delim(&mut first, ", ")?; self.p("**")?; self.unparse_arg(kwarg)?; } Ok(()) } fn unparse_arg(&mut self, arg: &Arg) -> fmt::Result { self.p(&arg.node.arg)?; if let Some(ann) = &arg.node.annotation { write!(self, ": {}", **ann)?; } Ok(()) } fn unparse_comp(&mut self, generators: &[Comprehension]) -> fmt::Result { for comp in generators { self.p(if comp.is_async > 0 { " async for " } else { " for " })?; self.unparse_expr(&comp.target, precedence::TUPLE)?; self.p(" in ")?; self.unparse_expr(&comp.iter, precedence::TEST + 1)?; for cond in &comp.ifs { self.p(" if ")?; self.unparse_expr(cond, precedence::TEST + 1)?; } } Ok(()) } fn unparse_fstring_body(&mut self, values: &[Expr], is_spec: bool) -> fmt::Result { for value in values { self.unparse_fstring_elem(value, is_spec)?; } Ok(()) } fn unparse_formatted( &mut self, val: &Expr, conversion: usize, spec: Option<&Expr>, ) -> fmt::Result { let buffered = to_string_fmt(|f| Unparser::new(f).unparse_expr(val, precedence::TEST + 1)); let brace = if buffered.starts_with('{') { // put a space to avoid escaping the bracket "{ " } else { "{" }; self.p(brace)?; self.p(&buffered)?; drop(buffered); if conversion != ConversionFlag::None as usize { self.p("!")?; let buf = &[conversion as u8]; let c = std::str::from_utf8(buf).unwrap(); self.p(c)?; } if let Some(spec) = spec { self.p(":")?; self.unparse_fstring_elem(spec, true)?; } self.p("}")?; Ok(()) } fn unparse_fstring_elem(&mut self, expr: &Expr, is_spec: bool) -> fmt::Result { match &expr.node { ExprKind::Constant { value, .. 
            } => {
                if let Constant::Str(s) = value {
                    self.unparse_fstring_str(s)
                } else {
                    unreachable!()
                }
            }
            ExprKind::JoinedStr { values } => self.unparse_joinedstr(values, is_spec),
            ExprKind::FormattedValue {
                value,
                conversion,
                format_spec,
            } => self.unparse_formatted(value, *conversion, format_spec.as_deref()),
            _ => unreachable!(),
        }
    }
    fn unparse_fstring_str(&mut self, s: &str) -> fmt::Result {
        let s = s.replace('{', "{{").replace('}', "}}");
        self.p(&s)
    }
    fn unparse_joinedstr<U>(&mut self, values: &[Expr<U>], is_spec: bool) -> fmt::Result {
        if is_spec {
            self.unparse_fstring_body(values, is_spec)
        } else {
            self.p("f")?;
            let body = to_string_fmt(|f| Unparser::new(f).unparse_fstring_body(values, is_spec));
            fmt::Display::fmt(&rustpython_common::str::repr(&body), &mut self.f)
        }
    }
}
impl<U> fmt::Display for Expr<U> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Unparser::new(f).unparse_expr(self, precedence::TEST)
    }
}
fn to_string_fmt(f: impl FnOnce(&mut fmt::Formatter) -> fmt::Result) -> String {
    use std::cell::Cell;
    struct Fmt<F>(Cell<Option<F>>);
    impl<F: FnOnce(&mut fmt::Formatter) -> fmt::Result> fmt::Display for Fmt<F> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            self.0.take().unwrap()(f)
        }
    }
    Fmt(Cell::new(Some(f))).to_string()
}
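// ---------------------------------------------------------------------------
// Illustrative sketch only (not part of the published crate sources): a small
// end-to-end example of how the pieces above fit together: build an `Expr`,
// run `ConstantOptimizer` over it via the `Fold` machinery, then render it
// back to Python source through the `Display` impl defined in this file.
// The module name `usage_sketch` and the exact feature gating are assumptions
// made for illustration; the calls themselves mirror the test in
// src/constant.rs.
#[cfg(all(test, feature = "constant-optimization"))]
mod usage_sketch {
    use crate::constant::ConstantOptimizer;
    use crate::fold::Fold;
    use crate::*;
    use num_bigint::BigInt;

    #[test]
    fn fold_then_unparse() {
        // `(1, 2)` as an AST: a Tuple of two Constant expressions.
        let int_lit = |i: i32| Located {
            location: Default::default(),
            end_location: None,
            custom: (),
            node: ExprKind::Constant {
                value: BigInt::from(i).into(),
                kind: None,
            },
        };
        let tuple = Located {
            location: Default::default(),
            end_location: None,
            custom: (),
            node: ExprKind::Tuple {
                ctx: ExprContext::Load,
                elts: vec![int_lit(1), int_lit(2)],
            },
        };

        // The optimizer collapses a tuple of constants into a single
        // `Constant::Tuple`; its error type is `Infallible`, so the match
        // below can never be reached.
        let folded = ConstantOptimizer::new()
            .fold_expr(tuple)
            .unwrap_or_else(|e| match e {});
        assert!(matches!(folded.node, ExprKind::Constant { .. }));

        // With the "unparse" feature (which this file is compiled under),
        // `Expr` implements `Display` and can be printed as Python source.
        assert_eq!(folded.to_string(), "(1, 2)");
    }
}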