rustpython-parser-0.2.0/.cargo_vcs_info.json0000644000000001550000000000100145650ustar { "git": { "sha1": "c7faae9b22ce31a3ba1f2cc1cd3ad759b54ce100" }, "path_in_vcs": "compiler/parser" }rustpython-parser-0.2.0/Cargo.toml0000644000000032150000000000100125630ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" name = "rustpython-parser" version = "0.2.0" authors = ["RustPython Team"] build = "build.rs" description = "Parser for python code." readme = "README.md" license = "MIT" repository = "https://github.com/RustPython/RustPython" [dependencies.ahash] version = "0.7.6" [dependencies.itertools] version = "0.10.3" [dependencies.lalrpop-util] version = "0.19.8" [dependencies.log] version = "0.4.16" [dependencies.num-bigint] version = "0.4.3" [dependencies.num-traits] version = "0.2.14" [dependencies.phf] version = "0.10.1" [dependencies.rustc-hash] version = "1.1.0" [dependencies.rustpython-ast] version = "0.2.0" [dependencies.rustpython-compiler-core] version = "0.2.0" [dependencies.thiserror] version = "1.0" [dependencies.unic-emoji-char] version = "0.9.0" [dependencies.unic-ucd-ident] version = "0.9.0" [dependencies.unicode_names2] version = "0.5.0" [dev-dependencies.insta] version = "1.14.0" [build-dependencies.anyhow] version = "1.0.45" [build-dependencies.lalrpop] version = "0.19.8" optional = true [build-dependencies.phf_codegen] version = "0.10" [build-dependencies.tiny-keccak] version = "2" features = ["sha3"] [features] default = ["lalrpop"] 
rustpython-parser-0.2.0/Cargo.toml.orig000064400000000000000000000016121046102023000162430ustar 00000000000000[package] name = "rustpython-parser" version = "0.2.0" description = "Parser for python code." authors = ["RustPython Team"] build = "build.rs" repository = "https://github.com/RustPython/RustPython" license = "MIT" edition = "2021" [features] default = ["lalrpop"] # removing this causes potential build failure [build-dependencies] anyhow = "1.0.45" lalrpop = { version = "0.19.8", optional = true } phf_codegen = "0.10" tiny-keccak = { version = "2", features = ["sha3"] } [dependencies] rustpython-ast = { path = "../ast", version = "0.2.0" } rustpython-compiler-core = { path = "../core", version = "0.2.0" } ahash = "0.7.6" itertools = "0.10.3" lalrpop-util = "0.19.8" log = "0.4.16" num-bigint = "0.4.3" num-traits = "0.2.14" phf = "0.10.1" rustc-hash = "1.1.0" thiserror = "1.0" unic-emoji-char = "0.9.0" unic-ucd-ident = "0.9.0" unicode_names2 = "0.5.0" [dev-dependencies] insta = "1.14.0" rustpython-parser-0.2.0/README.md000064400000000000000000000042151046102023000146350ustar 00000000000000# RustPython/parser This directory has the code for python lexing, parsing and generating Abstract Syntax Trees (AST). The steps are: - Lexical analysis: splits the source code into tokens. - Parsing and generating the AST: transforms those tokens into an AST. Uses `LALRPOP`, a Rust parser generator framework. This crate is published on [https://docs.rs/rustpython-parser](https://docs.rs/rustpython-parser). We wrote [a blog post](https://rustpython.github.io/2020/04/02/thing-explainer-parser.html) with screenshots and an explanation to help you understand the steps by seeing them in action. For more information on LALRPOP, here is a link to the [LALRPOP book](https://github.com/lalrpop/lalrpop). There is a readme in the `src` folder with the details of each file. ## Directory content `build.rs`: The build script. `Cargo.toml`: The config file. 
The `src` directory has: **lib.rs** This is the crate's root. **lexer.rs** This module takes care of lexing python source text. This means source code is translated into separate tokens. **parser.rs** A python parsing module. Use this module to parse python code into an AST. There are three ways to parse python code. You could parse a whole program, a single statement, or a single expression. **ast.rs** Implements abstract syntax tree (AST) nodes for the python language. Roughly equivalent to [the python AST](https://docs.python.org/3/library/ast.html). **python.lalrpop** Python grammar. **token.rs** Different token definitions. Loosely based on token.h from CPython source. **errors.rs** Define internal parse error types. The goal is to provide a matching and a safe error API, masking errors from LALR. **fstring.rs** Format strings. **function.rs** Collection of functions for parsing parameters, arguments. **location.rs** Datatypes to support source location information. **mode.rs** Execution mode check. Allowed modes are `exec`, `eval` or `single`. ## How to use For example, one could do this: ``` use rustpython_parser::{parser, ast}; let python_source = "print('Hello world')"; let python_ast = parser::parse_expression(python_source).unwrap(); ``` rustpython-parser-0.2.0/build.rs000064400000000000000000000111731046102023000150240ustar 00000000000000use std::fmt::Write as _; use std::fs::File; use std::io::{BufRead, BufReader, BufWriter, Write}; use std::path::PathBuf; use tiny_keccak::{Hasher, Sha3}; fn main() -> anyhow::Result<()> { const SOURCE: &str = "python.lalrpop"; const TARGET: &str = "python.rs"; println!("cargo:rerun-if-changed={SOURCE}"); try_lalrpop(SOURCE, TARGET)?; gen_phf(); Ok(()) } fn requires_lalrpop(source: &str, target: &str) -> Option { let Ok(target) = File::open(target) else { return Some("python.rs doesn't exist. 
regenerate.".to_owned()); }; let sha_prefix = "// sha3: "; let sha3_line = if let Some(sha3_line) = BufReader::with_capacity(128, target) .lines() .find_map(|line| { let line = line.unwrap(); line.starts_with(sha_prefix).then_some(line) }) { sha3_line } else { // no sha3 line - maybe old version of lalrpop installed return Some("python.rs doesn't include sha3 hash. regenerate.".to_owned()); }; let expected_sha3_str = sha3_line.strip_prefix(sha_prefix).unwrap(); let actual_sha3 = { let mut hasher = Sha3::v256(); let mut f = BufReader::new(File::open(source).unwrap()); let mut line = String::new(); while f.read_line(&mut line).unwrap() != 0 { if line.ends_with('\n') { line.pop(); if line.ends_with('\r') { line.pop(); } } hasher.update(line.as_bytes()); hasher.update(b"\n"); line.clear(); } let mut hash = [0u8; 32]; hasher.finalize(&mut hash); hash }; let eq = sha_equal(expected_sha3_str, &actual_sha3); if !eq { let mut actual_sha3_str = String::new(); for byte in actual_sha3 { write!(actual_sha3_str, "{byte:02x}").unwrap(); } return Some(format!( "python.rs hash expected: {expected_sha3_str} but actual: {actual_sha3_str}" )); } None } fn try_lalrpop(source: &str, target: &str) -> anyhow::Result<()> { let Some(_message) = requires_lalrpop(source, target) else { return Ok(()); }; #[cfg(feature = "lalrpop")] lalrpop::process_root().unwrap_or_else(|e| { println!("cargo:warning={_message}"); panic!("running lalrpop failed. {e:?}"); }); #[cfg(not(feature = "lalrpop"))] { println!("cargo:warning=try: cargo build --manifest-path=compiler/parser/Cargo.toml --features=lalrpop"); } Ok(()) } fn sha_equal(expected_sha3_str: &str, actual_sha3: &[u8; 32]) -> bool { if expected_sha3_str.len() != 64 { panic!("lalrpop version? 
hash bug is fixed in 0.19.8"); } let mut expected_sha3 = [0u8; 32]; for (i, b) in expected_sha3.iter_mut().enumerate() { *b = u8::from_str_radix(&expected_sha3_str[i * 2..][..2], 16).unwrap(); } *actual_sha3 == expected_sha3 } fn gen_phf() { let out_dir = PathBuf::from(std::env::var_os("OUT_DIR").unwrap()); let mut kwds = phf_codegen::Map::new(); let kwds = kwds // Alphabetical keywords: .entry("...", "Tok::Ellipsis") .entry("False", "Tok::False") .entry("None", "Tok::None") .entry("True", "Tok::True") // moreso "standard" keywords .entry("and", "Tok::And") .entry("as", "Tok::As") .entry("assert", "Tok::Assert") .entry("async", "Tok::Async") .entry("await", "Tok::Await") .entry("break", "Tok::Break") .entry("class", "Tok::Class") .entry("continue", "Tok::Continue") .entry("def", "Tok::Def") .entry("del", "Tok::Del") .entry("elif", "Tok::Elif") .entry("else", "Tok::Else") .entry("except", "Tok::Except") .entry("finally", "Tok::Finally") .entry("for", "Tok::For") .entry("from", "Tok::From") .entry("global", "Tok::Global") .entry("if", "Tok::If") .entry("import", "Tok::Import") .entry("in", "Tok::In") .entry("is", "Tok::Is") .entry("lambda", "Tok::Lambda") .entry("nonlocal", "Tok::Nonlocal") .entry("not", "Tok::Not") .entry("or", "Tok::Or") .entry("pass", "Tok::Pass") .entry("raise", "Tok::Raise") .entry("return", "Tok::Return") .entry("try", "Tok::Try") .entry("while", "Tok::While") .entry("with", "Tok::With") .entry("yield", "Tok::Yield") .build(); writeln!( BufWriter::new(File::create(out_dir.join("keywords.rs")).unwrap()), "{kwds}", ) .unwrap(); } rustpython-parser-0.2.0/python.lalrpop000064400000000000000000001335021046102023000162740ustar 00000000000000// See also: file:///usr/share/doc/python/html/reference/grammar.html?highlight=grammar // See also: https://github.com/antlr/grammars-v4/blob/master/python3/Python3.g4 // See also: file:///usr/share/doc/python/html/reference/compound_stmts.html#function-definitions // See also: 
https://greentreesnakes.readthedocs.io/en/latest/nodes.html#keyword use crate::{ ast, error::{LexicalError, LexicalErrorType}, function::{ArgumentList, parse_args, parse_params, validate_arguments}, lexer, context::set_context, string::parse_strings, token::StringKind, }; use num_bigint::BigInt; grammar; // This is a hack to reduce the amount of lalrpop tables generated: // For each public entry point, a full parse table is generated. // By having only a single pub function, we reduce this to one. pub Top: ast::Mod = { StartModule => ast::Mod::Module { body, type_ignores: vec![] }, StartInteractive => ast::Mod::Interactive { body }, StartExpression ("\n")* => ast::Mod::Expression { body: Box::new(body) }, }; Program: ast::Suite = { => { lines.into_iter().flatten().collect() }, }; // A file line either has a declaration, or an empty newline: FileLine: ast::Suite = { Statement, "\n" => vec![], }; Suite: ast::Suite = { SimpleStatement, "\n" Indent Dedent => s.into_iter().flatten().collect(), }; Statement: ast::Suite = { SimpleStatement, => vec![s], }; SimpleStatement: ast::Suite = { ";"? 
"\n" => { let mut statements = vec![s1]; statements.extend(s2.into_iter().map(|e| e.1)); statements } }; SmallStatement: ast::Stmt = { ExpressionStatement, PassStatement, DelStatement, FlowStatement, ImportStatement, GlobalStatement, NonlocalStatement, AssertStatement, }; PassStatement: ast::Stmt = { "pass" => { ast::Stmt { location, end_location: Some(end_location), custom: (), node: ast::StmtKind::Pass, } }, }; DelStatement: ast::Stmt = { "del" => { ast::Stmt { location, end_location: Some(end_location), custom: (), node: ast::StmtKind::Delete { targets: targets.into_iter().map(|expr| set_context(expr, ast::ExprContext::Del)).collect() }, } }, }; ExpressionStatement: ast::Stmt = { => { // Just an expression, no assignment: if suffix.is_empty() { ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::Expr { value: Box::new(expression) } } } else { let mut targets = vec![set_context(expression, ast::ExprContext::Store)]; let mut values = suffix; while values.len() > 1 { targets.push(set_context(values.remove(0), ast::ExprContext::Store)); } let value = Box::new(values.into_iter().next().unwrap()); ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::Assign { targets, value, type_comment: None }, } } }, => { ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::AugAssign { target: Box::new(set_context(target, ast::ExprContext::Store)), op, value: Box::new(rhs) }, } }, > ":" > => { let simple = matches!(target.node, ast::ExprKind::Name { .. 
}); ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::AnnAssign { target: Box::new(set_context(target, ast::ExprContext::Store)), annotation: Box::new(annotation), value: rhs.map(Box::new), simple: if simple { 1 } else { 0 }, }, } }, }; AssignSuffix: ast::Expr = { "=" => e }; TestListOrYieldExpr: ast::Expr = { TestList, YieldExpr } #[inline] TestOrStarExprList: ast::Expr = { // as far as I can tell, these were the same TestList }; TestOrStarExpr: ast::Expr = { Test<"all">, StarExpr, }; NamedOrStarExpr: ast::Expr = { NamedExpression, StarExpr, }; TestOrStarNamedExpr: ast::Expr = { NamedExpressionTest, StarExpr, }; AugAssign: ast::Operator = { "+=" => ast::Operator::Add, "-=" => ast::Operator::Sub, "*=" => ast::Operator::Mult, "@=" => ast::Operator::MatMult, "/=" => ast::Operator::Div, "%=" => ast::Operator::Mod, "&=" => ast::Operator::BitAnd, "|=" => ast::Operator::BitOr, "^=" => ast::Operator::BitXor, "<<=" => ast::Operator::LShift, ">>=" => ast::Operator::RShift, "**=" => ast::Operator::Pow, "//=" => ast::Operator::FloorDiv, }; FlowStatement: ast::Stmt = { "break" => { ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::Break, } }, "continue" => { ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::Continue, } }, "return" => { ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::Return { value: value.map(Box::new) }, } }, => { ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::Expr { value: Box::new(expression) }, } }, RaiseStatement, }; RaiseStatement: ast::Stmt = { "raise" => { ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::Raise { exc: None, cause: None }, } }, "raise" > )?> => { ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::Raise { exc: Some(Box::new(t)), cause: c.map(|x| Box::new(x.1)) }, } 
}, }; ImportStatement: ast::Stmt = { "import" >> => { ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::Import { names }, } }, "from" "import" => { let (level, module) = source; ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::ImportFrom { level, module, names }, } }, }; ImportFromLocation: (Option, Option) = { => { (Some(dots.iter().sum()), Some(name)) }, => { (Some(dots.iter().sum()), None) }, }; ImportDots: usize = { "..." => 3, "." => 1, }; ImportAsNames: Vec = { >> => i, "(" >> ","? ")" => i, "*" => { // Star import all vec![ast::Alias::new(location, end_location, ast::AliasData { name: "*".to_string(), asname: None })] }, }; #[inline] ImportAsAlias: ast::Alias = { => ast::Alias::new(location, end_location, ast::AliasData { name, asname: a.map(|a| a.1) }), } // A name like abc or abc.def.ghi DottedName: String = { => n, => { let mut r = n.to_string(); for x in n2 { r.push_str("."); r.push_str(&x.1); } r }, }; GlobalStatement: ast::Stmt = { "global" > => { ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::Global { names } } }, }; NonlocalStatement: ast::Stmt = { "nonlocal" > => { ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::Nonlocal { names } } }, }; AssertStatement: ast::Stmt = { "assert" > )?> => { ast::Stmt { custom: (), location, end_location: Some(end_location), node: ast::StmtKind::Assert { test: Box::new(test), msg: msg.map(|e| Box::new(e.1)) } } }, }; CompoundStatement: ast::Stmt = { IfStatement, WhileStatement, ForStatement, TryStatement, WithStatement, FuncDef, ClassDef, }; IfStatement: ast::Stmt = { "if" ":" => { // Determine last else: let mut last = s3.map(|s| s.2).unwrap_or_default(); let end_location = last .last() .or_else(|| s2.last().and_then(|last| last.4.last())) .or_else(|| body.last()) .unwrap() .end_location; // handle elif: for i in s2.into_iter().rev() { let x = ast::Stmt { 
custom: (), location: i.0, end_location: i.4.last().unwrap().end_location, node: ast::StmtKind::If { test: Box::new(i.2), body: i.4, orelse: last }, }; last = vec![x]; } ast::Stmt { custom: (), location, end_location, node: ast::StmtKind::If { test: Box::new(test), body, orelse: last } } }, }; WhileStatement: ast::Stmt = { "while" ":" => { let orelse = s2.map(|s| s.2).unwrap_or_default(); let end_location = orelse .last() .or_else(|| body.last()) .unwrap() .end_location; ast::Stmt { custom: (), location, end_location, node: ast::StmtKind::While { test: Box::new(test), body, orelse }, } }, }; ForStatement: ast::Stmt = { "for" "in" ":" => { let orelse = s2.map(|s| s.2).unwrap_or_default(); let end_location = orelse .last() .or_else(|| body.last()) .unwrap() .end_location .unwrap(); let target = Box::new(set_context(target, ast::ExprContext::Store)); let iter = Box::new(iter); let type_comment = None; let node = if is_async.is_some() { ast::StmtKind::AsyncFor { target, iter, body, orelse, type_comment } } else { ast::StmtKind::For { target, iter, body, orelse, type_comment } }; ast::Stmt::new(location, end_location, node) }, }; TryStatement: ast::Stmt = { "try" ":" => { let orelse = else_suite.map(|s| s.2).unwrap_or_default(); let finalbody = finally.map(|s| s.2).unwrap_or_default(); let end_location = finalbody .last() .map(|last| last.end_location) .or_else(|| orelse.last().map(|last| last.end_location)) .or_else(|| handlers.last().map(|last| last.end_location)) .unwrap(); ast::Stmt { custom: (), location, end_location, node: ast::StmtKind::Try { body, handlers, orelse, finalbody, }, } }, "try" ":" => { let handlers = vec![]; let orelse = vec![]; let finalbody = finally.2; let end_location = finalbody.last().unwrap().end_location; ast::Stmt { custom: (), location, end_location, node: ast::StmtKind::Try { body, handlers, orelse, finalbody, }, } }, }; ExceptClause: ast::Excepthandler = { "except" ?> ":" => { let end_location = 
body.last().unwrap().end_location.unwrap(); ast::Excepthandler::new( location, end_location, ast::ExcepthandlerKind::ExceptHandler { type_: typ.map(Box::new), name: None, body, }, ) }, "except" "as" Identifier)> ":" => { let end_location = body.last().unwrap().end_location.unwrap(); ast::Excepthandler::new( location, end_location, ast::ExcepthandlerKind::ExceptHandler { type_: Some(Box::new(x.0)), name: Some(x.2), body, }, ) }, }; WithStatement: ast::Stmt = { "with" ":" => { let end_location = body.last().unwrap().end_location.unwrap(); let type_comment = None; let node = if is_async.is_some() { ast::StmtKind::AsyncWith { items, body, type_comment } } else { ast::StmtKind::With { items, body, type_comment } }; ast::Stmt::new(location, end_location, node) }, }; WithItems: Vec = { "(" ","? ")", "(" ",")?> > >)*> ","? ")" => { left.into_iter().flatten().chain([mid]).chain(right).collect() }, > => vec![<>], > >)+> => { [item].into_iter().chain(items).collect() } }; #[inline] WithItemsNoAs: Vec = { >> => { <>.into_iter().map(|context_expr| ast::Withitem { context_expr, optional_vars: None }).collect() }, } WithItem: ast::Withitem = { > if Goal != "as" => ast::Withitem { context_expr: <>, optional_vars: None }, > "as" > => { let optional_vars = Some(Box::new(set_context(vars, ast::ExprContext::Store))); ast::Withitem { context_expr, optional_vars } }, }; FuncDef: ast::Stmt = { "def" " Test<"all">)?> ":" => { let args = Box::new(args); let returns = r.map(|x| Box::new(x.1)); let end_location = body.last().unwrap().end_location.unwrap(); let type_comment = None; let node = if is_async.is_some() { ast::StmtKind::AsyncFunctionDef { name, args, body, decorator_list, returns, type_comment } } else { ast::StmtKind::FunctionDef { name, args, body, decorator_list, returns, type_comment } }; ast::Stmt::new(location, end_location, node) }, }; Parameters: ast::Arguments = { "(" )?> ")" =>? 
{ let args = validate_arguments( a.unwrap_or_else(|| ast::Arguments { posonlyargs: vec![], args: vec![], vararg: None, kwonlyargs: vec![], kw_defaults: vec![], kwarg: None, defaults: vec![] }) )?; Ok(args) } }; // Note that this is a macro which is used once for function defs, and // once for lambda defs. ParameterList: ast::Arguments = { > )?> ","? =>? { let (posonlyargs, args, defaults) = parse_params(param1)?; // Now gather rest of parameters: let (vararg, kwonlyargs, kw_defaults, kwarg) = args2.map_or((None, vec![], vec![], None), |x| x.1); Ok(ast::Arguments { posonlyargs, args, kwonlyargs, vararg, kwarg, defaults, kw_defaults, }) }, > )> ","? =>? { let (posonlyargs, args, defaults) = parse_params(param1)?; // Now gather rest of parameters: let vararg = None; let kwonlyargs = vec![]; let kw_defaults = vec![]; let kwarg = kw.1; Ok(ast::Arguments { posonlyargs, args, kwonlyargs, vararg, kwarg, defaults, kw_defaults, }) }, > ","? => { let (vararg, kwonlyargs, kw_defaults, kwarg) = params; ast::Arguments { posonlyargs: vec![], args: vec![], kwonlyargs, vararg, kwarg, defaults: vec![], kw_defaults, } }, > ","? => { ast::Arguments { posonlyargs: vec![], args: vec![], kwonlyargs: vec![], vararg: None, kwarg, defaults: vec![], kw_defaults: vec![], } }, }; // Use inline here to make sure the "," is not creating an ambiguity. #[inline] ParameterDefs: (Vec<(ast::Arg, Option)>, Vec<(ast::Arg, Option)>) = { >> => { (vec![], args) }, >> "," "/" )*> => { (pos_args, args.into_iter().map(|e| e.1).collect()) }, }; ParameterDef: (ast::Arg, Option) = { => (i, None), "=" > => (i, Some(e)), }; UntypedParameter: ast::Arg = { => ast::Arg::new( location, end_location, ast::ArgData { arg, annotation: None, type_comment: None }, ), }; TypedParameter: ast::Arg = { )?> => { let annotation = a.map(|x| Box::new(x.1)); ast::Arg::new(location, end_location, ast::ArgData { arg, annotation, type_comment: None }) }, }; // Use inline here to make sure the "," is not creating an ambiguity. 
// TODO: figure out another grammar that makes this inline no longer required. #[inline] ParameterListStarArgs: (Option>, Vec, Vec, Option>) = { "*" )*> )?> =>? { // Extract keyword arguments: let mut kwonlyargs = Vec::new(); let mut kw_defaults = Vec::new(); let mut kwargs = Vec::new(); for (name, value) in kw.into_iter().map(|x| x.1) { if let Some(value) = value { kwonlyargs.push(name); kw_defaults.push(value); } else { kwargs.push(name); } } kwargs.extend(kwonlyargs.into_iter()); if va.is_none() && kwargs.is_empty() && kwarg.is_none() { Err(LexicalError { error: LexicalErrorType::OtherError("named arguments must follow bare *".to_string()), location: location, })? } let kwarg = kwarg.map(|n| n.1).flatten(); let va = va.map(Box::new); Ok((va, kwargs, kw_defaults, kwarg)) } }; KwargParameter: Option> = { "**" => { kwarg.map(Box::new) } }; ClassDef: ast::Stmt = { "class" ":" => { let (bases, keywords) = match a { Some((_, arg, _)) => (arg.args, arg.keywords), None => (vec![], vec![]), }; let end_location = body.last().unwrap().end_location; ast::Stmt { custom: (), location, end_location, node: ast::StmtKind::ClassDef { name, bases, keywords, body, decorator_list, }, } }, }; // Decorators: Decorator: ast::Expr = { "@" "\n" => { p }, }; YieldExpr: ast::Expr = { "yield" => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Yield { value: value.map(Box::new) } }, "yield" "from" > => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::YieldFrom { value: Box::new(e) } }, }; Test: ast::Expr = { > "if" > "else" > => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::IfExp { test: Box::new(test), body: Box::new(body), orelse: Box::new(orelse), } }, OrTest, LambdaDef, }; NamedExpressionTest: ast::Expr = { NamedExpression, Test<"all">, } NamedExpression: ast::Expr = { ":=" > => { ast::Expr { location, end_location: value.end_location, custom: (), node: 
ast::ExprKind::NamedExpr { target: Box::new(ast::Expr::new( location, end_location, ast::ExprKind::Name { id, ctx: ast::ExprContext::Store }, )), value: Box::new(value), } } }, }; LambdaDef: ast::Expr = { "lambda" ?> ":" > =>? { let p = validate_arguments( p.unwrap_or_else(|| { ast::Arguments { posonlyargs: vec![], args: vec![], vararg: None, kwonlyargs: vec![], kw_defaults: vec![], kwarg: None, defaults: vec![] } } ))?; Ok(ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Lambda { args: Box::new(p), body: Box::new(body) } }) } } OrTest: ast::Expr = { > )+> => { let mut values = vec![e1]; values.extend(e2.into_iter().map(|e| e.1)); ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::BoolOp { op: ast::Boolop::Or, values } } }, AndTest, }; AndTest: ast::Expr = { > )+> => { let mut values = vec![e1]; values.extend(e2.into_iter().map(|e| e.1)); ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::BoolOp { op: ast::Boolop::And, values } } }, NotTest, }; NotTest: ast::Expr = { "not" > => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::UnaryOp { operand: Box::new(e), op: ast::Unaryop::Not } }, Comparison, }; Comparison: ast::Expr = { > )+> => { let (ops, comparators) = comparisons.into_iter().unzip(); ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Compare { left: Box::new(left), ops, comparators } } }, Expression, }; CompOp: ast::Cmpop = { "==" => ast::Cmpop::Eq, "!=" => ast::Cmpop::NotEq, "<" => ast::Cmpop::Lt, "<=" => ast::Cmpop::LtE, ">" => ast::Cmpop::Gt, ">=" => ast::Cmpop::GtE, "in" => ast::Cmpop::In, "not" "in" => ast::Cmpop::NotIn, "is" => ast::Cmpop::Is, "is" "not" => ast::Cmpop::IsNot, }; Expression: ast::Expr = { > "|" > => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::BinOp { left: Box::new(e1), op: ast::Operator::BitOr, 
right: Box::new(e2) } }, XorExpression, }; XorExpression: ast::Expr = { > "^" > => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::BinOp { left: Box::new(e1), op: ast::Operator::BitXor, right: Box::new(e2) } }, AndExpression, }; AndExpression: ast::Expr = { > "&" > => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::BinOp { left: Box::new(e1), op: ast::Operator::BitAnd, right: Box::new(e2) } }, ShiftExpression, }; ShiftExpression: ast::Expr = { > > => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::BinOp { left: Box::new(e1), op, right: Box::new(e2) } }, ArithmeticExpression, }; ShiftOp: ast::Operator = { "<<" => ast::Operator::LShift, ">>" => ast::Operator::RShift, }; ArithmeticExpression: ast::Expr = { > > => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::BinOp { left: Box::new(a), op, right: Box::new(b) } }, Term, }; AddOp: ast::Operator = { "+" => ast::Operator::Add, "-" => ast::Operator::Sub, }; Term: ast::Expr = { > > => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::BinOp { left: Box::new(a), op, right: Box::new(b) } }, Factor, }; MulOp: ast::Operator = { "*" => ast::Operator::Mult, "/" => ast::Operator::Div, "//" => ast::Operator::FloorDiv, "%" => ast::Operator::Mod, "@" => ast::Operator::MatMult, }; Factor: ast::Expr = { > => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::UnaryOp { operand: Box::new(e), op } }, Power, }; UnaryOp: ast::Unaryop = { "+" => ast::Unaryop::UAdd, "-" => ast::Unaryop::USub, "~" => ast::Unaryop::Invert, }; Power: ast::Expr = { > "**" > => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::BinOp { left: Box::new(e), op: ast::Operator::Pow, right: Box::new(b) } }, AtomExpr, }; AtomExpr: ast::Expr = { "await" > => { ast::Expr { location, end_location: 
Some(end_location), custom: (), node: ast::ExprKind::Await { value: Box::new(atom) } } }, AtomExpr2, } AtomExpr2: ast::Expr = { Atom, > "(" ")" => { ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Call { func: Box::new(f), args: a.args, keywords: a.keywords } } }, > "[" "]" => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Subscript { value: Box::new(e), slice: Box::new(s), ctx: ast::ExprContext::Load } }, > "." => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Attribute { value: Box::new(e), attr, ctx: ast::ExprContext::Load } }, }; SubscriptList: ast::Expr = { => { if s2.is_empty() && trailing_comma.is_none() { s1 } else { let mut dims = vec![s1]; for x in s2 { dims.push(x.1) } ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Tuple { elts: dims, ctx: ast::ExprContext::Load }, } } } }; Subscript: ast::Expr = { NamedExpressionTest, ?> ":" ?> => { let lower = e1.map(Box::new); let upper = e2.map(Box::new); let step = e3.flatten().map(Box::new); ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Slice { lower, upper, step } } } }; SliceOp: Option = { ":" ?> => e, } Atom: ast::Expr = { =>? 
Ok(parse_strings(s)?), => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Constant { value, kind: None } }, => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Name { id: name, ctx: ast::ExprContext::Load } }, "[" "]" => { let elts = e.unwrap_or_default(); ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::List { elts, ctx: ast::ExprContext::Load } } }, "[" "]" => { ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::ListComp { elt: Box::new(elt), generators } } }, "(" >> ")" if Goal != "no-withitems" => { if elts.len() == 1 && trailing_comma.is_none() { elts.into_iter().next().unwrap() } else { ast::Expr::new( location, end_location, ast::ExprKind::Tuple { elts, ctx: ast::ExprContext::Load }, ) } }, "(" >> ",")?> )*> ")" =>? { if left.is_none() && right.is_empty() && trailing_comma.is_none() { if matches!(mid.node, ast::ExprKind::Starred { .. }) { Err(LexicalError{ error: LexicalErrorType::OtherError("cannot use starred expression here".to_string()), location: mid.location, })? } Ok(mid) } else { let elts = left.into_iter().flatten().chain([mid]).chain(right).collect(); Ok(ast::Expr::new( location, end_location, ast::ExprKind::Tuple { elts, ctx: ast::ExprContext::Load }, )) } }, "(" ")" => ast::Expr::new( location, end_location, ast::ExprKind::Tuple { elts: Vec::new(), ctx: ast::ExprContext::Load } ), "(" ")" => e, "(" ")" => { ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::GeneratorExp { elt: Box::new(elt), generators } } }, "(" "**" > ")" =>? 
{ Err(LexicalError{ error : LexicalErrorType::OtherError("cannot use double starred expression here".to_string()), location: location, }.into()) }, "{" "}" => { let pairs = e.unwrap_or_default(); let (keys, values) = match pairs.iter().position(|(k,_)| k.is_none()) { Some(unpack_idx) => { let mut pairs = pairs; let (keys, mut values): (_, Vec<_>) = pairs.drain(..unpack_idx).map(|(k, v)| (*k.unwrap(), v)).unzip(); fn build_map(items: &mut Vec<(ast::Expr, ast::Expr)>) -> ast::Expr { let location = items[0].0.location; let end_location = items[0].0.end_location; let (keys, values) = items.drain(..).unzip(); ast::Expr { location, end_location, custom: (), node: ast::ExprKind::Dict { keys, values } } } let mut items = Vec::new(); for (key, value) in pairs.into_iter() { if let Some(key) = key { items.push((*key, value)); continue; } if !items.is_empty() { values.push(build_map(&mut items)); } values.push(value); } if !items.is_empty() { values.push(build_map(&mut items)); } (keys, values) }, None => pairs.into_iter().map(|(k, v)| (*k.unwrap(), v)).unzip() }; ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Dict { keys, values } } }, "{" "}" => { ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::DictComp { key: Box::new(e1.0), value: Box::new(e1.1), generators, } } }, "{" "}" => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Set { elts } }, "{" "}" => { ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::SetComp { elt: Box::new(elt), generators } } }, "True" => ast::Expr::new(location, end_location, ast::ExprKind::Constant { value: true.into(), kind: None }), "False" => ast::Expr::new(location, end_location, ast::ExprKind::Constant { value: false.into(), kind: None }), "None" => ast::Expr::new(location, end_location, ast::ExprKind::Constant { value: ast::Constant::None, kind: None }), "..." 
=> ast::Expr::new(location, end_location, ast::ExprKind::Constant { value: ast::Constant::Ellipsis, kind: None }), }; ListLiteralValues: Vec = { > ","? => e, }; DictLiteralValues: Vec<(Option>, ast::Expr)> = { > ","? => elements, }; DictEntry: (ast::Expr, ast::Expr) = { > ":" > => (e1, e2), }; DictElement: (Option>, ast::Expr) = { => (Some(Box::new(e.0)), e.1), "**" > => (None, e), }; SetLiteralValues: Vec = { > ","? => e1 }; ExpressionOrStarExpression = { Expression<"all">, StarExpr }; ExpressionList: ast::Expr = { GenericList }; ExpressionList2: Vec = { > ","? => elements, }; // A test list is one of: // - a list of expressions // - a single expression // - a single expression followed by a trailing comma #[inline] TestList: ast::Expr = { GenericList }; GenericList: ast::Expr = { > => { if elts.len() == 1 && trailing_comma.is_none() { elts.into_iter().next().unwrap() } else { ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Tuple { elts, ctx: ast::ExprContext::Load } } } } } // Test StarExpr: ast::Expr = { "*" > => ast::Expr { location, end_location: Some(end_location), custom: (), node: ast::ExprKind::Starred { value: Box::new(e), ctx: ast::ExprContext::Load }, } }; // Comprehensions: CompFor: Vec = => c; SingleForComprehension: ast::Comprehension = { "for" "in" > => { let is_async = is_async.is_some(); ast::Comprehension { target: set_context(target, ast::ExprContext::Store), iter, ifs, is_async: if is_async { 1 } else { 0 }, } } }; ExpressionNoCond: ast::Expr = OrTest<"all">; ComprehensionIf: ast::Expr = "if" => c; ArgumentList: ArgumentList = { > =>? 
{ let arg_list = parse_args(e)?; Ok(arg_list) } }; FunctionArgument: (Option<(ast::Location, ast::Location, Option)>, ast::Expr) = { => { let expr = match c { Some(c) => ast::Expr { location: e.location, end_location: e.end_location, custom: (), node: ast::ExprKind::GeneratorExp { elt: Box::new(e), generators: c, } }, None => e, }; (None, expr) }, "=" > => (Some((location, end_location, Some(i))), e), "*" > => { let expr = ast::Expr::new( location, end_location, ast::ExprKind::Starred { value: Box::new(e), ctx: ast::ExprContext::Load }, ); (None, expr) }, "**" > => (Some((location, end_location, None)), e), }; #[inline] Comma: Vec = { ",")*> => { let mut items = items; items.extend(last); items } }; #[inline] OneOrMore: Vec = { => { let mut items = vec![i1]; items.extend(i2.into_iter().map(|e| e.1)); items } }; Constant: ast::Constant = { => ast::Constant::Int(value), => ast::Constant::Float(value), => ast::Constant::Complex { real: s.0, imag: s.1 }, }; Identifier: String = => s; // Hook external lexer: extern { type Location = ast::Location; type Error = LexicalError; enum lexer::Tok { Indent => lexer::Tok::Indent, Dedent => lexer::Tok::Dedent, StartModule => lexer::Tok::StartModule, StartInteractive => lexer::Tok::StartInteractive, StartExpression => lexer::Tok::StartExpression, "+" => lexer::Tok::Plus, "-" => lexer::Tok::Minus, "~" => lexer::Tok::Tilde, ":" => lexer::Tok::Colon, "." => lexer::Tok::Dot, "..." 
=> lexer::Tok::Ellipsis, "," => lexer::Tok::Comma, "*" => lexer::Tok::Star, "**" => lexer::Tok::DoubleStar, "&" => lexer::Tok::Amper, "@" => lexer::Tok::At, "%" => lexer::Tok::Percent, "//" => lexer::Tok::DoubleSlash, "^" => lexer::Tok::CircumFlex, "|" => lexer::Tok::Vbar, "<<" => lexer::Tok::LeftShift, ">>" => lexer::Tok::RightShift, "/" => lexer::Tok::Slash, "(" => lexer::Tok::Lpar, ")" => lexer::Tok::Rpar, "[" => lexer::Tok::Lsqb, "]" => lexer::Tok::Rsqb, "{" => lexer::Tok::Lbrace, "}" => lexer::Tok::Rbrace, "=" => lexer::Tok::Equal, "+=" => lexer::Tok::PlusEqual, "-=" => lexer::Tok::MinusEqual, "*=" => lexer::Tok::StarEqual, "@=" => lexer::Tok::AtEqual, "/=" => lexer::Tok::SlashEqual, "%=" => lexer::Tok::PercentEqual, "&=" => lexer::Tok::AmperEqual, "|=" => lexer::Tok::VbarEqual, "^=" => lexer::Tok::CircumflexEqual, "<<=" => lexer::Tok::LeftShiftEqual, ">>=" => lexer::Tok::RightShiftEqual, "**=" => lexer::Tok::DoubleStarEqual, "//=" => lexer::Tok::DoubleSlashEqual, ":=" => lexer::Tok::ColonEqual, "==" => lexer::Tok::EqEqual, "!=" => lexer::Tok::NotEqual, "<" => lexer::Tok::Less, "<=" => lexer::Tok::LessEqual, ">" => lexer::Tok::Greater, ">=" => lexer::Tok::GreaterEqual, "->" => lexer::Tok::Rarrow, "and" => lexer::Tok::And, "as" => lexer::Tok::As, "assert" => lexer::Tok::Assert, "async" => lexer::Tok::Async, "await" => lexer::Tok::Await, "break" => lexer::Tok::Break, "class" => lexer::Tok::Class, "continue" => lexer::Tok::Continue, "def" => lexer::Tok::Def, "del" => lexer::Tok::Del, "elif" => lexer::Tok::Elif, "else" => lexer::Tok::Else, "except" => lexer::Tok::Except, "finally" => lexer::Tok::Finally, "for" => lexer::Tok::For, "from" => lexer::Tok::From, "global" => lexer::Tok::Global, "if" => lexer::Tok::If, "import" => lexer::Tok::Import, "in" => lexer::Tok::In, "is" => lexer::Tok::Is, "lambda" => lexer::Tok::Lambda, "nonlocal" => lexer::Tok::Nonlocal, "not" => lexer::Tok::Not, "or" => lexer::Tok::Or, "pass" => lexer::Tok::Pass, "raise" => lexer::Tok::Raise, 
"return" => lexer::Tok::Return, "try" => lexer::Tok::Try, "while" => lexer::Tok::While, "with" => lexer::Tok::With, "yield" => lexer::Tok::Yield, "True" => lexer::Tok::True, "False" => lexer::Tok::False, "None" => lexer::Tok::None, int => lexer::Tok::Int { value: }, float => lexer::Tok::Float { value: }, complex => lexer::Tok::Complex { real: , imag: }, string => lexer::Tok::String { value: , kind: , triple_quoted: }, name => lexer::Tok::Name { name: }, "\n" => lexer::Tok::Newline, ";" => lexer::Tok::Semi, "#" => lexer::Tok::Comment(_), } } rustpython-parser-0.2.0/src/context.rs000064400000000000000000000125151046102023000162010ustar 00000000000000use rustpython_ast::{Expr, ExprContext, ExprKind}; pub fn set_context(expr: Expr, ctx: ExprContext) -> Expr { match expr.node { ExprKind::Name { id, .. } => Expr { node: ExprKind::Name { id, ctx }, ..expr }, ExprKind::Tuple { elts, .. } => Expr { node: ExprKind::Tuple { elts: elts .into_iter() .map(|elt| set_context(elt, ctx.clone())) .collect(), ctx, }, ..expr }, ExprKind::List { elts, .. } => Expr { node: ExprKind::List { elts: elts .into_iter() .map(|elt| set_context(elt, ctx.clone())) .collect(), ctx, }, ..expr }, ExprKind::Attribute { value, attr, .. } => Expr { node: ExprKind::Attribute { value, attr, ctx }, ..expr }, ExprKind::Subscript { value, slice, .. } => Expr { node: ExprKind::Subscript { value, slice, ctx }, ..expr }, ExprKind::Starred { value, .. 
} => Expr { node: ExprKind::Starred { value: Box::new(set_context(*value, ctx.clone())), ctx, }, ..expr }, _ => expr, } } #[cfg(test)] mod tests { use crate::parser::parse_program; #[test] fn test_assign_name() { let source = String::from("x = (1, 2, 3)"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_assign_tuple() { let source = String::from("(x, y) = (1, 2, 3)"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_assign_list() { let source = String::from("[x, y] = (1, 2, 3)"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_assign_attribute() { let source = String::from("x.y = (1, 2, 3)"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_assign_subscript() { let source = String::from("x[y] = (1, 2, 3)"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_assign_starred() { let source = String::from("(x, *y) = (1, 2, 3)"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_assign_for() { let source = String::from("for x in (1, 2, 3): pass"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_assign_list_comp() { let source = String::from("x = [y for y in (1, 2, 3)]"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_assign_set_comp() { let source = String::from("x = {y for y in (1, 2, 3)}"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_assign_with() { let source = String::from("with 1 as x: pass"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } 
#[test] fn test_assign_named_expr() { let source = String::from("if x:= 1: pass"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_ann_assign_name() { let source = String::from("x: int = 1"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_aug_assign_name() { let source = String::from("x += 1"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_aug_assign_attribute() { let source = String::from("x.y += (1, 2, 3)"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_aug_assign_subscript() { let source = String::from("x[y] += (1, 2, 3)"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_del_name() { let source = String::from("del x"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_del_attribute() { let source = String::from("del x.y"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_del_subscript() { let source = String::from("del x[y]"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } } rustpython-parser-0.2.0/src/error.rs000064400000000000000000000234721046102023000156520ustar 00000000000000//! Define internal parse error types //! The goal is to provide a matching and a safe error API, maksing errors from LALR use crate::{ast::Location, token::Tok}; use lalrpop_util::ParseError as LalrpopError; use std::fmt; /// Represents an error during lexical scanning. 
#[derive(Debug, PartialEq)] pub struct LexicalError { pub error: LexicalErrorType, pub location: Location, } impl LexicalError { pub fn new(error: LexicalErrorType, location: Location) -> Self { Self { error, location } } } #[derive(Debug, PartialEq)] pub enum LexicalErrorType { StringError, UnicodeError, NestingError, IndentationError, TabError, TabsAfterSpaces, DefaultArgumentError, DuplicateArgumentError(String), PositionalArgumentError, UnpackedArgumentError, DuplicateKeywordArgumentError(String), UnrecognizedToken { tok: char }, FStringError(FStringErrorType), LineContinuationError, Eof, OtherError(String), } impl fmt::Display for LexicalErrorType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { LexicalErrorType::StringError => write!(f, "Got unexpected string"), LexicalErrorType::FStringError(error) => write!(f, "f-string: {error}"), LexicalErrorType::UnicodeError => write!(f, "Got unexpected unicode"), LexicalErrorType::NestingError => write!(f, "Got unexpected nesting"), LexicalErrorType::IndentationError => { write!(f, "unindent does not match any outer indentation level") } LexicalErrorType::TabError => { write!(f, "inconsistent use of tabs and spaces in indentation") } LexicalErrorType::TabsAfterSpaces => { write!(f, "Tabs not allowed as part of indentation after spaces") } LexicalErrorType::DefaultArgumentError => { write!(f, "non-default argument follows default argument") } LexicalErrorType::DuplicateArgumentError(arg_name) => { write!(f, "duplicate argument '{arg_name}' in function definition") } LexicalErrorType::DuplicateKeywordArgumentError(arg_name) => { write!(f, "keyword argument repeated: {arg_name}") } LexicalErrorType::PositionalArgumentError => { write!(f, "positional argument follows keyword argument") } LexicalErrorType::UnpackedArgumentError => { write!( f, "iterable argument unpacking follows keyword argument unpacking" ) } LexicalErrorType::UnrecognizedToken { tok } => { write!(f, "Got unexpected token {tok}") } 
LexicalErrorType::LineContinuationError => { write!(f, "unexpected character after line continuation character") } LexicalErrorType::Eof => write!(f, "unexpected EOF while parsing"), LexicalErrorType::OtherError(msg) => write!(f, "{msg}"), } } } // TODO: consolidate these with ParseError #[derive(Debug, PartialEq)] pub struct FStringError { pub error: FStringErrorType, pub location: Location, } impl FStringError { pub fn new(error: FStringErrorType, location: Location) -> Self { Self { error, location } } } impl From for LexicalError { fn from(err: FStringError) -> Self { LexicalError { error: LexicalErrorType::FStringError(err.error), location: err.location, } } } #[derive(Debug, PartialEq)] pub enum FStringErrorType { UnclosedLbrace, UnopenedRbrace, ExpectedRbrace, InvalidExpression(Box), InvalidConversionFlag, EmptyExpression, MismatchedDelimiter(char, char), ExpressionNestedTooDeeply, ExpressionCannotInclude(char), SingleRbrace, Unmatched(char), UnterminatedString, } impl fmt::Display for FStringErrorType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { FStringErrorType::UnclosedLbrace => write!(f, "expecting '}}'"), FStringErrorType::UnopenedRbrace => write!(f, "Unopened '}}'"), FStringErrorType::ExpectedRbrace => write!(f, "Expected '}}' after conversion flag."), FStringErrorType::InvalidExpression(error) => { write!(f, "{error}") } FStringErrorType::InvalidConversionFlag => write!(f, "invalid conversion character"), FStringErrorType::EmptyExpression => write!(f, "empty expression not allowed"), FStringErrorType::MismatchedDelimiter(first, second) => write!( f, "closing parenthesis '{second}' does not match opening parenthesis '{first}'" ), FStringErrorType::SingleRbrace => write!(f, "single '}}' is not allowed"), FStringErrorType::Unmatched(delim) => write!(f, "unmatched '{delim}'"), FStringErrorType::ExpressionNestedTooDeeply => { write!(f, "expressions nested too deeply") } FStringErrorType::UnterminatedString => { write!(f, 
"unterminated string") } FStringErrorType::ExpressionCannotInclude(c) => { if *c == '\\' { write!(f, "f-string expression part cannot include a backslash") } else { write!(f, "f-string expression part cannot include '{c}'s") } } } } } impl From for LalrpopError { fn from(err: FStringError) -> Self { lalrpop_util::ParseError::User { error: LexicalError { error: LexicalErrorType::FStringError(err.error), location: err.location, }, } } } /// Represents an error during parsing pub type ParseError = rustpython_compiler_core::BaseError; #[derive(Debug, PartialEq, thiserror::Error)] pub enum ParseErrorType { /// Parser encountered an unexpected end of input Eof, /// Parser encountered an extra token ExtraToken(Tok), /// Parser encountered an invalid token InvalidToken, /// Parser encountered an unexpected token UnrecognizedToken(Tok, Option), /// Maps to `User` type from `lalrpop-util` Lexical(LexicalErrorType), } /// Convert `lalrpop_util::ParseError` to our internal type pub(crate) fn parse_error_from_lalrpop( err: LalrpopError, source_path: &str, ) -> ParseError { let source_path = source_path.to_owned(); match err { // TODO: Are there cases where this isn't an EOF? LalrpopError::InvalidToken { location } => ParseError { error: ParseErrorType::Eof, location, source_path, }, LalrpopError::ExtraToken { token } => ParseError { error: ParseErrorType::ExtraToken(token.1), location: token.0, source_path, }, LalrpopError::User { error } => ParseError { error: ParseErrorType::Lexical(error.error), location: error.location, source_path, }, LalrpopError::UnrecognizedToken { token, expected } => { // Hacky, but it's how CPython does it. See PyParser_AddToken, // in particular "Only one possible expected token" comment. 
let expected = (expected.len() == 1).then(|| expected[0].clone()); ParseError { error: ParseErrorType::UnrecognizedToken(token.1, expected), location: token.0.with_col_offset(1), source_path, } } LalrpopError::UnrecognizedEOF { location, expected } => { // This could be an initial indentation error that we should ignore let indent_error = expected == ["Indent"]; if indent_error { ParseError { error: ParseErrorType::Lexical(LexicalErrorType::IndentationError), location, source_path, } } else { ParseError { error: ParseErrorType::Eof, location, source_path, } } } } } impl fmt::Display for ParseErrorType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ParseErrorType::Eof => write!(f, "Got unexpected EOF"), ParseErrorType::ExtraToken(ref tok) => write!(f, "Got extraneous token: {tok:?}"), ParseErrorType::InvalidToken => write!(f, "Got invalid token"), ParseErrorType::UnrecognizedToken(ref tok, ref expected) => { if *tok == Tok::Indent { write!(f, "unexpected indent") } else if expected.as_deref() == Some("Indent") { write!(f, "expected an indented block") } else { write!(f, "invalid syntax. 
Got unexpected token {tok}") } } ParseErrorType::Lexical(ref error) => write!(f, "{error}"), } } } impl ParseErrorType { pub fn is_indentation_error(&self) -> bool { match self { ParseErrorType::Lexical(LexicalErrorType::IndentationError) => true, ParseErrorType::UnrecognizedToken(token, expected) => { *token == Tok::Indent || expected.clone() == Some("Indent".to_owned()) } _ => false, } } pub fn is_tab_error(&self) -> bool { matches!( self, ParseErrorType::Lexical(LexicalErrorType::TabError) | ParseErrorType::Lexical(LexicalErrorType::TabsAfterSpaces) ) } } rustpython-parser-0.2.0/src/function.rs000064400000000000000000000103251046102023000163370ustar 00000000000000use crate::ast; use crate::error::{LexicalError, LexicalErrorType}; use rustc_hash::FxHashSet; pub struct ArgumentList { pub args: Vec, pub keywords: Vec, } type ParameterDefs = (Vec, Vec, Vec); type ParameterDef = (ast::Arg, Option); pub fn validate_arguments(arguments: ast::Arguments) -> Result { let mut all_args: Vec<&ast::Located> = vec![]; all_args.extend(arguments.posonlyargs.iter()); all_args.extend(arguments.args.iter()); if let Some(a) = &arguments.vararg { all_args.push(a); } all_args.extend(arguments.kwonlyargs.iter()); if let Some(a) = &arguments.kwarg { all_args.push(a); } let mut all_arg_names = FxHashSet::with_hasher(Default::default()); for arg in all_args { let arg_name = &arg.node.arg; if !all_arg_names.insert(arg_name) { return Err(LexicalError { error: LexicalErrorType::DuplicateArgumentError(arg_name.to_string()), location: arg.location, }); } } Ok(arguments) } pub fn parse_params( params: (Vec, Vec), ) -> Result { let mut pos_only = Vec::with_capacity(params.0.len()); let mut names = Vec::with_capacity(params.1.len()); let mut defaults = vec![]; let mut try_default = |name: &ast::Arg, default| { if let Some(default) = default { defaults.push(default); } else if !defaults.is_empty() { // Once we have started with defaults, all remaining arguments must // have defaults return 
Err(LexicalError { error: LexicalErrorType::DefaultArgumentError, location: name.location, }); } Ok(()) }; for (name, default) in params.0 { try_default(&name, default)?; pos_only.push(name); } for (name, default) in params.1 { try_default(&name, default)?; names.push(name); } Ok((pos_only, names, defaults)) } type FunctionArgument = ( Option<(ast::Location, ast::Location, Option)>, ast::Expr, ); pub fn parse_args(func_args: Vec) -> Result { let mut args = vec![]; let mut keywords = vec![]; let mut keyword_names = FxHashSet::with_capacity_and_hasher(func_args.len(), Default::default()); let mut double_starred = false; for (name, value) in func_args { match name { Some((start, end, name)) => { if let Some(keyword_name) = &name { if keyword_names.contains(keyword_name) { return Err(LexicalError { error: LexicalErrorType::DuplicateKeywordArgumentError( keyword_name.to_string(), ), location: start, }); } keyword_names.insert(keyword_name.clone()); } else { double_starred = true; } keywords.push(ast::Keyword::new( start, end, ast::KeywordData { arg: name, value }, )); } None => { // Allow starred arguments after keyword arguments but // not after double-starred arguments. if !keywords.is_empty() && !is_starred(&value) { return Err(LexicalError { error: LexicalErrorType::PositionalArgumentError, location: value.location, }); } else if double_starred { return Err(LexicalError { error: LexicalErrorType::UnpackedArgumentError, location: value.location, }); } args.push(value); } } } Ok(ArgumentList { args, keywords }) } fn is_starred(exp: &ast::Expr) -> bool { matches!(exp.node, ast::ExprKind::Starred { .. }) } rustpython-parser-0.2.0/src/lexer.rs000064400000000000000000001461671046102023000156470ustar 00000000000000//! This module takes care of lexing python source text. //! //! This means source code is translated into separate tokens. 
pub use super::token::{StringKind, Tok}; use crate::ast::Location; use crate::error::{LexicalError, LexicalErrorType}; use num_bigint::BigInt; use num_traits::identities::Zero; use num_traits::Num; use std::char; use std::cmp::Ordering; use std::ops::Index; use std::slice::SliceIndex; use std::str::FromStr; use unic_emoji_char::is_emoji_presentation; use unic_ucd_ident::{is_xid_continue, is_xid_start}; #[derive(Clone, Copy, PartialEq, Debug, Default)] struct IndentationLevel { tabs: usize, spaces: usize, } impl IndentationLevel { fn compare_strict( &self, other: &IndentationLevel, location: Location, ) -> Result { // We only know for sure that we're smaller or bigger if tabs // and spaces both differ in the same direction. Otherwise we're // dependent on the size of tabs. match self.tabs.cmp(&other.tabs) { Ordering::Less => { if self.spaces <= other.spaces { Ok(Ordering::Less) } else { Err(LexicalError { location, error: LexicalErrorType::TabError, }) } } Ordering::Greater => { if self.spaces >= other.spaces { Ok(Ordering::Greater) } else { Err(LexicalError { location, error: LexicalErrorType::TabError, }) } } Ordering::Equal => Ok(self.spaces.cmp(&other.spaces)), } } } #[derive(Debug)] struct Indentations { indent_stack: Vec, } impl Indentations { fn is_empty(&self) -> bool { self.indent_stack.len() == 1 } fn push(&mut self, indent: IndentationLevel) { self.indent_stack.push(indent); } fn pop(&mut self) -> Option { if self.is_empty() { return None; } self.indent_stack.pop() } fn current(&self) -> &IndentationLevel { self.indent_stack .last() .expect("Indentations must have at least one level") } } impl Default for Indentations { fn default() -> Self { Self { indent_stack: vec![IndentationLevel::default()], } } } struct CharWindow, const N: usize> { source: T, window: [Option; N], } impl CharWindow where T: Iterator, { fn new(source: T) -> Self { Self { source, window: [None; N], } } fn slide(&mut self) -> Option { self.window.rotate_left(1); let next = 
self.source.next(); *self.window.last_mut().expect("never empty") = next; next } fn change_first(&mut self, ch: char) { *self.window.first_mut().expect("never empty") = Some(ch); } } impl Index for CharWindow where T: Iterator, Idx: SliceIndex<[Option]>, { type Output = Idx::Output; fn index(&self, index: Idx) -> &Self::Output { &self.window[index] } } pub struct Lexer> { window: CharWindow, at_begin_of_line: bool, nesting: usize, // Amount of parenthesis indentations: Indentations, pending: Vec, location: Location, } // generated in build.rs, in gen_phf() pub static KEYWORDS: phf::Map<&'static str, Tok> = include!(concat!(env!("OUT_DIR"), "/keywords.rs")); pub type Spanned = (Location, Tok, Location); pub type LexResult = Result; #[inline] pub fn make_tokenizer(source: &str) -> impl Iterator + '_ { make_tokenizer_located(source, Location::default()) } pub fn make_tokenizer_located( source: &str, start_location: Location, ) -> impl Iterator + '_ { let nlh = NewlineHandler::new(source.chars()); Lexer::new(nlh, start_location) } // The newline handler is an iterator which collapses different newline // types into \n always. 
pub struct NewlineHandler> { window: CharWindow, } impl NewlineHandler where T: Iterator, { pub fn new(source: T) -> Self { let mut nlh = NewlineHandler { window: CharWindow::new(source), }; nlh.shift(); nlh.shift(); nlh } fn shift(&mut self) -> Option { let result = self.window[0]; self.window.slide(); result } } impl Iterator for NewlineHandler where T: Iterator, { type Item = char; fn next(&mut self) -> Option { // Collapse \r\n into \n loop { match self.window[..2] { [Some('\r'), Some('\n')] => { // Windows EOL into \n self.shift(); } [Some('\r'), _] => { // MAC EOL into \n self.window.change_first('\n'); } _ => break, } } self.shift() } } impl Lexer where T: Iterator, { pub fn new(input: T, start: Location) -> Self { let mut lxr = Lexer { at_begin_of_line: true, nesting: 0, indentations: Indentations::default(), pending: Vec::new(), location: start, window: CharWindow::new(input), }; lxr.window.slide(); lxr.window.slide(); lxr.window.slide(); // TODO: Handle possible mismatch between BOM and explicit encoding declaration. if let Some('\u{feff}') = lxr.window[0] { lxr.window.slide(); } lxr } // Lexer helper functions: fn lex_identifier(&mut self) -> LexResult { // Detect potential string like rb'' b'' f'' u'' r'' match self.window[..3] { [Some(c), Some('"' | '\''), ..] => { if let Ok(kind) = StringKind::try_from(c) { return self.lex_string(kind); } } [Some(c1), Some(c2), Some('"' | '\'')] => { if let Ok(kind) = StringKind::try_from([c1, c2]) { return self.lex_string(kind); } } _ => {} }; let start_pos = self.get_pos(); let mut name = String::new(); while self.is_identifier_continuation() { name.push(self.next_char().unwrap()); } let end_pos = self.get_pos(); if let Some(tok) = KEYWORDS.get(name.as_str()) { Ok((start_pos, tok.clone(), end_pos)) } else { Ok((start_pos, Tok::Name { name }, end_pos)) } } /// Numeric lexing. The feast can start! 
fn lex_number(&mut self) -> LexResult { let start_pos = self.get_pos(); match self.window[..2] { [Some('0'), Some('x' | 'X')] => { // Hex! (0xdeadbeef) self.next_char(); self.next_char(); self.lex_number_radix(start_pos, 16) } [Some('0'), Some('o' | 'O')] => { // Octal style! (0o377) self.next_char(); self.next_char(); self.lex_number_radix(start_pos, 8) } [Some('0'), Some('b' | 'B')] => { // Binary! (0b_1110_0101) self.next_char(); self.next_char(); self.lex_number_radix(start_pos, 2) } _ => self.lex_normal_number(), } } /// Lex a hex/octal/decimal/binary number without a decimal point. fn lex_number_radix(&mut self, start_pos: Location, radix: u32) -> LexResult { let value_text = self.radix_run(radix); let end_pos = self.get_pos(); let value = BigInt::from_str_radix(&value_text, radix).map_err(|e| LexicalError { error: LexicalErrorType::OtherError(format!("{e:?}")), location: start_pos, })?; Ok((start_pos, Tok::Int { value }, end_pos)) } /// Lex a normal number, that is, no octal, hex or binary number. 
fn lex_normal_number(&mut self) -> LexResult { let start_pos = self.get_pos(); let start_is_zero = self.window[0] == Some('0'); // Normal number: let mut value_text = self.radix_run(10); // If float: if self.window[0] == Some('.') || self.at_exponent() { // Take '.': if self.window[0] == Some('.') { if self.window[1] == Some('_') { return Err(LexicalError { error: LexicalErrorType::OtherError("Invalid Syntax".to_owned()), location: self.get_pos(), }); } value_text.push(self.next_char().unwrap()); value_text.push_str(&self.radix_run(10)); } // 1e6 for example: if let Some('e' | 'E') = self.window[0] { if self.window[1] == Some('_') { return Err(LexicalError { error: LexicalErrorType::OtherError("Invalid Syntax".to_owned()), location: self.get_pos(), }); } value_text.push(self.next_char().unwrap().to_ascii_lowercase()); // Optional +/- if matches!(self.window[0], Some('-' | '+')) { if self.window[1] == Some('_') { return Err(LexicalError { error: LexicalErrorType::OtherError("Invalid Syntax".to_owned()), location: self.get_pos(), }); } value_text.push(self.next_char().unwrap()); } value_text.push_str(&self.radix_run(10)); } let value = f64::from_str(&value_text).map_err(|_| LexicalError { error: LexicalErrorType::OtherError("Invalid decimal literal".to_owned()), location: self.get_pos(), })?; // Parse trailing 'j': if matches!(self.window[0], Some('j' | 'J')) { self.next_char(); let end_pos = self.get_pos(); Ok(( start_pos, Tok::Complex { real: 0.0, imag: value, }, end_pos, )) } else { let end_pos = self.get_pos(); Ok((start_pos, Tok::Float { value }, end_pos)) } } else { // Parse trailing 'j': if matches!(self.window[0], Some('j' | 'J')) { self.next_char(); let end_pos = self.get_pos(); let imag = f64::from_str(&value_text).unwrap(); Ok((start_pos, Tok::Complex { real: 0.0, imag }, end_pos)) } else { let end_pos = self.get_pos(); let value = value_text.parse::().unwrap(); if start_is_zero && !value.is_zero() { // leading zeros in decimal integer literals are not 
permitted return Err(LexicalError { error: LexicalErrorType::OtherError("Invalid Token".to_owned()), location: self.get_pos(), }); } Ok((start_pos, Tok::Int { value }, end_pos)) } } } /// Consume a sequence of numbers with the given radix, /// the digits can be decorated with underscores /// like this: '1_2_3_4' == '1234' fn radix_run(&mut self, radix: u32) -> String { let mut value_text = String::new(); loop { if let Some(c) = self.take_number(radix) { value_text.push(c); } else if self.window[0] == Some('_') && Lexer::::is_digit_of_radix(self.window[1], radix) { self.next_char(); } else { break; } } value_text } /// Consume a single character with the given radix. fn take_number(&mut self, radix: u32) -> Option { let take_char = Lexer::::is_digit_of_radix(self.window[0], radix); take_char.then(|| self.next_char().unwrap()) } /// Test if a digit is of a certain radix. fn is_digit_of_radix(c: Option, radix: u32) -> bool { match radix { 2 => matches!(c, Some('0'..='1')), 8 => matches!(c, Some('0'..='7')), 10 => matches!(c, Some('0'..='9')), 16 => matches!(c, Some('0'..='9') | Some('a'..='f') | Some('A'..='F')), other => unimplemented!("Radix not implemented: {}", other), } } /// Test if we face '[eE][-+]?[0-9]+' fn at_exponent(&self) -> bool { match self.window[..2] { [Some('e' | 'E'), Some('+' | '-')] => matches!(self.window[2], Some('0'..='9')), [Some('e' | 'E'), Some('0'..='9')] => true, _ => false, } } /// Skip everything until end of line fn lex_comment(&mut self) -> LexResult { let start_pos = self.get_pos(); let mut value = String::new(); value.push(self.next_char().unwrap()); loop { match self.window[0] { Some('\n') | None => { let end_pos = self.get_pos(); return Ok((start_pos, Tok::Comment(value), end_pos)); } Some(_) => {} } value.push(self.next_char().unwrap()); } } fn lex_string(&mut self, kind: StringKind) -> LexResult { let start_pos = self.get_pos(); for _ in 0..kind.prefix_len() { self.next_char(); } let quote_char = self.next_char().unwrap(); let 
mut string_content = String::new(); // If the next two characters are also the quote character, then we have a triple-quoted // string; consume those two characters and ensure that we require a triple-quote to close let triple_quoted = if self.window[..2] == [Some(quote_char); 2] { self.next_char(); self.next_char(); true } else { false }; loop { match self.next_char() { Some(c) => { if c == '\\' { if let Some(next_c) = self.next_char() { string_content.push('\\'); string_content.push(next_c); continue; } } if c == '\n' && !triple_quoted { return Err(LexicalError { error: LexicalErrorType::OtherError( "EOL while scanning string literal".to_owned(), ), location: self.get_pos(), }); } if c == quote_char { if triple_quoted { // Look ahead at the next two characters; if we have two more // quote_chars, it's the end of the string; consume the remaining // closing quotes and break the loop if self.window[..2] == [Some(quote_char); 2] { self.next_char(); self.next_char(); break; } } else { break; } } string_content.push(c); } None => { return Err(LexicalError { error: if triple_quoted { LexicalErrorType::Eof } else { LexicalErrorType::StringError }, location: self.get_pos(), }); } } } let end_pos = self.get_pos(); let tok = Tok::String { value: string_content, kind, triple_quoted, }; Ok((start_pos, tok, end_pos)) } fn is_identifier_start(&self, c: char) -> bool { c == '_' || is_xid_start(c) } fn is_identifier_continuation(&self) -> bool { match self.window[0] { Some('_' | '0'..='9') => true, Some(c) => is_xid_continue(c), _ => false, } } /// This is the main entry point. Call this function to retrieve the next token. /// This function is used by the iterator implementation. fn inner_next(&mut self) -> LexResult { // top loop, keep on processing, until we have something pending. 
while self.pending.is_empty() { // Detect indentation levels if self.at_begin_of_line { self.handle_indentations()?; } self.consume_normal()?; } Ok(self.pending.remove(0)) } /// Given we are at the start of a line, count the number of spaces and/or tabs until the first character. fn eat_indentation(&mut self) -> Result { // Determine indentation: let mut spaces: usize = 0; let mut tabs: usize = 0; loop { match self.window[0] { Some(' ') => { /* if tabs != 0 { // Don't allow spaces after tabs as part of indentation. // This is technically stricter than python3 but spaces after // tabs is even more insane than mixing spaces and tabs. return Some(Err(LexicalError { error: LexicalErrorType::OtherError("Spaces not allowed as part of indentation after tabs".to_owned()), location: self.get_pos(), })); } */ self.next_char(); spaces += 1; } Some('\t') => { if spaces != 0 { // Don't allow tabs after spaces as part of indentation. // This is technically stricter than python3 but spaces before // tabs is even more insane than mixing spaces and tabs. return Err(LexicalError { error: LexicalErrorType::TabsAfterSpaces, location: self.get_pos(), }); } self.next_char(); tabs += 1; } Some('#') => { let comment = self.lex_comment()?; self.emit(comment); spaces = 0; tabs = 0; } Some('\x0C') => { // Form feed character! // Reset indentation for the Emacs user. self.next_char(); spaces = 0; tabs = 0; } Some('\n') => { // Empty line! 
self.next_char(); spaces = 0; tabs = 0; } None => { spaces = 0; tabs = 0; break; } _ => { self.at_begin_of_line = false; break; } } } Ok(IndentationLevel { tabs, spaces }) } fn handle_indentations(&mut self) -> Result<(), LexicalError> { let indentation_level = self.eat_indentation()?; if self.nesting != 0 { return Ok(()); } // Determine indent or dedent: let current_indentation = self.indentations.current(); let ordering = indentation_level.compare_strict(current_indentation, self.get_pos())?; match ordering { Ordering::Equal => { // Same same } Ordering::Greater => { // New indentation level: self.indentations.push(indentation_level); let tok_pos = self.get_pos(); self.emit((tok_pos, Tok::Indent, tok_pos)); } Ordering::Less => { // One or more dedentations // Pop off other levels until col is found: loop { let current_indentation = self.indentations.current(); let ordering = indentation_level.compare_strict(current_indentation, self.get_pos())?; match ordering { Ordering::Less => { self.indentations.pop(); let tok_pos = self.get_pos(); self.emit((tok_pos, Tok::Dedent, tok_pos)); } Ordering::Equal => { // We arrived at proper level of indentation. break; } Ordering::Greater => { return Err(LexicalError { error: LexicalErrorType::IndentationError, location: self.get_pos(), }); } } } } } Ok(()) } /// Take a look at the next character, if any, and decide upon the next steps. fn consume_normal(&mut self) -> Result<(), LexicalError> { // Check if we have some character: if let Some(c) = self.window[0] { // First check identifier: if self.is_identifier_start(c) { let identifier = self.lex_identifier()?; self.emit(identifier); } else if is_emoji_presentation(c) { let tok_start = self.get_pos(); self.next_char(); let tok_end = self.get_pos(); self.emit(( tok_start, Tok::Name { name: c.to_string(), }, tok_end, )); } else { self.consume_character(c)?; } } else { // We reached end of file. let tok_pos = self.get_pos(); // First of all, we need all nestings to be finished. 
if self.nesting > 0 { return Err(LexicalError { error: LexicalErrorType::Eof, location: tok_pos, }); } // Next, insert a trailing newline, if required. if !self.at_begin_of_line { self.at_begin_of_line = true; self.emit((tok_pos, Tok::Newline, tok_pos)); } // Next, flush the indentation stack to zero. while !self.indentations.is_empty() { self.indentations.pop(); self.emit((tok_pos, Tok::Dedent, tok_pos)); } self.emit((tok_pos, Tok::EndOfFile, tok_pos)); } Ok(()) } /// Okay, we are facing a weird character, what is it? Determine that. fn consume_character(&mut self, c: char) -> Result<(), LexicalError> { match c { '0'..='9' => { let number = self.lex_number()?; self.emit(number); } '#' => { let comment = self.lex_comment()?; self.emit(comment); } '"' | '\'' => { let string = self.lex_string(StringKind::String)?; self.emit(string); } '=' => { let tok_start = self.get_pos(); self.next_char(); match self.window[0] { Some('=') => { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::EqEqual, tok_end)); } _ => { let tok_end = self.get_pos(); self.emit((tok_start, Tok::Equal, tok_end)); } } } '+' => { let tok_start = self.get_pos(); self.next_char(); if let Some('=') = self.window[0] { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::PlusEqual, tok_end)); } else { let tok_end = self.get_pos(); self.emit((tok_start, Tok::Plus, tok_end)); } } '*' => { let tok_start = self.get_pos(); self.next_char(); match self.window[0] { Some('=') => { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::StarEqual, tok_end)); } Some('*') => { self.next_char(); match self.window[0] { Some('=') => { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::DoubleStarEqual, tok_end)); } _ => { let tok_end = self.get_pos(); self.emit((tok_start, Tok::DoubleStar, tok_end)); } } } _ => { let tok_end = self.get_pos(); self.emit((tok_start, Tok::Star, tok_end)); } } } '/' => { let tok_start = self.get_pos(); 
self.next_char(); match self.window[0] { Some('=') => { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::SlashEqual, tok_end)); } Some('/') => { self.next_char(); match self.window[0] { Some('=') => { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::DoubleSlashEqual, tok_end)); } _ => { let tok_end = self.get_pos(); self.emit((tok_start, Tok::DoubleSlash, tok_end)); } } } _ => { let tok_end = self.get_pos(); self.emit((tok_start, Tok::Slash, tok_end)); } } } '%' => { let tok_start = self.get_pos(); self.next_char(); if let Some('=') = self.window[0] { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::PercentEqual, tok_end)); } else { let tok_end = self.get_pos(); self.emit((tok_start, Tok::Percent, tok_end)); } } '|' => { let tok_start = self.get_pos(); self.next_char(); if let Some('=') = self.window[0] { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::VbarEqual, tok_end)); } else { let tok_end = self.get_pos(); self.emit((tok_start, Tok::Vbar, tok_end)); } } '^' => { let tok_start = self.get_pos(); self.next_char(); if let Some('=') = self.window[0] { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::CircumflexEqual, tok_end)); } else { let tok_end = self.get_pos(); self.emit((tok_start, Tok::CircumFlex, tok_end)); } } '&' => { let tok_start = self.get_pos(); self.next_char(); if let Some('=') = self.window[0] { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::AmperEqual, tok_end)); } else { let tok_end = self.get_pos(); self.emit((tok_start, Tok::Amper, tok_end)); } } '-' => { let tok_start = self.get_pos(); self.next_char(); match self.window[0] { Some('=') => { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::MinusEqual, tok_end)); } Some('>') => { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::Rarrow, tok_end)); } _ => { let tok_end = self.get_pos(); 
self.emit((tok_start, Tok::Minus, tok_end)); } } } '@' => { let tok_start = self.get_pos(); self.next_char(); if let Some('=') = self.window[0] { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::AtEqual, tok_end)); } else { let tok_end = self.get_pos(); self.emit((tok_start, Tok::At, tok_end)); } } '!' => { let tok_start = self.get_pos(); self.next_char(); if let Some('=') = self.window[0] { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::NotEqual, tok_end)); } else { return Err(LexicalError { error: LexicalErrorType::UnrecognizedToken { tok: '!' }, location: tok_start, }); } } '~' => { self.eat_single_char(Tok::Tilde); } '(' => { self.eat_single_char(Tok::Lpar); self.nesting += 1; } ')' => { self.eat_single_char(Tok::Rpar); if self.nesting == 0 { return Err(LexicalError { error: LexicalErrorType::NestingError, location: self.get_pos(), }); } self.nesting -= 1; } '[' => { self.eat_single_char(Tok::Lsqb); self.nesting += 1; } ']' => { self.eat_single_char(Tok::Rsqb); if self.nesting == 0 { return Err(LexicalError { error: LexicalErrorType::NestingError, location: self.get_pos(), }); } self.nesting -= 1; } '{' => { self.eat_single_char(Tok::Lbrace); self.nesting += 1; } '}' => { self.eat_single_char(Tok::Rbrace); if self.nesting == 0 { return Err(LexicalError { error: LexicalErrorType::NestingError, location: self.get_pos(), }); } self.nesting -= 1; } ':' => { let tok_start = self.get_pos(); self.next_char(); if let Some('=') = self.window[0] { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::ColonEqual, tok_end)); } else { let tok_end = self.get_pos(); self.emit((tok_start, Tok::Colon, tok_end)); } } ';' => { self.eat_single_char(Tok::Semi); } '<' => { let tok_start = self.get_pos(); self.next_char(); match self.window[0] { Some('<') => { self.next_char(); match self.window[0] { Some('=') => { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::LeftShiftEqual, 
tok_end)); } _ => { let tok_end = self.get_pos(); self.emit((tok_start, Tok::LeftShift, tok_end)); } } } Some('=') => { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::LessEqual, tok_end)); } _ => { let tok_end = self.get_pos(); self.emit((tok_start, Tok::Less, tok_end)); } } } '>' => { let tok_start = self.get_pos(); self.next_char(); match self.window[0] { Some('>') => { self.next_char(); match self.window[0] { Some('=') => { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::RightShiftEqual, tok_end)); } _ => { let tok_end = self.get_pos(); self.emit((tok_start, Tok::RightShift, tok_end)); } } } Some('=') => { self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::GreaterEqual, tok_end)); } _ => { let tok_end = self.get_pos(); self.emit((tok_start, Tok::Greater, tok_end)); } } } ',' => { let tok_start = self.get_pos(); self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::Comma, tok_end)); } '.' 
=> { if let Some('0'..='9') = self.window[1] { let number = self.lex_number()?; self.emit(number); } else { let tok_start = self.get_pos(); self.next_char(); if self.window[..2] == [Some('.'); 2] { self.next_char(); self.next_char(); let tok_end = self.get_pos(); self.emit((tok_start, Tok::Ellipsis, tok_end)); } else { let tok_end = self.get_pos(); self.emit((tok_start, Tok::Dot, tok_end)); } } } '\n' => { let tok_start = self.get_pos(); self.next_char(); let tok_end = self.get_pos(); // Depending on the nesting level, we emit newline or not: if self.nesting == 0 { self.at_begin_of_line = true; self.emit((tok_start, Tok::Newline, tok_end)); } } ' ' | '\t' | '\x0C' => { // Skip whitespaces self.next_char(); while let Some(' ' | '\t' | '\x0C') = self.window[0] { self.next_char(); } } '\\' => { self.next_char(); if let Some('\n') = self.window[0] { self.next_char(); } else { return Err(LexicalError { error: LexicalErrorType::LineContinuationError, location: self.get_pos(), }); } if self.window[0].is_none() { return Err(LexicalError { error: LexicalErrorType::Eof, location: self.get_pos(), }); } } _ => { let c = self.next_char(); return Err(LexicalError { error: LexicalErrorType::UnrecognizedToken { tok: c.unwrap() }, location: self.get_pos(), }); } // Ignore all the rest.. } Ok(()) } fn eat_single_char(&mut self, ty: Tok) { let tok_start = self.get_pos(); self.next_char().unwrap(); let tok_end = self.get_pos(); self.emit((tok_start, ty, tok_end)); } /// Helper function to go to the next character coming up. fn next_char(&mut self) -> Option { let c = self.window[0]; self.window.slide(); if c == Some('\n') { self.location.newline(); } else { self.location.go_right(); } c } /// Helper function to retrieve the current position. fn get_pos(&self) -> Location { self.location } /// Helper function to emit a lexed token to the queue of tokens. fn emit(&mut self, spanned: Spanned) { self.pending.push(spanned); } } /* Implement iterator pattern for the get_tok function. 
Calling the next element in the iterator will yield the next lexical token. */ impl Iterator for Lexer where T: Iterator, { type Item = LexResult; fn next(&mut self) -> Option { // Idea: create some sort of hash map for single char tokens: // let mut X = HashMap::new(); // X.insert('=', Tok::Equal); let token = self.inner_next(); trace!( "Lex token {:?}, nesting={:?}, indent stack: {:?}", token, self.nesting, self.indentations, ); match token { Ok((_, Tok::EndOfFile, _)) => None, r => Some(r), } } } #[cfg(test)] mod tests { use super::{make_tokenizer, NewlineHandler, StringKind, Tok}; use num_bigint::BigInt; const WINDOWS_EOL: &str = "\r\n"; const MAC_EOL: &str = "\r"; const UNIX_EOL: &str = "\n"; pub fn lex_source(source: &str) -> Vec { let lexer = make_tokenizer(source); lexer.map(|x| x.unwrap().1).collect() } #[test] fn test_newline_processor() { // Escape \ followed by \n (by removal): let src = "b\\\r\n"; assert_eq!(4, src.len()); let nlh = NewlineHandler::new(src.chars()); let x: Vec = nlh.collect(); assert_eq!(vec!['b', '\\', '\n'], x); } fn stok(s: &str) -> Tok { Tok::String { value: s.to_owned(), kind: StringKind::String, triple_quoted: false, } } fn raw_stok(s: &str) -> Tok { Tok::String { value: s.to_owned(), kind: StringKind::RawString, triple_quoted: false, } } #[test] fn test_numbers() { let source = "0x2f 0o12 0b1101 0 123 123_45_67_890 0.2 1e+2 2.1e3 2j 2.2j"; let tokens = lex_source(source); assert_eq!( tokens, vec![ Tok::Int { value: BigInt::from(47), }, Tok::Int { value: BigInt::from(10) }, Tok::Int { value: BigInt::from(13), }, Tok::Int { value: BigInt::from(0), }, Tok::Int { value: BigInt::from(123), }, Tok::Int { value: BigInt::from(1234567890), }, Tok::Float { value: 0.2 }, Tok::Float { value: 100.0 }, Tok::Float { value: 2100.0 }, Tok::Complex { real: 0.0, imag: 2.0, }, Tok::Complex { real: 0.0, imag: 2.2, }, Tok::Newline, ] ); } macro_rules! 
test_line_comment { ($($name:ident: $eol:expr,)*) => { $( #[test] fn $name() { let source = format!(r"99232 # {}", $eol); let tokens = lex_source(&source); assert_eq!(tokens, vec![Tok::Int { value: BigInt::from(99232) }, Tok::Comment(format!("# {}", $eol)), Tok::Newline]); } )* } } test_line_comment! { test_line_comment_long: " foo", test_line_comment_whitespace: " ", test_line_comment_single_whitespace: " ", test_line_comment_empty: "", } macro_rules! test_comment_until_eol { ($($name:ident: $eol:expr,)*) => { $( #[test] fn $name() { let source = format!("123 # Foo{}456", $eol); let tokens = lex_source(&source); assert_eq!( tokens, vec![ Tok::Int { value: BigInt::from(123) }, Tok::Comment("# Foo".to_string()), Tok::Newline, Tok::Int { value: BigInt::from(456) }, Tok::Newline, ] ) } )* } } test_comment_until_eol! { test_comment_until_windows_eol: WINDOWS_EOL, test_comment_until_mac_eol: MAC_EOL, test_comment_until_unix_eol: UNIX_EOL, } #[test] fn test_assignment() { let source = r"avariable = 99 + 2-0"; let tokens = lex_source(source); assert_eq!( tokens, vec![ Tok::Name { name: String::from("avariable"), }, Tok::Equal, Tok::Int { value: BigInt::from(99) }, Tok::Plus, Tok::Int { value: BigInt::from(2) }, Tok::Minus, Tok::Int { value: BigInt::from(0) }, Tok::Newline, ] ); } macro_rules! test_indentation_with_eol { ($($name:ident: $eol:expr,)*) => { $( #[test] fn $name() { let source = format!("def foo():{} return 99{}{}", $eol, $eol, $eol); let tokens = lex_source(&source); assert_eq!( tokens, vec![ Tok::Def, Tok::Name { name: String::from("foo"), }, Tok::Lpar, Tok::Rpar, Tok::Colon, Tok::Newline, Tok::Indent, Tok::Return, Tok::Int { value: BigInt::from(99) }, Tok::Newline, Tok::Dedent, ] ); } )* }; } test_indentation_with_eol! { test_indentation_windows_eol: WINDOWS_EOL, test_indentation_mac_eol: MAC_EOL, test_indentation_unix_eol: UNIX_EOL, } macro_rules! 
test_double_dedent_with_eol { ($($name:ident: $eol:expr,)*) => { $( #[test] fn $name() { let source = format!("def foo():{} if x:{}{} return 99{}{}", $eol, $eol, $eol, $eol, $eol); let tokens = lex_source(&source); assert_eq!( tokens, vec![ Tok::Def, Tok::Name { name: String::from("foo"), }, Tok::Lpar, Tok::Rpar, Tok::Colon, Tok::Newline, Tok::Indent, Tok::If, Tok::Name { name: String::from("x"), }, Tok::Colon, Tok::Newline, Tok::Indent, Tok::Return, Tok::Int { value: BigInt::from(99) }, Tok::Newline, Tok::Dedent, Tok::Dedent, ] ); } )* } } macro_rules! test_double_dedent_with_tabs { ($($name:ident: $eol:expr,)*) => { $( #[test] fn $name() { let source = format!("def foo():{}\tif x:{}{}\t return 99{}{}", $eol, $eol, $eol, $eol, $eol); let tokens = lex_source(&source); assert_eq!( tokens, vec![ Tok::Def, Tok::Name { name: String::from("foo"), }, Tok::Lpar, Tok::Rpar, Tok::Colon, Tok::Newline, Tok::Indent, Tok::If, Tok::Name { name: String::from("x"), }, Tok::Colon, Tok::Newline, Tok::Indent, Tok::Return, Tok::Int { value: BigInt::from(99) }, Tok::Newline, Tok::Dedent, Tok::Dedent, ] ); } )* } } test_double_dedent_with_eol! { test_double_dedent_windows_eol: WINDOWS_EOL, test_double_dedent_mac_eol: MAC_EOL, test_double_dedent_unix_eol: UNIX_EOL, } test_double_dedent_with_tabs! { test_double_dedent_tabs_windows_eol: WINDOWS_EOL, test_double_dedent_tabs_mac_eol: MAC_EOL, test_double_dedent_tabs_unix_eol: UNIX_EOL, } macro_rules! test_newline_in_brackets { ($($name:ident: $eol:expr,)*) => { $( #[test] fn $name() { let source = format!("x = [{} 1,2{}]{}", $eol, $eol, $eol); let tokens = lex_source(&source); assert_eq!( tokens, vec![ Tok::Name { name: String::from("x"), }, Tok::Equal, Tok::Lsqb, Tok::Int { value: BigInt::from(1) }, Tok::Comma, Tok::Int { value: BigInt::from(2) }, Tok::Rsqb, Tok::Newline, ] ); } )* }; } test_newline_in_brackets! 
{ test_newline_in_brackets_windows_eol: WINDOWS_EOL, test_newline_in_brackets_mac_eol: MAC_EOL, test_newline_in_brackets_unix_eol: UNIX_EOL, } #[test] fn test_operators() { let source = "//////=/ /"; let tokens = lex_source(source); assert_eq!( tokens, vec![ Tok::DoubleSlash, Tok::DoubleSlash, Tok::DoubleSlashEqual, Tok::Slash, Tok::Slash, Tok::Newline, ] ); } #[test] fn test_string() { let source = r#""double" 'single' 'can\'t' "\\\"" '\t\r\n' '\g' r'raw\'' '\420' '\200\0a'"#; let tokens = lex_source(source); assert_eq!( tokens, vec![ stok("double"), stok("single"), stok(r"can\'t"), stok(r#"\\\""#), stok(r"\t\r\n"), stok(r"\g"), raw_stok(r"raw\'"), stok(r"\420"), stok(r"\200\0a"), Tok::Newline, ] ); } macro_rules! test_string_continuation { ($($name:ident: $eol:expr,)*) => { $( #[test] fn $name() { let source = format!("\"abc\\{}def\"", $eol); let tokens = lex_source(&source); assert_eq!( tokens, vec![ stok("abc\\\ndef"), Tok::Newline, ] ) } )* } } test_string_continuation! { test_string_continuation_windows_eol: WINDOWS_EOL, test_string_continuation_mac_eol: MAC_EOL, test_string_continuation_unix_eol: UNIX_EOL, } #[test] fn test_escape_unicode_name() { let source = r#""\N{EN SPACE}""#; let tokens = lex_source(source); assert_eq!(tokens, vec![stok(r"\N{EN SPACE}"), Tok::Newline]) } } rustpython-parser-0.2.0/src/lib.rs000064400000000000000000000016371046102023000152660ustar 00000000000000//! This crate can be used to parse python sourcecode into a so //! called AST (abstract syntax tree). //! //! The stages involved in this process are lexical analysis and //! parsing. The lexical analysis splits the sourcecode into //! tokens, and the parsing transforms those tokens into an AST. //! //! For example, one could do this: //! //! ``` //! use rustpython_parser::{parser, ast}; //! //! let python_source = "print('Hello world')"; //! let python_ast = parser::parse_expression(python_source, "").unwrap(); //! //! 
``` #![doc(html_logo_url = "https://raw.githubusercontent.com/RustPython/RustPython/main/logo.png")] #![doc(html_root_url = "https://docs.rs/rustpython-parser/")] #[macro_use] extern crate log; pub use rustpython_ast as ast; pub mod error; mod function; pub mod lexer; pub mod mode; pub mod parser; mod string_parser; #[rustfmt::skip] mod python; mod context; mod string; pub mod token; rustpython-parser-0.2.0/src/mode.rs000064400000000000000000000024361046102023000154420ustar 00000000000000use crate::token::Tok; #[derive(Clone, Copy)] pub enum Mode { Module, Interactive, Expression, } impl Mode { pub(crate) fn to_marker(self) -> Tok { match self { Self::Module => Tok::StartModule, Self::Interactive => Tok::StartInteractive, Self::Expression => Tok::StartExpression, } } } impl From for Mode { fn from(mode: rustpython_compiler_core::Mode) -> Self { use rustpython_compiler_core::Mode as CompileMode; match mode { CompileMode::Exec => Self::Module, CompileMode::Eval => Self::Expression, CompileMode::Single | CompileMode::BlockExpr => Self::Interactive, } } } impl std::str::FromStr for Mode { type Err = ModeParseError; fn from_str(s: &str) -> Result { match s { "exec" | "single" => Ok(Mode::Module), "eval" => Ok(Mode::Expression), _ => Err(ModeParseError { _priv: () }), } } } #[derive(Debug)] pub struct ModeParseError { _priv: (), } impl std::fmt::Display for ModeParseError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, r#"mode should be "exec", "eval", or "single""#) } } rustpython-parser-0.2.0/src/parser.rs000064400000000000000000000230151046102023000160060ustar 00000000000000//! Python parsing. //! //! Use this module to parse python code into an AST. //! There are three ways to parse python code. You could //! parse a whole program, a single statement, or a single //! expression. 
use crate::lexer::{LexResult, Tok}; pub use crate::mode::Mode; use crate::{ast, error::ParseError, lexer, python}; use ast::Location; use itertools::Itertools; use std::iter; /* * Parse python code. * Grammar may be inspired by antlr grammar for python: * https://github.com/antlr/grammars-v4/tree/master/python3 */ /// Parse a full python program, containing usually multiple lines. pub fn parse_program(source: &str, source_path: &str) -> Result { parse(source, Mode::Module, source_path).map(|top| match top { ast::Mod::Module { body, .. } => body, _ => unreachable!(), }) } /// Parses a python expression /// /// # Example /// ``` /// extern crate num_bigint; /// use rustpython_parser::{parser, ast}; /// let expr = parser::parse_expression("1 + 2", "").unwrap(); /// /// assert_eq!( /// expr, /// ast::Expr { /// location: ast::Location::new(1, 0), /// end_location: Some(ast::Location::new(1, 5)), /// custom: (), /// node: ast::ExprKind::BinOp { /// left: Box::new(ast::Expr { /// location: ast::Location::new(1, 0), /// end_location: Some(ast::Location::new(1, 1)), /// custom: (), /// node: ast::ExprKind::Constant { /// value: ast::Constant::Int(1.into()), /// kind: None, /// } /// }), /// op: ast::Operator::Add, /// right: Box::new(ast::Expr { /// location: ast::Location::new(1, 4), /// end_location: Some(ast::Location::new(1, 5)), /// custom: (), /// node: ast::ExprKind::Constant { /// value: ast::Constant::Int(2.into()), /// kind: None, /// } /// }) /// } /// }, /// ); /// /// ``` pub fn parse_expression(source: &str, path: &str) -> Result { parse_expression_located(source, path, Location::new(1, 0)) } pub fn parse_expression_located( source: &str, path: &str, location: Location, ) -> Result { parse_located(source, Mode::Expression, path, location).map(|top| match top { ast::Mod::Expression { body } => *body, _ => unreachable!(), }) } // Parse a given source code pub fn parse(source: &str, mode: Mode, source_path: &str) -> Result { parse_located(source, mode, 
source_path, Location::new(1, 0)) } // Parse a given source code from a given location pub fn parse_located( source: &str, mode: Mode, source_path: &str, location: Location, ) -> Result { let lxr = lexer::make_tokenizer_located(source, location); let marker_token = (Default::default(), mode.to_marker(), Default::default()); let tokenizer = iter::once(Ok(marker_token)) .chain(lxr) .filter_ok(|(_, tok, _)| !matches!(tok, Tok::Comment { .. })); python::TopParser::new() .parse(tokenizer) .map_err(|e| crate::error::parse_error_from_lalrpop(e, source_path)) } // Parse a given token iterator. pub fn parse_tokens( lxr: impl IntoIterator, mode: Mode, source_path: &str, ) -> Result { let marker_token = (Default::default(), mode.to_marker(), Default::default()); let tokenizer = iter::once(Ok(marker_token)) .chain(lxr) .filter_ok(|(_, tok, _)| !matches!(tok, Tok::Comment(_))); python::TopParser::new() .parse(tokenizer) .map_err(|e| crate::error::parse_error_from_lalrpop(e, source_path)) } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_empty() { let parse_ast = parse_program("", "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_string() { let source = String::from("'Hello world'"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_f_string() { let source = String::from("f'Hello world'"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_print_hello() { let source = String::from("print('Hello world')"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_print_2() { let source = String::from("print('Hello world', 2)"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_kwargs() { let source = String::from("my_func('positional', keyword=2)"); let parse_ast = 
parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_if_elif_else() { let source = String::from("if 1: 10\nelif 2: 20\nelse: 30"); let parse_ast = parse_program(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_lambda() { let source = "lambda x, y: x * y"; // lambda(x, y): x * y"; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_tuples() { let source = "a, b = 4, 5"; insta::assert_debug_snapshot!(parse_program(source, "").unwrap()); } #[test] fn test_parse_class() { let source = "\ class Foo(A, B): def __init__(self): pass def method_with_default(self, arg='default'): pass "; insta::assert_debug_snapshot!(parse_program(source, "").unwrap()); } #[test] fn test_parse_dict_comprehension() { let source = String::from("{x1: x2 for y in z}"); let parse_ast = parse_expression(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_list_comprehension() { let source = String::from("[x for y in z]"); let parse_ast = parse_expression(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_double_list_comprehension() { let source = String::from("[x for y, y2 in z for a in b if a < 5 if a > 10]"); let parse_ast = parse_expression(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_generator_comprehension() { let source = String::from("(x for y in z)"); let parse_ast = parse_expression(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_named_expression_generator_comprehension() { let source = String::from("(x := y + 1 for y in z)"); let parse_ast = parse_expression(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_if_else_generator_comprehension() { let source = String::from("(x if y else y for y in z)"); let parse_ast = parse_expression(&source, 
"").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_boolop_or() { let source = String::from("x or y"); let parse_ast = parse_expression(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_boolop_and() { let source = String::from("x and y"); let parse_ast = parse_expression(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_slice() { let source = String::from("x[1:2:3]"); let parse_ast = parse_expression(&source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_with_statement() { let source = "\ with 0: pass with 0 as x: pass with 0, 1: pass with 0 as x, 1 as y: pass with 0 if 1 else 2: pass with 0 if 1 else 2 as x: pass with (): pass with () as x: pass with (0): pass with (0) as x: pass with (0,): pass with (0,) as x: pass with (0, 1): pass with (0, 1) as x: pass with (*a,): pass with (*a,) as x: pass with (0, *a): pass with (0, *a) as x: pass with (a := 0): pass with (a := 0) as x: pass with (a := 0, b := 1): pass with (a := 0, b := 1) as x: pass with (0 as a): pass with (0 as a,): pass with (0 as a, 1 as b): pass with (0 as a, 1 as b,): pass "; insta::assert_debug_snapshot!(parse_program(source, "").unwrap()); } #[test] fn test_with_statement_invalid() { for source in [ "with 0,: pass", "with 0 as x,: pass", "with 0 as *x: pass", "with *a: pass", "with *a as x: pass", "with (*a): pass", "with (*a) as x: pass", "with *a, 0 as x: pass", "with (*a, 0 as x): pass", "with 0 as x, *a: pass", "with (0 as x, *a): pass", "with (0 as x) as y: pass", "with (0 as x), 1: pass", "with ((0 as x)): pass", "with a := 0 as x: pass", "with (a := 0 as x): pass", ] { assert!(parse_program(source, "").is_err()); } } } rustpython-parser-0.2.0/src/python.rs000064400000000000000000000001031046102023000160240ustar 00000000000000#![allow(clippy::all)] #![allow(unused)] include!("../python.rs"); 
rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__ann_assign_name.snap000064400000000000000000000036701046102023000315140ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), node: AnnAssign { target: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, annotation: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Name { id: "int", ctx: Load, }, }, value: Some( Located { location: Location { row: 1, column: 9, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, ), simple: 1, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__assign.snap000064400000000000000000000017331046102023000276560ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 1, }, custom: (), node: Assign { targets: [ Located { location: Location { row: 1, column: 1, }, custom: (), node: Name { id: "x", ctx: Store, }, }, ], value: Located { location: Location { row: 1, column: 5, }, custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, type_comment: None, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__assign_attribute.snap000064400000000000000000000104521046102023000317370ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 15, }, ), custom: (), node: Assign { targets: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 3, }, ), 
custom: (), node: Attribute { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, attr: "y", ctx: Store, }, }, ], value: Located { location: Location { row: 1, column: 6, }, end_location: Some( Location { row: 1, column: 15, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, Located { location: Location { row: 1, column: 10, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, Located { location: Location { row: 1, column: 13, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Constant { value: Int( 3, ), kind: None, }, }, ], ctx: Load, }, }, type_comment: None, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__assign_for.snap000064400000000000000000000100211046102023000305120ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 24, }, ), custom: (), node: For { target: Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 5, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, iter: Located { location: Location { row: 1, column: 9, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 10, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, Located { location: Location { row: 1, column: 13, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, Located { 
location: Location { row: 1, column: 16, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Constant { value: Int( 3, ), kind: None, }, }, ], ctx: Load, }, }, body: [ Located { location: Location { row: 1, column: 20, }, end_location: Some( Location { row: 1, column: 24, }, ), custom: (), node: Pass, }, ], orelse: [], type_comment: None, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__assign_list.snap000064400000000000000000000121361046102023000307100ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Assign { targets: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: List { elts: [ Located { location: Location { row: 1, column: 1, }, end_location: Some( Location { row: 1, column: 2, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 5, }, ), custom: (), node: Name { id: "y", ctx: Store, }, }, ], ctx: Store, }, }, ], value: Located { location: Location { row: 1, column: 9, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 10, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, Located { location: Location { row: 1, column: 13, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, Located { location: Location { row: 1, column: 16, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Constant { value: Int( 3, ), kind: None, }, }, ], ctx: Load, }, }, type_comment: None, }, }, ] 
rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__assign_list_comp.snap000064400000000000000000000153441046102023000317320ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 26, }, ), custom: (), node: Assign { targets: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ], value: Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 26, }, ), custom: (), node: ListComp { elt: Located { location: Location { row: 1, column: 5, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Name { id: "y", ctx: Load, }, }, generators: [ Comprehension { target: Located { location: Location { row: 1, column: 11, }, end_location: Some( Location { row: 1, column: 12, }, ), custom: (), node: Name { id: "y", ctx: Store, }, }, iter: Located { location: Location { row: 1, column: 16, }, end_location: Some( Location { row: 1, column: 25, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 17, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, Located { location: Location { row: 1, column: 20, }, end_location: Some( Location { row: 1, column: 21, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, Located { location: Location { row: 1, column: 23, }, end_location: Some( Location { row: 1, column: 24, }, ), custom: (), node: Constant { value: Int( 3, ), kind: None, }, }, ], ctx: Load, }, }, ifs: [], is_async: 0, }, ], }, }, type_comment: None, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__assign_name.snap000064400000000000000000000071761046102023000306650ustar 00000000000000--- source: 
compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: Assign { targets: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ], value: Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 5, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, Located { location: Location { row: 1, column: 8, }, end_location: Some( Location { row: 1, column: 9, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, Located { location: Location { row: 1, column: 11, }, end_location: Some( Location { row: 1, column: 12, }, ), custom: (), node: Constant { value: Int( 3, ), kind: None, }, }, ], ctx: Load, }, }, type_comment: None, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__assign_named_expr.snap000064400000000000000000000050021046102023000320510ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: If { test: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: NamedExpr { target: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 4, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, value: Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, }, }, body: [ Located { location: 
Location { row: 1, column: 10, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Pass, }, ], orelse: [], }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__assign_set_comp.snap000064400000000000000000000153431046102023000315510ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 26, }, ), custom: (), node: Assign { targets: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ], value: Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 26, }, ), custom: (), node: SetComp { elt: Located { location: Location { row: 1, column: 5, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Name { id: "y", ctx: Load, }, }, generators: [ Comprehension { target: Located { location: Location { row: 1, column: 11, }, end_location: Some( Location { row: 1, column: 12, }, ), custom: (), node: Name { id: "y", ctx: Store, }, }, iter: Located { location: Location { row: 1, column: 16, }, end_location: Some( Location { row: 1, column: 25, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 17, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, Located { location: Location { row: 1, column: 20, }, end_location: Some( Location { row: 1, column: 21, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, Located { location: Location { row: 1, column: 23, }, end_location: Some( Location { row: 1, column: 24, }, ), custom: (), node: Constant { value: Int( 3, ), kind: None, }, }, ], ctx: Load, }, }, ifs: [], is_async: 0, }, ], }, }, type_comment: None, }, }, ] 
rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__assign_starred.snap000064400000000000000000000136461046102023000314100ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 19, }, ), custom: (), node: Assign { targets: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 1, }, end_location: Some( Location { row: 1, column: 2, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Starred { value: Located { location: Location { row: 1, column: 5, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Name { id: "y", ctx: Store, }, }, ctx: Store, }, }, ], ctx: Store, }, }, ], value: Located { location: Location { row: 1, column: 10, }, end_location: Some( Location { row: 1, column: 19, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 11, }, end_location: Some( Location { row: 1, column: 12, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, Located { location: Location { row: 1, column: 14, }, end_location: Some( Location { row: 1, column: 15, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, Located { location: Location { row: 1, column: 17, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Constant { value: Int( 3, ), kind: None, }, }, ], ctx: Load, }, }, type_comment: None, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__assign_subscript.snap000064400000000000000000000116521046102023000317550ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ 
Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 16, }, ), custom: (), node: Assign { targets: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 4, }, ), custom: (), node: Subscript { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, slice: Located { location: Location { row: 1, column: 2, }, end_location: Some( Location { row: 1, column: 3, }, ), custom: (), node: Name { id: "y", ctx: Load, }, }, ctx: Store, }, }, ], value: Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 16, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 8, }, end_location: Some( Location { row: 1, column: 9, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, Located { location: Location { row: 1, column: 11, }, end_location: Some( Location { row: 1, column: 12, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, Located { location: Location { row: 1, column: 14, }, end_location: Some( Location { row: 1, column: 15, }, ), custom: (), node: Constant { value: Int( 3, ), kind: None, }, }, ], ctx: Load, }, }, type_comment: None, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__assign_tuple.snap000064400000000000000000000121371046102023000310670ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Assign { targets: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 1, }, end_location: Some( Location { row: 1, column: 2, }, ), 
custom: (), node: Name { id: "x", ctx: Store, }, }, Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 5, }, ), custom: (), node: Name { id: "y", ctx: Store, }, }, ], ctx: Store, }, }, ], value: Located { location: Location { row: 1, column: 9, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 10, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, Located { location: Location { row: 1, column: 13, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, Located { location: Location { row: 1, column: 16, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Constant { value: Int( 3, ), kind: None, }, }, ], ctx: Load, }, }, type_comment: None, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__assign_with.snap000064400000000000000000000045041046102023000307100ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 1, column: 5, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, optional_vars: Some( Located { location: Location { row: 1, column: 10, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 1, column: 13, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, ] 
rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__aug_assign_attribute.snap000064400000000000000000000101741046102023000325740ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 16, }, ), custom: (), node: AugAssign { target: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 3, }, ), custom: (), node: Attribute { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, attr: "y", ctx: Store, }, }, op: Add, value: Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 16, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 8, }, end_location: Some( Location { row: 1, column: 9, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, Located { location: Location { row: 1, column: 11, }, end_location: Some( Location { row: 1, column: 12, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, Located { location: Location { row: 1, column: 14, }, end_location: Some( Location { row: 1, column: 15, }, ), custom: (), node: Constant { value: Int( 3, ), kind: None, }, }, ], ctx: Load, }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__aug_assign_name.snap000064400000000000000000000025511046102023000315110ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: AugAssign { target: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, op: Add, value: 
Located { location: Location { row: 1, column: 5, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__aug_assign_subscript.snap000064400000000000000000000112751046102023000326120ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: AugAssign { target: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 4, }, ), custom: (), node: Subscript { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, slice: Located { location: Location { row: 1, column: 2, }, end_location: Some( Location { row: 1, column: 3, }, ), custom: (), node: Name { id: "y", ctx: Load, }, }, ctx: Store, }, }, op: Add, value: Located { location: Location { row: 1, column: 8, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 9, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, Located { location: Location { row: 1, column: 12, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, Located { location: Location { row: 1, column: 15, }, end_location: Some( Location { row: 1, column: 16, }, ), custom: (), node: Constant { value: Int( 3, ), kind: None, }, }, ], ctx: Load, }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__del_attribute.snap000064400000000000000000000031121046102023000312120ustar 00000000000000--- source: compiler/parser/src/context.rs expression: 
parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Delete { targets: [ Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Attribute { value: Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 5, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, attr: "y", ctx: Del, }, }, ], }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__del_name.snap000064400000000000000000000016401046102023000301330ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 5, }, ), custom: (), node: Delete { targets: [ Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 5, }, ), custom: (), node: Name { id: "x", ctx: Del, }, }, ], }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__context__tests__del_subscript.snap000064400000000000000000000043121046102023000312300ustar 00000000000000--- source: compiler/parser/src/context.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Delete { targets: [ Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Subscript { value: Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 5, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, slice: Located { location: Location { row: 1, column: 6, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Name { id: "y", ctx: Load, }, }, ctx: Del, }, }, ], }, }, ] 
rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_boolop_and.snap000064400000000000000000000023631046102023000315100ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: BoolOp { op: And, values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, Located { location: Location { row: 1, column: 6, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Name { id: "y", ctx: Load, }, }, ], }, } rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_boolop_or.snap000064400000000000000000000023621046102023000313650ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: BoolOp { op: Or, values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, Located { location: Location { row: 1, column: 5, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Name { id: "y", ctx: Load, }, }, ], }, } rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_class.snap000064400000000000000000000206441046102023000305030ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: "parse_program(source, \"\").unwrap()" --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 5, column: 6, }, ), custom: (), node: ClassDef { name: "Foo", bases: [ Located { location: Location { row: 1, column: 10, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Name { id: "A", ctx: 
Load, }, }, Located { location: Location { row: 1, column: 13, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Name { id: "B", ctx: Load, }, }, ], keywords: [], body: [ Located { location: Location { row: 2, column: 1, }, end_location: Some( Location { row: 3, column: 6, }, ), custom: (), node: FunctionDef { name: "__init__", args: Arguments { posonlyargs: [], args: [ Located { location: Location { row: 2, column: 14, }, end_location: Some( Location { row: 2, column: 18, }, ), custom: (), node: ArgData { arg: "self", annotation: None, type_comment: None, }, }, ], vararg: None, kwonlyargs: [], kw_defaults: [], kwarg: None, defaults: [], }, body: [ Located { location: Location { row: 3, column: 2, }, end_location: Some( Location { row: 3, column: 6, }, ), custom: (), node: Pass, }, ], decorator_list: [], returns: None, type_comment: None, }, }, Located { location: Location { row: 4, column: 1, }, end_location: Some( Location { row: 5, column: 6, }, ), custom: (), node: FunctionDef { name: "method_with_default", args: Arguments { posonlyargs: [], args: [ Located { location: Location { row: 4, column: 25, }, end_location: Some( Location { row: 4, column: 29, }, ), custom: (), node: ArgData { arg: "self", annotation: None, type_comment: None, }, }, Located { location: Location { row: 4, column: 31, }, end_location: Some( Location { row: 4, column: 34, }, ), custom: (), node: ArgData { arg: "arg", annotation: None, type_comment: None, }, }, ], vararg: None, kwonlyargs: [], kw_defaults: [], kwarg: None, defaults: [ Located { location: Location { row: 4, column: 35, }, end_location: Some( Location { row: 4, column: 44, }, ), custom: (), node: Constant { value: Str( "default", ), kind: None, }, }, ], }, body: [ Located { location: Location { row: 5, column: 2, }, end_location: Some( Location { row: 5, column: 6, }, ), custom: (), node: Pass, }, ], decorator_list: [], returns: None, type_comment: None, }, }, ], decorator_list: [], }, }, ] 
././@LongLink00006440000000000000000000000146000000000000007774Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_dict_comprehension.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_dict_comprehension.sna000064400000000000000000000044131046102023000330660ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 19, }, ), custom: (), node: DictComp { key: Located { location: Location { row: 1, column: 1, }, end_location: Some( Location { row: 1, column: 3, }, ), custom: (), node: Name { id: "x1", ctx: Load, }, }, value: Located { location: Location { row: 1, column: 5, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Name { id: "x2", ctx: Load, }, }, generators: [ Comprehension { target: Located { location: Location { row: 1, column: 12, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: Name { id: "y", ctx: Store, }, }, iter: Located { location: Location { row: 1, column: 17, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Name { id: "z", ctx: Load, }, }, ifs: [], is_async: 0, }, ], }, } ././@LongLink00006440000000000000000000000155000000000000007774Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_double_list_comprehension.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_double_list_comprehens000064400000000000000000000221451046102023000331640ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 48, }, ), custom: (), node: ListComp { elt: Located { location: Location { row: 1, column: 1, }, end_location: Some( Location { row: 1, column: 2, }, ), custom: (), node: Name { id: "x", ctx: Load, 
}, }, generators: [ Comprehension { target: Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 12, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Name { id: "y", ctx: Store, }, }, Located { location: Location { row: 1, column: 10, }, end_location: Some( Location { row: 1, column: 12, }, ), custom: (), node: Name { id: "y2", ctx: Store, }, }, ], ctx: Store, }, }, iter: Located { location: Location { row: 1, column: 16, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Name { id: "z", ctx: Load, }, }, ifs: [], is_async: 0, }, Comprehension { target: Located { location: Location { row: 1, column: 22, }, end_location: Some( Location { row: 1, column: 23, }, ), custom: (), node: Name { id: "a", ctx: Store, }, }, iter: Located { location: Location { row: 1, column: 27, }, end_location: Some( Location { row: 1, column: 28, }, ), custom: (), node: Name { id: "b", ctx: Load, }, }, ifs: [ Located { location: Location { row: 1, column: 32, }, end_location: Some( Location { row: 1, column: 37, }, ), custom: (), node: Compare { left: Located { location: Location { row: 1, column: 32, }, end_location: Some( Location { row: 1, column: 33, }, ), custom: (), node: Name { id: "a", ctx: Load, }, }, ops: [ Lt, ], comparators: [ Located { location: Location { row: 1, column: 36, }, end_location: Some( Location { row: 1, column: 37, }, ), custom: (), node: Constant { value: Int( 5, ), kind: None, }, }, ], }, }, Located { location: Location { row: 1, column: 41, }, end_location: Some( Location { row: 1, column: 47, }, ), custom: (), node: Compare { left: Located { location: Location { row: 1, column: 41, }, end_location: Some( Location { row: 1, column: 42, }, ), custom: (), node: Name { id: "a", ctx: Load, }, }, ops: [ Gt, ], comparators: [ Located { location: Location { row: 1, column: 45, }, 
end_location: Some( Location { row: 1, column: 47, }, ), custom: (), node: Constant { value: Int( 10, ), kind: None, }, }, ], }, }, ], is_async: 0, }, ], }, } rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_empty.snap000064400000000000000000000000761046102023000305310ustar 00000000000000--- source: parser/src/parser.rs expression: parse_ast --- [] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_f_string.snap000064400000000000000000000030661046102023000312100ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Constant { value: Str( "Hello world", ), kind: None, }, }, ], }, }, }, }, ] ././@LongLink00006440000000000000000000000153000000000000007772Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_generator_comprehension.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_generator_comprehensio000064400000000000000000000035701046102023000331760ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: GeneratorExp { elt: Located { location: Location { row: 1, column: 1, }, end_location: Some( Location { row: 1, column: 2, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, generators: [ Comprehension { target: Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: 
Name { id: "y", ctx: Store, }, }, iter: Located { location: Location { row: 1, column: 12, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: Name { id: "z", ctx: Load, }, }, ifs: [], is_async: 0, }, ], }, } rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_if_elif_else.snap000064400000000000000000000151711046102023000320020ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 3, column: 8, }, ), custom: (), node: If { test: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 4, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, body: [ Located { location: Location { row: 1, column: 6, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 6, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Constant { value: Int( 10, ), kind: None, }, }, }, }, ], orelse: [ Located { location: Location { row: 2, column: 0, }, end_location: Some( Location { row: 2, column: 10, }, ), custom: (), node: If { test: Located { location: Location { row: 2, column: 5, }, end_location: Some( Location { row: 2, column: 6, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, body: [ Located { location: Location { row: 2, column: 8, }, end_location: Some( Location { row: 2, column: 10, }, ), custom: (), node: Expr { value: Located { location: Location { row: 2, column: 8, }, end_location: Some( Location { row: 2, column: 10, }, ), custom: (), node: Constant { value: Int( 20, ), kind: None, }, }, }, }, ], orelse: [ Located { location: Location { row: 3, column: 6, }, end_location: Some( Location { row: 3, column: 8, }, ), custom: (), node: Expr { value: Located { location: Location { row: 3, column: 6, }, end_location: Some( 
Location { row: 3, column: 8, }, ), custom: (), node: Constant { value: Int( 30, ), kind: None, }, }, }, }, ], }, }, ], }, }, ] ././@LongLink00006440000000000000000000000163000000000000007773Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_if_else_generator_comprehension.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_if_else_generator_comp000064400000000000000000000066321046102023000331310ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 26, }, ), custom: (), node: GeneratorExp { elt: Located { location: Location { row: 1, column: 1, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: IfExp { test: Located { location: Location { row: 1, column: 6, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Name { id: "y", ctx: Load, }, }, body: Located { location: Location { row: 1, column: 1, }, end_location: Some( Location { row: 1, column: 2, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, orelse: Located { location: Location { row: 1, column: 13, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Name { id: "y", ctx: Load, }, }, }, }, generators: [ Comprehension { target: Located { location: Location { row: 1, column: 19, }, end_location: Some( Location { row: 1, column: 20, }, ), custom: (), node: Name { id: "y", ctx: Store, }, }, iter: Located { location: Location { row: 1, column: 24, }, end_location: Some( Location { row: 1, column: 25, }, ), custom: (), node: Name { id: "z", ctx: Load, }, }, ifs: [], is_async: 0, }, ], }, } rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_kwargs.snap000064400000000000000000000074461046102023000307010ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- [ Located { location: 
Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 32, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 32, }, ), custom: (), node: Call { func: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Name { id: "my_func", ctx: Load, }, }, args: [ Located { location: Location { row: 1, column: 8, }, end_location: Some( Location { row: 1, column: 20, }, ), custom: (), node: Constant { value: Str( "positional", ), kind: None, }, }, ], keywords: [ Located { location: Location { row: 1, column: 22, }, end_location: Some( Location { row: 1, column: 31, }, ), custom: (), node: KeywordData { arg: Some( "keyword", ), value: Located { location: Location { row: 1, column: 30, }, end_location: Some( Location { row: 1, column: 31, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, }, }, ], }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_lambda.snap000064400000000000000000000112721046102023000306130ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Lambda { args: Arguments { posonlyargs: [], args: [ Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: ArgData { arg: "x", annotation: None, type_comment: None, }, }, Located { location: Location { row: 1, column: 10, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: ArgData { arg: "y", annotation: None, type_comment: None, }, }, ], vararg: None, kwonlyargs: [], kw_defaults: [], 
kwarg: None, defaults: [], }, body: Located { location: Location { row: 1, column: 13, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: BinOp { left: Located { location: Location { row: 1, column: 13, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, op: Mult, right: Located { location: Location { row: 1, column: 17, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Name { id: "y", ctx: Load, }, }, }, }, }, }, }, }, ] ././@LongLink00006440000000000000000000000146000000000000007774Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_list_comprehension.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_list_comprehension.sna000064400000000000000000000035641046102023000331240ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: ListComp { elt: Located { location: Location { row: 1, column: 1, }, end_location: Some( Location { row: 1, column: 2, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, generators: [ Comprehension { target: Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Name { id: "y", ctx: Store, }, }, iter: Located { location: Location { row: 1, column: 12, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: Name { id: "z", ctx: Load, }, }, ifs: [], is_async: 0, }, ], }, } ././@LongLink00006440000000000000000000000174000000000000007775Lustar 
rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_named_expression_generator_comprehension.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_named_expression_gener000064400000000000000000000103741046102023000331600ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 23, }, ), custom: (), node: GeneratorExp { elt: Located { location: Location { row: 1, column: 1, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: NamedExpr { target: Located { location: Location { row: 1, column: 1, }, end_location: Some( Location { row: 1, column: 2, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, value: Located { location: Location { row: 1, column: 6, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: BinOp { left: Located { location: Location { row: 1, column: 6, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Name { id: "y", ctx: Load, }, }, op: Add, right: Located { location: Location { row: 1, column: 10, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, }, }, }, }, generators: [ Comprehension { target: Located { location: Location { row: 1, column: 16, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Name { id: "y", ctx: Store, }, }, iter: Located { location: Location { row: 1, column: 21, }, end_location: Some( Location { row: 1, column: 22, }, ), custom: (), node: Name { id: "z", ctx: Load, }, }, ifs: [], is_async: 0, }, ], }, } rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_print_2.snap000064400000000000000000000056431046102023000307550ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- [ Located { location: Location { 
row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 23, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 23, }, ), custom: (), node: Call { func: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 5, }, ), custom: (), node: Name { id: "print", ctx: Load, }, }, args: [ Located { location: Location { row: 1, column: 6, }, end_location: Some( Location { row: 1, column: 19, }, ), custom: (), node: Constant { value: Str( "Hello world", ), kind: None, }, }, Located { location: Location { row: 1, column: 21, }, end_location: Some( Location { row: 1, column: 22, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, ], keywords: [], }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_print_hello.snap000064400000000000000000000042631046102023000317140ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 20, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 20, }, ), custom: (), node: Call { func: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 5, }, ), custom: (), node: Name { id: "print", ctx: Load, }, }, args: [ Located { location: Location { row: 1, column: 6, }, end_location: Some( Location { row: 1, column: 19, }, ), custom: (), node: Constant { value: Str( "Hello world", ), kind: None, }, }, ], keywords: [], }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_string.snap000064400000000000000000000016031046102023000306760ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, 
column: 0, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: Constant { value: Str( "Hello world", ), kind: None, }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__parse_tuples.snap000064400000000000000000000106171046102023000307110ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: "parse_program(source, \"\").unwrap()" --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Assign { targets: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 4, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "a", ctx: Store, }, }, Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 4, }, ), custom: (), node: Name { id: "b", ctx: Store, }, }, ], ctx: Store, }, }, ], value: Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Constant { value: Int( 4, ), kind: None, }, }, Located { location: Location { row: 1, column: 10, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Constant { value: Int( 5, ), kind: None, }, }, ], ctx: Load, }, }, type_comment: None, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__slice.snap000064400000000000000000000062431046102023000273020ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: parse_ast --- Located { location: Location { row: 
1, column: 0, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Subscript { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 1, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, slice: Located { location: Location { row: 1, column: 2, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Slice { lower: Some( Located { location: Location { row: 1, column: 2, }, end_location: Some( Location { row: 1, column: 3, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, ), upper: Some( Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 5, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, ), step: Some( Located { location: Location { row: 1, column: 6, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Constant { value: Int( 3, ), kind: None, }, }, ), }, }, ctx: Load, }, } rustpython-parser-0.2.0/src/snapshots/rustpython_parser__parser__tests__with_statement.snap000064400000000000000000002532631046102023000312500ustar 00000000000000--- source: compiler/parser/src/parser.rs expression: "parse_program(source, \"\").unwrap()" --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 12, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 1, column: 5, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, optional_vars: None, }, ], body: [ Located { location: Location { row: 1, column: 8, }, end_location: Some( Location { row: 1, column: 12, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 2, column: 0, }, end_location: Some( Location { row: 2, column: 17, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: 
Location { row: 2, column: 5, }, end_location: Some( Location { row: 2, column: 6, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, optional_vars: Some( Located { location: Location { row: 2, column: 10, }, end_location: Some( Location { row: 2, column: 11, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 2, column: 13, }, end_location: Some( Location { row: 2, column: 17, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 3, column: 0, }, end_location: Some( Location { row: 3, column: 15, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 3, column: 5, }, end_location: Some( Location { row: 3, column: 6, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, optional_vars: None, }, Withitem { context_expr: Located { location: Location { row: 3, column: 8, }, end_location: Some( Location { row: 3, column: 9, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, optional_vars: None, }, ], body: [ Located { location: Location { row: 3, column: 11, }, end_location: Some( Location { row: 3, column: 15, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 4, column: 0, }, end_location: Some( Location { row: 4, column: 25, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 4, column: 5, }, end_location: Some( Location { row: 4, column: 6, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, optional_vars: Some( Located { location: Location { row: 4, column: 10, }, end_location: Some( Location { row: 4, column: 11, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ), }, Withitem { context_expr: Located { location: Location { row: 4, column: 13, }, end_location: Some( Location { row: 4, column: 14, }, ), custom: (), node: Constant { value: Int( 
1, ), kind: None, }, }, optional_vars: Some( Located { location: Location { row: 4, column: 18, }, end_location: Some( Location { row: 4, column: 19, }, ), custom: (), node: Name { id: "y", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 4, column: 21, }, end_location: Some( Location { row: 4, column: 25, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 5, column: 0, }, end_location: Some( Location { row: 5, column: 24, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 5, column: 5, }, end_location: Some( Location { row: 5, column: 18, }, ), custom: (), node: IfExp { test: Located { location: Location { row: 5, column: 10, }, end_location: Some( Location { row: 5, column: 11, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, body: Located { location: Location { row: 5, column: 5, }, end_location: Some( Location { row: 5, column: 6, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, orelse: Located { location: Location { row: 5, column: 17, }, end_location: Some( Location { row: 5, column: 18, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, }, }, optional_vars: None, }, ], body: [ Located { location: Location { row: 5, column: 20, }, end_location: Some( Location { row: 5, column: 24, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 6, column: 0, }, end_location: Some( Location { row: 6, column: 29, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 6, column: 5, }, end_location: Some( Location { row: 6, column: 18, }, ), custom: (), node: IfExp { test: Located { location: Location { row: 6, column: 10, }, end_location: Some( Location { row: 6, column: 11, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, body: Located { location: Location { row: 6, column: 5, }, 
end_location: Some( Location { row: 6, column: 6, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, orelse: Located { location: Location { row: 6, column: 17, }, end_location: Some( Location { row: 6, column: 18, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, }, }, optional_vars: Some( Located { location: Location { row: 6, column: 22, }, end_location: Some( Location { row: 6, column: 23, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 6, column: 25, }, end_location: Some( Location { row: 6, column: 29, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 7, column: 0, }, end_location: Some( Location { row: 7, column: 13, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 7, column: 5, }, end_location: Some( Location { row: 7, column: 7, }, ), custom: (), node: Tuple { elts: [], ctx: Load, }, }, optional_vars: None, }, ], body: [ Located { location: Location { row: 7, column: 9, }, end_location: Some( Location { row: 7, column: 13, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 8, column: 0, }, end_location: Some( Location { row: 8, column: 18, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 8, column: 5, }, end_location: Some( Location { row: 8, column: 7, }, ), custom: (), node: Tuple { elts: [], ctx: Load, }, }, optional_vars: Some( Located { location: Location { row: 8, column: 11, }, end_location: Some( Location { row: 8, column: 12, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 8, column: 14, }, end_location: Some( Location { row: 8, column: 18, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 9, column: 0, }, end_location: Some( Location { row: 
9, column: 14, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 9, column: 6, }, end_location: Some( Location { row: 9, column: 7, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, optional_vars: None, }, ], body: [ Located { location: Location { row: 9, column: 10, }, end_location: Some( Location { row: 9, column: 14, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 10, column: 0, }, end_location: Some( Location { row: 10, column: 19, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 10, column: 6, }, end_location: Some( Location { row: 10, column: 7, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, optional_vars: Some( Located { location: Location { row: 10, column: 12, }, end_location: Some( Location { row: 10, column: 13, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 10, column: 15, }, end_location: Some( Location { row: 10, column: 19, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 11, column: 0, }, end_location: Some( Location { row: 11, column: 15, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 11, column: 6, }, end_location: Some( Location { row: 11, column: 7, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, optional_vars: None, }, ], body: [ Located { location: Location { row: 11, column: 11, }, end_location: Some( Location { row: 11, column: 15, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 12, column: 0, }, end_location: Some( Location { row: 12, column: 20, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 12, column: 5, }, end_location: Some( Location { row: 12, column: 
9, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 12, column: 6, }, end_location: Some( Location { row: 12, column: 7, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, ], ctx: Load, }, }, optional_vars: Some( Located { location: Location { row: 12, column: 13, }, end_location: Some( Location { row: 12, column: 14, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 12, column: 16, }, end_location: Some( Location { row: 12, column: 20, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 13, column: 0, }, end_location: Some( Location { row: 13, column: 17, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 13, column: 6, }, end_location: Some( Location { row: 13, column: 7, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, optional_vars: None, }, Withitem { context_expr: Located { location: Location { row: 13, column: 9, }, end_location: Some( Location { row: 13, column: 10, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, optional_vars: None, }, ], body: [ Located { location: Location { row: 13, column: 13, }, end_location: Some( Location { row: 13, column: 17, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 14, column: 0, }, end_location: Some( Location { row: 14, column: 22, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 14, column: 5, }, end_location: Some( Location { row: 14, column: 11, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 14, column: 6, }, end_location: Some( Location { row: 14, column: 7, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, Located { location: Location { row: 14, column: 9, }, end_location: Some( Location { row: 14, column: 10, }, ), 
custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, ], ctx: Load, }, }, optional_vars: Some( Located { location: Location { row: 14, column: 15, }, end_location: Some( Location { row: 14, column: 16, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 14, column: 18, }, end_location: Some( Location { row: 14, column: 22, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 15, column: 0, }, end_location: Some( Location { row: 15, column: 16, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 15, column: 5, }, end_location: Some( Location { row: 15, column: 10, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 15, column: 6, }, end_location: Some( Location { row: 15, column: 8, }, ), custom: (), node: Starred { value: Located { location: Location { row: 15, column: 7, }, end_location: Some( Location { row: 15, column: 8, }, ), custom: (), node: Name { id: "a", ctx: Load, }, }, ctx: Load, }, }, ], ctx: Load, }, }, optional_vars: None, }, ], body: [ Located { location: Location { row: 15, column: 12, }, end_location: Some( Location { row: 15, column: 16, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 16, column: 0, }, end_location: Some( Location { row: 16, column: 21, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 16, column: 5, }, end_location: Some( Location { row: 16, column: 10, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 16, column: 6, }, end_location: Some( Location { row: 16, column: 8, }, ), custom: (), node: Starred { value: Located { location: Location { row: 16, column: 7, }, end_location: Some( Location { row: 16, column: 8, }, ), custom: (), node: Name { id: "a", ctx: Load, }, }, ctx: Load, }, }, ], ctx: Load, }, }, optional_vars: 
Some( Located { location: Location { row: 16, column: 14, }, end_location: Some( Location { row: 16, column: 15, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 16, column: 17, }, end_location: Some( Location { row: 16, column: 21, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 17, column: 0, }, end_location: Some( Location { row: 17, column: 18, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 17, column: 5, }, end_location: Some( Location { row: 17, column: 12, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 17, column: 6, }, end_location: Some( Location { row: 17, column: 7, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, Located { location: Location { row: 17, column: 9, }, end_location: Some( Location { row: 17, column: 11, }, ), custom: (), node: Starred { value: Located { location: Location { row: 17, column: 10, }, end_location: Some( Location { row: 17, column: 11, }, ), custom: (), node: Name { id: "a", ctx: Load, }, }, ctx: Load, }, }, ], ctx: Load, }, }, optional_vars: None, }, ], body: [ Located { location: Location { row: 17, column: 14, }, end_location: Some( Location { row: 17, column: 18, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 18, column: 0, }, end_location: Some( Location { row: 18, column: 23, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 18, column: 5, }, end_location: Some( Location { row: 18, column: 12, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 18, column: 6, }, end_location: Some( Location { row: 18, column: 7, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, Located { location: Location { row: 18, column: 9, }, end_location: Some( Location { row: 18, column: 
11, }, ), custom: (), node: Starred { value: Located { location: Location { row: 18, column: 10, }, end_location: Some( Location { row: 18, column: 11, }, ), custom: (), node: Name { id: "a", ctx: Load, }, }, ctx: Load, }, }, ], ctx: Load, }, }, optional_vars: Some( Located { location: Location { row: 18, column: 16, }, end_location: Some( Location { row: 18, column: 17, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 18, column: 19, }, end_location: Some( Location { row: 18, column: 23, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 19, column: 0, }, end_location: Some( Location { row: 19, column: 19, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 19, column: 6, }, end_location: Some( Location { row: 19, column: 12, }, ), custom: (), node: NamedExpr { target: Located { location: Location { row: 19, column: 6, }, end_location: Some( Location { row: 19, column: 7, }, ), custom: (), node: Name { id: "a", ctx: Store, }, }, value: Located { location: Location { row: 19, column: 11, }, end_location: Some( Location { row: 19, column: 12, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, }, }, optional_vars: None, }, ], body: [ Located { location: Location { row: 19, column: 15, }, end_location: Some( Location { row: 19, column: 19, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 20, column: 0, }, end_location: Some( Location { row: 20, column: 24, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 20, column: 6, }, end_location: Some( Location { row: 20, column: 12, }, ), custom: (), node: NamedExpr { target: Located { location: Location { row: 20, column: 6, }, end_location: Some( Location { row: 20, column: 7, }, ), custom: (), node: Name { id: "a", ctx: Store, }, }, value: Located { 
location: Location { row: 20, column: 11, }, end_location: Some( Location { row: 20, column: 12, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, }, }, optional_vars: Some( Located { location: Location { row: 20, column: 17, }, end_location: Some( Location { row: 20, column: 18, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 20, column: 20, }, end_location: Some( Location { row: 20, column: 24, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 21, column: 0, }, end_location: Some( Location { row: 21, column: 27, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 21, column: 5, }, end_location: Some( Location { row: 21, column: 21, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 21, column: 6, }, end_location: Some( Location { row: 21, column: 12, }, ), custom: (), node: NamedExpr { target: Located { location: Location { row: 21, column: 6, }, end_location: Some( Location { row: 21, column: 7, }, ), custom: (), node: Name { id: "a", ctx: Store, }, }, value: Located { location: Location { row: 21, column: 11, }, end_location: Some( Location { row: 21, column: 12, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, }, }, Located { location: Location { row: 21, column: 14, }, end_location: Some( Location { row: 21, column: 20, }, ), custom: (), node: NamedExpr { target: Located { location: Location { row: 21, column: 14, }, end_location: Some( Location { row: 21, column: 15, }, ), custom: (), node: Name { id: "b", ctx: Store, }, }, value: Located { location: Location { row: 21, column: 19, }, end_location: Some( Location { row: 21, column: 20, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, }, }, ], ctx: Load, }, }, optional_vars: None, }, ], body: [ Located { location: Location { row: 21, column: 23, }, end_location: 
Some( Location { row: 21, column: 27, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 22, column: 0, }, end_location: Some( Location { row: 22, column: 32, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 22, column: 5, }, end_location: Some( Location { row: 22, column: 21, }, ), custom: (), node: Tuple { elts: [ Located { location: Location { row: 22, column: 6, }, end_location: Some( Location { row: 22, column: 12, }, ), custom: (), node: NamedExpr { target: Located { location: Location { row: 22, column: 6, }, end_location: Some( Location { row: 22, column: 7, }, ), custom: (), node: Name { id: "a", ctx: Store, }, }, value: Located { location: Location { row: 22, column: 11, }, end_location: Some( Location { row: 22, column: 12, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, }, }, Located { location: Location { row: 22, column: 14, }, end_location: Some( Location { row: 22, column: 20, }, ), custom: (), node: NamedExpr { target: Located { location: Location { row: 22, column: 14, }, end_location: Some( Location { row: 22, column: 15, }, ), custom: (), node: Name { id: "b", ctx: Store, }, }, value: Located { location: Location { row: 22, column: 19, }, end_location: Some( Location { row: 22, column: 20, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, }, }, ], ctx: Load, }, }, optional_vars: Some( Located { location: Location { row: 22, column: 25, }, end_location: Some( Location { row: 22, column: 26, }, ), custom: (), node: Name { id: "x", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 22, column: 28, }, end_location: Some( Location { row: 22, column: 32, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 23, column: 0, }, end_location: Some( Location { row: 23, column: 19, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located 
{ location: Location { row: 23, column: 6, }, end_location: Some( Location { row: 23, column: 7, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, optional_vars: Some( Located { location: Location { row: 23, column: 11, }, end_location: Some( Location { row: 23, column: 12, }, ), custom: (), node: Name { id: "a", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 23, column: 15, }, end_location: Some( Location { row: 23, column: 19, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 24, column: 0, }, end_location: Some( Location { row: 24, column: 20, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 24, column: 6, }, end_location: Some( Location { row: 24, column: 7, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, optional_vars: Some( Located { location: Location { row: 24, column: 11, }, end_location: Some( Location { row: 24, column: 12, }, ), custom: (), node: Name { id: "a", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 24, column: 16, }, end_location: Some( Location { row: 24, column: 20, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 25, column: 0, }, end_location: Some( Location { row: 25, column: 27, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 25, column: 6, }, end_location: Some( Location { row: 25, column: 7, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, optional_vars: Some( Located { location: Location { row: 25, column: 11, }, end_location: Some( Location { row: 25, column: 12, }, ), custom: (), node: Name { id: "a", ctx: Store, }, }, ), }, Withitem { context_expr: Located { location: Location { row: 25, column: 14, }, end_location: Some( Location { row: 25, column: 15, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, 
}, optional_vars: Some( Located { location: Location { row: 25, column: 19, }, end_location: Some( Location { row: 25, column: 20, }, ), custom: (), node: Name { id: "b", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 25, column: 23, }, end_location: Some( Location { row: 25, column: 27, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, Located { location: Location { row: 26, column: 0, }, end_location: Some( Location { row: 26, column: 28, }, ), custom: (), node: With { items: [ Withitem { context_expr: Located { location: Location { row: 26, column: 6, }, end_location: Some( Location { row: 26, column: 7, }, ), custom: (), node: Constant { value: Int( 0, ), kind: None, }, }, optional_vars: Some( Located { location: Location { row: 26, column: 11, }, end_location: Some( Location { row: 26, column: 12, }, ), custom: (), node: Name { id: "a", ctx: Store, }, }, ), }, Withitem { context_expr: Located { location: Location { row: 26, column: 14, }, end_location: Some( Location { row: 26, column: 15, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, optional_vars: Some( Located { location: Location { row: 26, column: 19, }, end_location: Some( Location { row: 26, column: 20, }, ), custom: (), node: Name { id: "b", ctx: Store, }, }, ), }, ], body: [ Located { location: Location { row: 26, column: 24, }, end_location: Some( Location { row: 26, column: 28, }, ), custom: (), node: Pass, }, ], type_comment: None, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__double_quoted_byte.snap000064400000000000000000000220471046102023000320730ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 738, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 738, }, ), custom: (), node: 
Constant { value: Bytes( [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, ], ), kind: None, }, }, }, }, ] ././@LongLink00006440000000000000000000000151000000000000007770Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__escape_char_in_byte_literal.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__escape_char_in_byte_literal.000064400000000000000000000023311046102023000330070ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: Constant { value: Bytes( [ 111, 109, 107, 109, 111, 107, 92, 88, 97, 97, ], ), kind: 
None, }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__escape_octet.snap000064400000000000000000000020621046102023000306460ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Constant { value: Bytes( [ 35, 97, 4, 83, 52, ], ), kind: None, }, }, }, }, ] ././@LongLink00006440000000000000000000000147000000000000007775Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__fstring_escaped_character.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__fstring_escaped_character.sn000064400000000000000000000060111046102023000330410ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Constant { value: Str( "\\", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 5, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, conversion: 0, format_spec: None, }, }, ], }, }, }, }, ] 
rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__fstring_escaped_newline.snap000064400000000000000000000060111046102023000330670ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Constant { value: Str( "\n", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 5, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, conversion: 0, format_spec: None, }, }, ], }, }, }, }, ] ././@LongLink00006440000000000000000000000147000000000000007775Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__fstring_line_continuation.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__fstring_line_continuation.sn000064400000000000000000000060131046102023000331440ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 2, column: 4, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 2, column: 4, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 2, column: 4, }, ), custom: (), node: Constant { value: Str( "\\\n", ), kind: None, }, }, Located { location: Location { row: 1, 
column: 0, }, end_location: Some( Location { row: 2, column: 4, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 2, column: 1, }, end_location: Some( Location { row: 2, column: 2, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, conversion: 0, format_spec: None, }, }, ], }, }, }, }, ] ././@LongLink00006440000000000000000000000147000000000000007775Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__fstring_unescaped_newline.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__fstring_unescaped_newline.sn000064400000000000000000000060111046102023000331110ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 2, column: 6, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 2, column: 6, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 2, column: 6, }, ), custom: (), node: Constant { value: Str( "\n", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 2, column: 6, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 2, column: 1, }, end_location: Some( Location { row: 2, column: 2, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, conversion: 0, format_spec: None, }, }, ], }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__parse_f_string_concat_1.snap000064400000000000000000000030661046102023000327710ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, 
column: 0, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Constant { value: Str( "Hello world", ), kind: None, }, }, ], }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__parse_f_string_concat_2.snap000064400000000000000000000030661046102023000327720ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Constant { value: Str( "Hello world", ), kind: None, }, }, ], }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__parse_f_string_concat_3.snap000064400000000000000000000061741046102023000327760ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 22, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 22, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 22, }, ), custom: (), node: Constant { value: Str( "Hello world", ), kind: None, }, }, Located { location: Location { row: 1, column: 9, }, end_location: Some( Location { row: 1, column: 22, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 17, }, end_location: Some( 
Location { row: 1, column: 20, }, ), custom: (), node: Constant { value: Str( "!", ), kind: None, }, }, conversion: 0, format_spec: None, }, }, ], }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__parse_string_concat.snap000064400000000000000000000016031046102023000322370ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 16, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 16, }, ), custom: (), node: Constant { value: Str( "Hello world", ), kind: None, }, }, }, }, ] ././@LongLink00006440000000000000000000000162000000000000007772Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__parse_string_triple_quotes_with_kind.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__parse_string_triple_quotes_w000064400000000000000000000016711046102023000332620ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 20, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 20, }, ), custom: (), node: Constant { value: Str( "Hello, world!", ), kind: Some( "u", ), }, }, }, }, ] ././@LongLink00006440000000000000000000000147000000000000007775Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__parse_u_f_string_concat_1.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__parse_u_f_string_concat_1.sn000064400000000000000000000032021046102023000327640ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( 
Location { row: 1, column: 18, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Constant { value: Str( "Hello world", ), kind: Some( "u", ), }, }, ], }, }, }, }, ] ././@LongLink00006440000000000000000000000147000000000000007775Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__parse_u_f_string_concat_2.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__parse_u_f_string_concat_2.sn000064400000000000000000000032031046102023000327660ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 22, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 22, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 22, }, ), custom: (), node: Constant { value: Str( "Hello world!", ), kind: Some( "u", ), }, }, ], }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__parse_u_string_concat_1.snap000064400000000000000000000016031046102023000330030ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Constant { value: Str( "Hello world", ), kind: None, }, }, }, }, ] 
rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__parse_u_string_concat_2.snap000064400000000000000000000016671046102023000330160ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 17, }, ), custom: (), node: Constant { value: Str( "Hello world", ), kind: Some( "u", ), }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__raw_byte_literal_1.snap000064400000000000000000000020231046102023000317550ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Constant { value: Bytes( [ 92, 120, 49, 122, ], ), kind: None, }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__raw_byte_literal_2.snap000064400000000000000000000017211046102023000317620ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Constant { value: Bytes( [ 92, 92, ], ), kind: None, }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__raw_fstring.snap000064400000000000000000000044301046102023000305360ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { 
row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 4, }, end_location: Some( Location { row: 1, column: 5, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, conversion: 0, format_spec: None, }, }, ], }, }, }, }, ] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__single_quoted_byte.snap000064400000000000000000000220471046102023000321020ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 738, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 738, }, ), custom: (), node: Constant { value: Bytes( [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 
189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, ], ), kind: None, }, }, }, }, ] ././@LongLink00006440000000000000000000000147000000000000007775Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__triple_quoted_raw_fstring.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string__tests__triple_quoted_raw_fstring.sn000064400000000000000000000044331046102023000331600ustar 00000000000000--- source: compiler/parser/src/string.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Expr { value: Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 6, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, conversion: 0, format_spec: None, }, }, ], }, }, }, }, ] ././@LongLink00006440000000000000000000000167000000000000007777Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__fstring_parse_selfdocumenting_base.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__fstring_parse_selfdoc000064400000000000000000000031711046102023000331730ustar 00000000000000--- source: compiler/parser/src/string_parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), 
node: Constant { value: Str( "user=", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), node: Constant { value: Str( "", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Name { id: "user", ctx: Load, }, }, conversion: 114, format_spec: None, }, }, ] ././@LongLink00006440000000000000000000000174000000000000007775Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__fstring_parse_selfdocumenting_base_more.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__fstring_parse_selfdoc000064400000000000000000000076451046102023000332050ustar 00000000000000--- source: compiler/parser/src/string_parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 38, }, ), custom: (), node: Constant { value: Str( "mix ", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 38, }, ), custom: (), node: Constant { value: Str( "user=", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 38, }, ), custom: (), node: Constant { value: Str( "", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 38, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Name { id: "user", ctx: Load, }, }, conversion: 114, format_spec: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: 
Some( Location { row: 1, column: 38, }, ), custom: (), node: Constant { value: Str( " with text and ", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 38, }, ), custom: (), node: Constant { value: Str( "second=", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 38, }, ), custom: (), node: Constant { value: Str( "", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 38, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 29, }, end_location: Some( Location { row: 1, column: 35, }, ), custom: (), node: Name { id: "second", ctx: Load, }, }, conversion: 114, format_spec: None, }, }, ] ././@LongLink00006440000000000000000000000171000000000000007772Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__fstring_parse_selfdocumenting_format.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__fstring_parse_selfdoc000064400000000000000000000057301046102023000331760ustar 00000000000000--- source: compiler/parser/src/string_parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Constant { value: Str( "user=", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Constant { value: Str( "", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 7, }, ), custom: (), node: Name { id: "user", ctx: Load, }, }, conversion: 0, format_spec: Some( Located { 
location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 14, }, ), custom: (), node: Constant { value: Str( ">10", ), kind: None, }, }, ], }, }, ), }, }, ] ././@LongLink00006440000000000000000000000150000000000000007767Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_empty_fstring.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_empty_fstring.s000064400000000000000000000001431046102023000331470ustar 00000000000000--- source: compiler/parser/src/string_parser.rs expression: "parse_fstring(\"\").unwrap()" --- [] rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring.snap000064400000000000000000000040701046102023000324330ustar 00000000000000--- source: compiler/parser/src/string_parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 4, }, ), custom: (), node: Name { id: "a", ctx: Load, }, }, conversion: 0, format_spec: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 7, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Name { id: "b", ctx: Load, }, }, conversion: 0, format_spec: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 18, }, ), custom: (), node: Constant { value: Str( "{foo}", ), kind: None, }, }, ] ././@LongLink00006440000000000000000000000151000000000000007770Lustar 
rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_equals.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_equals.000064400000000000000000000045601046102023000331270ustar 00000000000000--- source: compiler/parser/src/string_parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Compare { left: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 5, }, ), custom: (), node: Constant { value: Int( 42, ), kind: None, }, }, ops: [ Eq, ], comparators: [ Located { location: Location { row: 1, column: 9, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: Constant { value: Int( 42, ), kind: None, }, }, ], }, }, conversion: 0, format_spec: None, }, }, ] ././@LongLink00006440000000000000000000000156000000000000007775Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_nested_spec.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_nested_000064400000000000000000000060271046102023000332000ustar 00000000000000--- source: compiler/parser/src/string_parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 15, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Name { id: "foo", ctx: Load, }, }, conversion: 0, format_spec: Some( Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 15, }, ), custom: (), node: JoinedStr { 
values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 15, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 8, }, end_location: Some( Location { row: 1, column: 12, }, ), custom: (), node: Name { id: "spec", ctx: Load, }, }, conversion: 0, format_spec: None, }, }, ], }, }, ), }, }, ] ././@LongLink00006440000000000000000000000155000000000000007774Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_not_equals.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_not_equ000064400000000000000000000045571046102023000332370ustar 00000000000000--- source: compiler/parser/src/string_parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 11, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 9, }, ), custom: (), node: Compare { left: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 4, }, ), custom: (), node: Constant { value: Int( 1, ), kind: None, }, }, ops: [ NotEq, ], comparators: [ Located { location: Location { row: 1, column: 8, }, end_location: Some( Location { row: 1, column: 9, }, ), custom: (), node: Constant { value: Int( 2, ), kind: None, }, }, ], }, }, conversion: 0, format_spec: None, }, }, ] ././@LongLink00006440000000000000000000000162000000000000007772Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_not_nested_spec.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_not_nes000064400000000000000000000043531046102023000332240ustar 00000000000000--- source: compiler/parser/src/string_parser.rs expression: parse_ast --- [ Located { location: Location { 
row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 6, }, ), custom: (), node: Name { id: "foo", ctx: Load, }, }, conversion: 0, format_spec: Some( Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: JoinedStr { values: [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 13, }, ), custom: (), node: Constant { value: Str( "spec", ), kind: None, }, }, ], }, }, ), }, }, ] ././@LongLink00006440000000000000000000000165000000000000007775Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_selfdoc_prec_space.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_selfdoc000064400000000000000000000031661046102023000331770ustar 00000000000000--- source: compiler/parser/src/string_parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), node: Constant { value: Str( "x =", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), node: Constant { value: Str( "", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 4, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, conversion: 114, format_spec: None, }, }, ] ././@LongLink00006440000000000000000000000171000000000000007772Lustar 
rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_selfdoc_trailing_space.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_selfdoc000064400000000000000000000031661046102023000331770ustar 00000000000000--- source: compiler/parser/src/string_parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), node: Constant { value: Str( "x=", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), node: Constant { value: Str( " ", ), kind: None, }, }, Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 4, }, ), custom: (), node: Name { id: "x", ctx: Load, }, }, conversion: 114, format_spec: None, }, }, ] ././@LongLink00006440000000000000000000000155000000000000007774Lustar rustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_yield_expr.snaprustpython-parser-0.2.0/src/snapshots/rustpython_parser__string_parser__tests__parse_fstring_yield_e000064400000000000000000000015551046102023000331720ustar 00000000000000--- source: compiler/parser/src/string_parser.rs expression: parse_ast --- [ Located { location: Location { row: 1, column: 0, }, end_location: Some( Location { row: 1, column: 10, }, ), custom: (), node: FormattedValue { value: Located { location: Location { row: 1, column: 3, }, end_location: Some( Location { row: 1, column: 8, }, ), custom: (), node: Yield { value: None, }, }, conversion: 0, format_spec: None, }, }, ] rustpython-parser-0.2.0/src/string.rs000064400000000000000000000234061046102023000160240ustar 00000000000000use crate::{ ast::{Constant, Expr, 
ExprKind, Location}, error::{LexicalError, LexicalErrorType}, string_parser::parse_string, token::StringKind, }; use itertools::Itertools; pub fn parse_strings( values: Vec<(Location, (String, StringKind, bool), Location)>, ) -> Result { // Preserve the initial location and kind. let initial_start = values[0].0; let last_end = values.last().unwrap().2; let initial_kind = (values[0].1 .1 == StringKind::Unicode).then(|| "u".to_owned()); let has_fstring = values.iter().any(|(_, (_, kind, ..), _)| kind.is_fstring()); let num_bytes = values .iter() .filter(|(_, (_, kind, ..), _)| kind.is_bytes()) .count(); let has_bytes = num_bytes > 0; if has_bytes && num_bytes < values.len() { return Err(LexicalError { error: LexicalErrorType::OtherError( "cannot mix bytes and nonbytes literals".to_owned(), ), location: initial_start, }); } if has_bytes { let mut content: Vec = vec![]; for (start, (source, kind, triple_quoted), end) in values { for value in parse_string(&source, kind, triple_quoted, start, end)? { match value.node { ExprKind::Constant { value: Constant::Bytes(value), .. } => content.extend(value), _ => unreachable!("Unexpected non-bytes expression."), } } } return Ok(Expr::new( initial_start, last_end, ExprKind::Constant { value: Constant::Bytes(content), kind: None, }, )); } if !has_fstring { let mut content: Vec = vec![]; for (start, (source, kind, triple_quoted), end) in values { for value in parse_string(&source, kind, triple_quoted, start, end)? { match value.node { ExprKind::Constant { value: Constant::Str(value), .. } => content.push(value), _ => unreachable!("Unexpected non-string expression."), } } } return Ok(Expr::new( initial_start, last_end, ExprKind::Constant { value: Constant::Str(content.join("")), kind: initial_kind, }, )); } // De-duplicate adjacent constants. 
let mut deduped: Vec = vec![]; let mut current: Vec = vec![]; let take_current = |current: &mut Vec| -> Expr { Expr::new( initial_start, last_end, ExprKind::Constant { value: Constant::Str(current.drain(..).join("")), kind: initial_kind.clone(), }, ) }; for (start, (source, kind, triple_quoted), end) in values { for value in parse_string(&source, kind, triple_quoted, start, end)? { match value.node { ExprKind::FormattedValue { .. } => { if !current.is_empty() { deduped.push(take_current(&mut current)); } deduped.push(value) } ExprKind::Constant { value: Constant::Str(value), .. } => current.push(value), _ => unreachable!("Unexpected non-string expression."), } } } if !current.is_empty() { deduped.push(take_current(&mut current)); } Ok(Expr::new( initial_start, last_end, ExprKind::JoinedStr { values: deduped }, )) } #[cfg(test)] mod tests { use crate::parser::parse_program; #[test] fn test_parse_string_concat() { let source = "'Hello ' 'world'"; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_u_string_concat_1() { let source = "'Hello ' u'world'"; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_u_string_concat_2() { let source = "u'Hello ' 'world'"; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_f_string_concat_1() { let source = "'Hello ' f'world'"; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_f_string_concat_2() { let source = "'Hello ' f'world'"; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_f_string_concat_3() { let source = "'Hello ' f'world{\"!\"}'"; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_u_f_string_concat_1() { let source = "u'Hello 
' f'world'"; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_u_f_string_concat_2() { let source = "u'Hello ' f'world' '!'"; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_parse_string_triple_quotes_with_kind() { let source = "u'''Hello, world!'''"; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_single_quoted_byte() { // single quote let source = r##"b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff'"##; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_double_quoted_byte() { // double quote let source = r##"b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f 
!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff""##; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_escape_char_in_byte_literal() { // backslash does not escape let source = r##"b"omkmok\Xaa""##; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_raw_byte_literal_1() { let source = r"rb'\x1z'"; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_raw_byte_literal_2() { let source = r"rb'\\'"; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_escape_octet() { let source = r##"b'\43a\4\1234'"##; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_fstring_escaped_newline() { let source = r#"f"\n{x}""#; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_fstring_unescaped_newline() { let source = r#"f""" {x}""""#; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_fstring_escaped_character() { let source = r#"f"\\{x}""#; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_raw_fstring() { let source = r#"rf"{x}""#; let parse_ast = 
parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_triple_quoted_raw_fstring() { let source = r#"rf"""{x}""""#; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } #[test] fn test_fstring_line_continuation() { let source = r#"rf"\ {x}""#; let parse_ast = parse_program(source, "").unwrap(); insta::assert_debug_snapshot!(parse_ast); } } rustpython-parser-0.2.0/src/string_parser.rs000064400000000000000000000567071046102023000174120ustar 00000000000000use self::FStringErrorType::*; use crate::{ ast::{Constant, ConversionFlag, Expr, ExprKind, Location}, error::{FStringError, FStringErrorType, LexicalError, LexicalErrorType, ParseError}, parser::parse_expression_located, token::StringKind, }; use std::{iter, str}; /// unicode_name2 does not expose `MAX_NAME_LENGTH`, so we replicate that constant here, fix #3798 pub const MAX_UNICODE_NAME: usize = 88; pub struct StringParser<'a> { chars: iter::Peekable>, kind: StringKind, str_start: Location, str_end: Location, location: Location, } impl<'a> StringParser<'a> { pub fn new( source: &'a str, kind: StringKind, triple_quoted: bool, str_start: Location, str_end: Location, ) -> Self { let offset = kind.prefix_len() + if triple_quoted { 3 } else { 1 }; Self { chars: source.chars().peekable(), kind, str_start, str_end, location: str_start.with_col_offset(offset), } } #[inline] fn next_char(&mut self) -> Option { let Some(c) = self.chars.next() else { return None }; if c == '\n' { self.location.newline(); } else { self.location.go_right(); } Some(c) } #[inline] fn peek(&mut self) -> Option<&char> { self.chars.peek() } #[inline] fn get_pos(&self) -> Location { self.location } #[inline] fn expr(&self, node: ExprKind) -> Expr { Expr::new(self.str_start, self.str_end, node) } fn parse_unicode_literal(&mut self, literal_number: usize) -> Result { let mut p: u32 = 0u32; let unicode_error = LexicalError::new(LexicalErrorType::UnicodeError, 
self.get_pos()); for i in 1..=literal_number { match self.next_char() { Some(c) => match c.to_digit(16) { Some(d) => p += d << ((literal_number - i) * 4), None => return Err(unicode_error), }, None => return Err(unicode_error), } } match p { 0xD800..=0xDFFF => Ok(std::char::REPLACEMENT_CHARACTER), _ => std::char::from_u32(p).ok_or(unicode_error), } } fn parse_octet(&mut self, first: char) -> char { let mut octet_content = String::new(); octet_content.push(first); while octet_content.len() < 3 { if let Some('0'..='7') = self.peek() { octet_content.push(self.next_char().unwrap()) } else { break; } } let value = u32::from_str_radix(&octet_content, 8).unwrap(); char::from_u32(value).unwrap() } fn parse_unicode_name(&mut self) -> Result { let start_pos = self.get_pos(); match self.next_char() { Some('{') => {} _ => return Err(LexicalError::new(LexicalErrorType::StringError, start_pos)), } let start_pos = self.get_pos(); let mut name = String::new(); loop { match self.next_char() { Some('}') => break, Some(c) => name.push(c), None => { return Err(LexicalError::new( LexicalErrorType::StringError, self.get_pos(), )) } } } if name.len() > MAX_UNICODE_NAME { return Err(LexicalError::new( LexicalErrorType::UnicodeError, self.get_pos(), )); } unicode_names2::character(&name) .ok_or_else(|| LexicalError::new(LexicalErrorType::UnicodeError, start_pos)) } fn parse_escaped_char(&mut self) -> Result { match self.next_char() { Some(c) => { let char = match c { '\\' => '\\', '\'' => '\'', '\"' => '"', 'a' => '\x07', 'b' => '\x08', 'f' => '\x0c', 'n' => '\n', 'r' => '\r', 't' => '\t', 'v' => '\x0b', o @ '0'..='7' => self.parse_octet(o), 'x' => self.parse_unicode_literal(2)?, 'u' if !self.kind.is_bytes() => self.parse_unicode_literal(4)?, 'U' if !self.kind.is_bytes() => self.parse_unicode_literal(8)?, 'N' if !self.kind.is_bytes() => self.parse_unicode_name()?, // Special cases where the escape sequence is not a single character '\n' => return Ok("".to_string()), c => { if 
self.kind.is_bytes() && !c.is_ascii() { return Err(LexicalError { error: LexicalErrorType::OtherError( "bytes can only contain ASCII literal characters".to_owned(), ), location: self.get_pos(), }); } return Ok(format!("\\{c}")); } }; Ok(char.to_string()) } None => Err(LexicalError { error: LexicalErrorType::StringError, location: self.get_pos(), }), } } fn parse_formatted_value(&mut self, nested: u8) -> Result, LexicalError> { let mut expression = String::new(); let mut spec = None; let mut delims = Vec::new(); let mut conversion = ConversionFlag::None; let mut self_documenting = false; let mut trailing_seq = String::new(); let location = self.get_pos(); while let Some(ch) = self.next_char() { match ch { // can be integrated better with the remaining code, but as a starting point ok // in general I would do here a tokenizing of the fstrings to omit this peeking. '!' | '=' | '>' | '<' if self.peek() == Some(&'=') => { expression.push(ch); expression.push('='); self.next_char(); } '!' if delims.is_empty() && self.peek() != Some(&'=') => { if expression.trim().is_empty() { return Err(FStringError::new(EmptyExpression, self.get_pos()).into()); } conversion = match self.next_char() { Some('s') => ConversionFlag::Str, Some('a') => ConversionFlag::Ascii, Some('r') => ConversionFlag::Repr, Some(_) => { return Err( FStringError::new(InvalidConversionFlag, self.get_pos()).into() ); } None => { return Err(FStringError::new(UnclosedLbrace, self.get_pos()).into()); } }; match self.peek() { Some('}' | ':') => {} Some(_) | None => { return Err(FStringError::new(UnclosedLbrace, self.get_pos()).into()); } } } // match a python 3.8 self documenting expression // format '{' PYTHON_EXPRESSION '=' FORMAT_SPECIFIER? 
'}' '=' if self.peek() != Some(&'=') && delims.is_empty() => { self_documenting = true; } ':' if delims.is_empty() => { let parsed_spec = self.parse_spec(nested)?; spec = Some(Box::new(self.expr(ExprKind::JoinedStr { values: parsed_spec, }))); } '(' | '{' | '[' => { expression.push(ch); delims.push(ch); } ')' => { let last_delim = delims.pop(); match last_delim { Some('(') => { expression.push(ch); } Some(c) => { return Err(FStringError::new( MismatchedDelimiter(c, ')'), self.get_pos(), ) .into()); } None => { return Err(FStringError::new(Unmatched(')'), self.get_pos()).into()); } } } ']' => { let last_delim = delims.pop(); match last_delim { Some('[') => { expression.push(ch); } Some(c) => { return Err(FStringError::new( MismatchedDelimiter(c, ']'), self.get_pos(), ) .into()); } None => { return Err(FStringError::new(Unmatched(']'), self.get_pos()).into()); } } } '}' if !delims.is_empty() => { let last_delim = delims.pop(); match last_delim { Some('{') => { expression.push(ch); } Some(c) => { return Err(FStringError::new( MismatchedDelimiter(c, '}'), self.get_pos(), ) .into()); } None => {} } } '}' => { if expression.trim().is_empty() { return Err(FStringError::new(EmptyExpression, self.get_pos()).into()); } let ret = if !self_documenting { vec![self.expr(ExprKind::FormattedValue { value: Box::new(parse_fstring_expr(&expression, location).map_err( |e| { FStringError::new( InvalidExpression(Box::new(e.error)), location, ) }, )?), conversion: conversion as _, format_spec: spec, })] } else { vec![ self.expr(ExprKind::Constant { value: Constant::Str(expression.to_owned() + "="), kind: None, }), self.expr(ExprKind::Constant { value: trailing_seq.into(), kind: None, }), self.expr(ExprKind::FormattedValue { value: Box::new( parse_fstring_expr(&expression, location).map_err(|e| { FStringError::new( InvalidExpression(Box::new(e.error)), location, ) })?, ), conversion: (if conversion == ConversionFlag::None && spec.is_none() { ConversionFlag::Repr } else { conversion }) as 
_, format_spec: spec, }), ] }; return Ok(ret); } '"' | '\'' => { expression.push(ch); loop { let Some(c) = self.next_char() else { return Err(FStringError::new(UnterminatedString, self.get_pos()).into()); }; expression.push(c); if c == ch { break; } } } ' ' if self_documenting => { trailing_seq.push(ch); } '\\' => return Err(FStringError::new(UnterminatedString, self.get_pos()).into()), _ => { if self_documenting { return Err(FStringError::new(UnclosedLbrace, self.get_pos()).into()); } expression.push(ch); } } } Err(FStringError::new(UnclosedLbrace, self.get_pos()).into()) } fn parse_spec(&mut self, nested: u8) -> Result, LexicalError> { let mut spec_constructor = Vec::new(); let mut constant_piece = String::new(); while let Some(&next) = self.peek() { match next { '{' => { if !constant_piece.is_empty() { spec_constructor.push(self.expr(ExprKind::Constant { value: constant_piece.drain(..).collect::().into(), kind: None, })); } let parsed_expr = self.parse_fstring(nested + 1)?; spec_constructor.extend(parsed_expr); continue; } '}' => { break; } _ => { constant_piece.push(next); } } self.next_char(); } if !constant_piece.is_empty() { spec_constructor.push(self.expr(ExprKind::Constant { value: constant_piece.drain(..).collect::().into(), kind: None, })); } Ok(spec_constructor) } fn parse_fstring(&mut self, nested: u8) -> Result, LexicalError> { if nested >= 2 { return Err(FStringError::new(ExpressionNestedTooDeeply, self.get_pos()).into()); } let mut content = String::new(); let mut values = vec![]; while let Some(&ch) = self.peek() { match ch { '{' => { self.next_char(); if nested == 0 { match self.peek() { Some('{') => { self.next_char(); content.push('{'); continue; } None => { return Err(FStringError::new(UnclosedLbrace, self.get_pos()).into()) } _ => {} } } if !content.is_empty() { values.push(self.expr(ExprKind::Constant { value: content.drain(..).collect::().into(), kind: None, })); } let parsed_values = self.parse_formatted_value(nested)?; 
values.extend(parsed_values); } '}' => { if nested > 0 { break; } self.next_char(); if let Some('}') = self.peek() { self.next_char(); content.push('}'); } else { return Err(FStringError::new(SingleRbrace, self.get_pos()).into()); } } '\\' if !self.kind.is_raw() => { self.next_char(); content.push_str(&self.parse_escaped_char()?); } _ => { content.push(ch); self.next_char(); } } } if !content.is_empty() { values.push(self.expr(ExprKind::Constant { value: content.into(), kind: None, })) } Ok(values) } pub fn parse_bytes(&mut self) -> Result { let mut content = String::new(); while let Some(ch) = self.next_char() { match ch { '\\' if !self.kind.is_raw() => { content.push_str(&self.parse_escaped_char()?); } ch => { if !ch.is_ascii() { return Err(LexicalError::new( LexicalErrorType::OtherError( "bytes can only contain ASCII literal characters".to_string(), ), self.get_pos(), )); } content.push(ch); } } } Ok(self.expr(ExprKind::Constant { value: Constant::Bytes(content.chars().map(|c| c as u8).collect()), kind: None, })) } pub fn parse_string(&mut self) -> Result { let mut content = String::new(); while let Some(ch) = self.next_char() { match ch { '\\' if !self.kind.is_raw() => { content.push_str(&self.parse_escaped_char()?); } ch => content.push(ch), } } Ok(self.expr(ExprKind::Constant { value: Constant::Str(content), kind: self.kind.is_unicode().then(|| "u".to_string()), })) } pub fn parse(&mut self) -> Result, LexicalError> { if self.kind.is_fstring() { self.parse_fstring(0) } else if self.kind.is_bytes() { self.parse_bytes().map(|expr| vec![expr]) } else { self.parse_string().map(|expr| vec![expr]) } } } fn parse_fstring_expr(source: &str, location: Location) -> Result { let fstring_body = format!("({source})"); parse_expression_located(&fstring_body, "", location.with_col_offset(-1)) } pub fn parse_string( source: &str, kind: StringKind, triple_quoted: bool, start: Location, end: Location, ) -> Result, LexicalError> { StringParser::new(source, kind, triple_quoted, 
start, end).parse()
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Test helper: parse `source` as the body of an f-string literal.
    fn parse_fstring(source: &str) -> Result, LexicalError> {
        StringParser::new(
            source,
            StringKind::FString,
            false,
            Location::default(),
            Location::default().with_col_offset(source.len() + 3), // 3 for prefix and quotes
        )
        .parse()
    }

    #[test]
    fn test_parse_fstring() {
        // Mixes plain fields, padded fields, and the "{{" / "}}" escapes.
        let source = "{a}{ b }{{foo}}";
        let parse_ast = parse_fstring(source).unwrap();
        insta::assert_debug_snapshot!(parse_ast);
    }

    #[test]
    fn test_parse_fstring_nested_spec() {
        // Format spec containing its own replacement field.
        let source = "{foo:{spec}}";
        let parse_ast = parse_fstring(source).unwrap();
        insta::assert_debug_snapshot!(parse_ast);
    }

    #[test]
    fn test_parse_fstring_not_nested_spec() {
        let source = "{foo:spec}";
        let parse_ast = parse_fstring(source).unwrap();
        insta::assert_debug_snapshot!(parse_ast);
    }

    #[test]
    fn test_parse_empty_fstring() {
        insta::assert_debug_snapshot!(parse_fstring("").unwrap());
    }

    // Self-documenting ("debug") fields of the form {expr=}.
    #[test]
    fn test_fstring_parse_selfdocumenting_base() {
        let src = "{user=}";
        let parse_ast = parse_fstring(src).unwrap();
        insta::assert_debug_snapshot!(parse_ast);
    }

    #[test]
    fn test_fstring_parse_selfdocumenting_base_more() {
        let src = "mix {user=} with text and {second=}";
        let parse_ast = parse_fstring(src).unwrap();
        insta::assert_debug_snapshot!(parse_ast);
    }

    #[test]
    fn test_fstring_parse_selfdocumenting_format() {
        let src = "{user=:>10}";
        let parse_ast = parse_fstring(src).unwrap();
        insta::assert_debug_snapshot!(parse_ast);
    }

    /// Test helper: parse `source` and extract the expected f-string error.
    fn parse_fstring_error(source: &str) -> FStringErrorType {
        parse_fstring(source)
            .map_err(|e| match e.error {
                LexicalErrorType::FStringError(e) => e,
                e => unreachable!("Expected FStringError: {:?}", e),
            })
            .err()
            .expect("Expected error")
    }

    #[test]
    fn test_parse_invalid_fstring() {
        assert_eq!(parse_fstring_error("{5!a"), UnclosedLbrace);
        assert_eq!(parse_fstring_error("{5!a1}"), UnclosedLbrace);
        assert_eq!(parse_fstring_error("{5!"), UnclosedLbrace);
        assert_eq!(parse_fstring_error("abc{!a 'cat'}"), EmptyExpression);
        assert_eq!(parse_fstring_error("{!a"), EmptyExpression);
// (continuation of `test_parse_invalid_fstring` in `mod tests`)
        assert_eq!(parse_fstring_error("{ !a}"), EmptyExpression);
        assert_eq!(parse_fstring_error("{5!}"), InvalidConversionFlag);
        assert_eq!(parse_fstring_error("{5!x}"), InvalidConversionFlag);
        assert_eq!(
            parse_fstring_error("{a:{a:{b}}}"),
            ExpressionNestedTooDeeply
        );
        assert_eq!(parse_fstring_error("{a:b}}"), SingleRbrace);
        assert_eq!(parse_fstring_error("}"), SingleRbrace);
        assert_eq!(parse_fstring_error("{a:{b}"), UnclosedLbrace);
        assert_eq!(parse_fstring_error("{"), UnclosedLbrace);
        assert_eq!(parse_fstring_error("{}"), EmptyExpression);

        // TODO: check for InvalidExpression enum?
        assert!(parse_fstring("{class}").is_err());
    }

    #[test]
    fn test_parse_fstring_not_equals() {
        // "!=" must not be mistaken for a "!" conversion flag.
        let source = "{1 != 2}";
        let parse_ast = parse_fstring(source).unwrap();
        insta::assert_debug_snapshot!(parse_ast);
    }

    #[test]
    fn test_parse_fstring_equals() {
        let source = "{42 == 42}";
        let parse_ast = parse_fstring(source).unwrap();
        insta::assert_debug_snapshot!(parse_ast);
    }

    #[test]
    fn test_parse_fstring_selfdoc_prec_space() {
        // Space before '=' in a self-documenting field.
        let source = "{x =}";
        let parse_ast = parse_fstring(source).unwrap();
        insta::assert_debug_snapshot!(parse_ast);
    }

    #[test]
    fn test_parse_fstring_selfdoc_trailing_space() {
        // Space after '=' in a self-documenting field.
        let source = "{x= }";
        let parse_ast = parse_fstring(source).unwrap();
        insta::assert_debug_snapshot!(parse_ast);
    }

    #[test]
    fn test_parse_fstring_yield_expr() {
        let source = "{yield}";
        let parse_ast = parse_fstring(source).unwrap();
        insta::assert_debug_snapshot!(parse_ast);
    }
}
// NOTE(review): the line below is residue of the crates.io tar archive this
// file was extracted from (tar member header for src/token.rs); it is not
// Rust source and is preserved verbatim.
rustpython-parser-0.2.0/src/token.rs000064400000000000000000000210221046102023000156300ustar 00000000000000
//! Different token definitions.
//! Loosely based on token.h from CPython source:
use num_bigint::BigInt;
use std::fmt;

/// Python source code can be tokenized in a sequence of these tokens.
#[derive(Clone, Debug, PartialEq)]
pub enum Tok {
    /// A name/identifier token.
    Name {
        name: String,
    },
    /// An integer literal (arbitrary precision via BigInt).
    Int {
        value: BigInt,
    },
    /// A floating-point literal.
    Float {
        value: f64,
    },
    /// A complex literal.
    /// NOTE(review): Python source only writes the imaginary part (`2j`),
    /// so `real` is presumably always 0.0 here -- confirm with the lexer.
    Complex {
        real: f64,
        imag: f64,
    },
    /// Any string-like literal, including f-strings and bytes; the prefix
    /// is carried in `kind`.
    String {
        value: String,
        kind: StringKind,
        triple_quoted: bool,
    },
    Newline,
    Indent,
    Dedent,
    // Synthetic markers selecting the parser's start symbol:
    StartModule,
    StartInteractive,
    StartExpression,
    EndOfFile,
    Lpar,
    Rpar,
    Lsqb,
    Rsqb,
    Colon,
    Comma,
    Comment(String),
    Semi,
    Plus,
    Minus,
    Star,
    Slash,
    Vbar,  // '|'
    Amper, // '&'
    Less,
    Greater,
    Equal,
    Dot,
    Percent,
    Lbrace,
    Rbrace,
    EqEqual,
    NotEqual,
    LessEqual,
    GreaterEqual,
    Tilde,
    CircumFlex,
    LeftShift,
    RightShift,
    DoubleStar,
    DoubleStarEqual, // '**='
    PlusEqual,
    MinusEqual,
    StarEqual,
    SlashEqual,
    PercentEqual,
    AmperEqual, // '&='
    VbarEqual,
    CircumflexEqual, // '^='
    LeftShiftEqual,
    RightShiftEqual,
    DoubleSlash, // '//'
    DoubleSlashEqual,
    ColonEqual,
    At,
    AtEqual,
    Rarrow,
    Ellipsis,
    // Keywords (alphabetically):
    False,
    None,
    True,
    And,
    As,
    Assert,
    Async,
    Await,
    Break,
    Class,
    Continue,
    Def,
    Del,
    Elif,
    Else,
    Except,
    Finally,
    For,
    From,
    Global,
    If,
    Import,
    In,
    Is,
    Lambda,
    Nonlocal,
    Not,
    Or,
    Pass,
    Raise,
    Return,
    Try,
    While,
    With,
    Yield,
}

impl fmt::Display for Tok {
    /// Renders the token roughly as it appears in source, mainly for
    /// diagnostics; most variants are wrapped in single quotes.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use Tok::*;
        match self {
            Name { name } => write!(f, "'{name}'"),
            Int { value } => write!(f, "'{value}'"),
            Float { value } => write!(f, "'{value}'"),
            // NOTE(review): prints e.g. "1j2" for real=1, imag=2; the 'j'
            // placement between the parts looks odd -- confirm it is intended.
            Complex { real, imag } => write!(f, "{real}j{imag}"),
            String {
                value,
                kind,
                triple_quoted,
            } => {
                // Reconstruct prefix + quotes around the raw value.
                let quotes = "\"".repeat(if *triple_quoted { 3 } else { 1 });
                write!(f, "{kind}{quotes}{value}{quotes}")
            }
            Newline => f.write_str("Newline"),
            Indent => f.write_str("Indent"),
            Dedent => f.write_str("Dedent"),
            // NOTE(review): variant is `StartModule` but prints "StartProgram".
            StartModule => f.write_str("StartProgram"),
            StartInteractive => f.write_str("StartInteractive"),
            StartExpression => f.write_str("StartExpression"),
            EndOfFile => f.write_str("EOF"),
            Lpar => f.write_str("'('"),
            Rpar => f.write_str("')'"),
            Lsqb => f.write_str("'['"),
            Rsqb => f.write_str("']'"),
            Colon => f.write_str("':'"),
            Comma => f.write_str("','"),
            Comment(value) =>
// (continuation of the `match` in `impl fmt::Display for Tok`)
            f.write_str(value),
            Semi => f.write_str("';'"),
            Plus => f.write_str("'+'"),
            Minus => f.write_str("'-'"),
            Star => f.write_str("'*'"),
            Slash => f.write_str("'/'"),
            Vbar => f.write_str("'|'"),
            Amper => f.write_str("'&'"),
            Less => f.write_str("'<'"),
            Greater => f.write_str("'>'"),
            Equal => f.write_str("'='"),
            Dot => f.write_str("'.'"),
            Percent => f.write_str("'%'"),
            Lbrace => f.write_str("'{'"),
            Rbrace => f.write_str("'}'"),
            EqEqual => f.write_str("'=='"),
            NotEqual => f.write_str("'!='"),
            LessEqual => f.write_str("'<='"),
            GreaterEqual => f.write_str("'>='"),
            Tilde => f.write_str("'~'"),
            CircumFlex => f.write_str("'^'"),
            LeftShift => f.write_str("'<<'"),
            RightShift => f.write_str("'>>'"),
            DoubleStar => f.write_str("'**'"),
            DoubleStarEqual => f.write_str("'**='"),
            PlusEqual => f.write_str("'+='"),
            MinusEqual => f.write_str("'-='"),
            StarEqual => f.write_str("'*='"),
            SlashEqual => f.write_str("'/='"),
            PercentEqual => f.write_str("'%='"),
            AmperEqual => f.write_str("'&='"),
            VbarEqual => f.write_str("'|='"),
            CircumflexEqual => f.write_str("'^='"),
            LeftShiftEqual => f.write_str("'<<='"),
            RightShiftEqual => f.write_str("'>>='"),
            DoubleSlash => f.write_str("'//'"),
            DoubleSlashEqual => f.write_str("'//='"),
            At => f.write_str("'@'"),
            AtEqual => f.write_str("'@='"),
            Rarrow => f.write_str("'->'"),
            Ellipsis => f.write_str("'...'"),
            // Keyword tokens, printed as their source spelling:
            False => f.write_str("'False'"),
            None => f.write_str("'None'"),
            True => f.write_str("'True'"),
            And => f.write_str("'and'"),
            As => f.write_str("'as'"),
            Assert => f.write_str("'assert'"),
            Async => f.write_str("'async'"),
            Await => f.write_str("'await'"),
            Break => f.write_str("'break'"),
            Class => f.write_str("'class'"),
            Continue => f.write_str("'continue'"),
            Def => f.write_str("'def'"),
            Del => f.write_str("'del'"),
            Elif => f.write_str("'elif'"),
            Else => f.write_str("'else'"),
            Except => f.write_str("'except'"),
            Finally => f.write_str("'finally'"),
            For => f.write_str("'for'"),
            From => f.write_str("'from'"),
            Global => f.write_str("'global'"),
            If =>
// (continuation of the `match` in `impl fmt::Display for Tok`)
            f.write_str("'if'"),
            Import => f.write_str("'import'"),
            In => f.write_str("'in'"),
            Is => f.write_str("'is'"),
            Lambda => f.write_str("'lambda'"),
            Nonlocal => f.write_str("'nonlocal'"),
            Not => f.write_str("'not'"),
            Or => f.write_str("'or'"),
            Pass => f.write_str("'pass'"),
            Raise => f.write_str("'raise'"),
            Return => f.write_str("'return'"),
            Try => f.write_str("'try'"),
            While => f.write_str("'while'"),
            With => f.write_str("'with'"),
            Yield => f.write_str("'yield'"),
            // NOTE(review): ColonEqual (':=') sits after the keyword arms
            // here, unlike its position in the enum declaration.
            ColonEqual => f.write_str("':='"),
        }
    }
}

/// The prefix of a string literal: plain, f-string, bytes, the raw
/// variants, or the legacy `u` unicode prefix.
#[derive(PartialEq, Eq, Debug, Clone)]
pub enum StringKind {
    String,
    FString,
    Bytes,
    RawString,
    RawFString,
    RawBytes,
    Unicode,
}

// Single-character prefix (r/f/u/b, either case).
// NOTE(review): the generic argument was stripped by extraction; this was
// presumably `impl TryFrom<char> for StringKind` (and the return types
// `Result<Self, Self::Error>`) -- confirm against the repository.
impl TryFrom for StringKind {
    type Error = String;

    fn try_from(ch: char) -> Result {
        match ch {
            'r' | 'R' => Ok(StringKind::RawString),
            'f' | 'F' => Ok(StringKind::FString),
            'u' | 'U' => Ok(StringKind::Unicode),
            'b' | 'B' => Ok(StringKind::Bytes),
            c => Err(format!("Unexpected string prefix: {c}")),
        }
    }
}

// Two-character prefix: raw f-strings and raw bytes, letters in either order.
impl TryFrom<[char; 2]> for StringKind {
    type Error = String;

    fn try_from(chars: [char; 2]) -> Result {
        match chars {
            ['r' | 'R', 'f' | 'F'] => Ok(StringKind::RawFString),
            ['f' | 'F', 'r' | 'R'] => Ok(StringKind::RawFString),
            ['r' | 'R', 'b' | 'B'] => Ok(StringKind::RawBytes),
            ['b' | 'B', 'r' | 'R'] => Ok(StringKind::RawBytes),
            [c1, c2] => Err(format!("Unexpected string prefix: {c1}{c2}")),
        }
    }
}

impl fmt::Display for StringKind {
    /// Writes the canonical lowercase prefix (empty for a plain string).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use StringKind::*;
        match self {
            String => f.write_str(""),
            FString => f.write_str("f"),
            Bytes => f.write_str("b"),
            RawString => f.write_str("r"),
            RawFString => f.write_str("rf"),
            RawBytes => f.write_str("rb"),
            Unicode => f.write_str("u"),
        }
    }
}

impl StringKind {
    /// True for any raw variant (backslash escapes are not processed).
    pub fn is_raw(&self) -> bool {
        use StringKind::{RawBytes, RawFString, RawString};
        matches!(self, RawString | RawFString | RawBytes)
    }

    /// True for f-strings, raw or not.
    pub fn is_fstring(&self) -> bool {
        use StringKind::{FString, RawFString};
        matches!(self, FString | RawFString)
    }

    /// True for bytes literals, raw or not.
    pub fn is_bytes(&self) -> bool {
        use
// (continuation of `StringKind::is_bytes`, split at the chunk boundary)
        StringKind::{Bytes, RawBytes};
        matches!(self, Bytes | RawBytes)
    }

    /// True only for the legacy `u"..."` unicode prefix.
    pub fn is_unicode(&self) -> bool {
        matches!(self, StringKind::Unicode)
    }

    /// Number of prefix characters preceding the opening quote
    /// (e.g. 0 for `"..."`, 1 for `b"..."`, 2 for `rb"..."`).
    pub fn prefix_len(&self) -> usize {
        use StringKind::*;
        match self {
            String => 0,
            RawString | FString | Unicode | Bytes => 1,
            RawFString | RawBytes => 2,
        }
    }
}