pax_global_header00006660000000000000000000000064146551041560014521gustar00rootroot0000000000000052 comment=ed583ec2c53eeff05d9234e641735f893388cb8b parser-3.3.4.2/000077500000000000000000000000001465510415600131645ustar00rootroot00000000000000parser-3.3.4.2/.github/000077500000000000000000000000001465510415600145245ustar00rootroot00000000000000parser-3.3.4.2/.github/workflows/000077500000000000000000000000001465510415600165615ustar00rootroot00000000000000parser-3.3.4.2/.github/workflows/test.yml000066400000000000000000000020611465510415600202620ustar00rootroot00000000000000name: Tests on: push: branches: - master pull_request: jobs: test: runs-on: ubuntu-latest env: BUNDLE_JOBS: 4 BUNDLE_RETRY: 3 CI: true TRUFFLERUBYOPT: "--engine.Mode=latency" strategy: fail-fast: false matrix: ruby: ["3.0.7", "3.1.6", "3.2.5", "3.3.4", "jruby-9.2"] test_command: ["bundle exec rake test"] include: - ruby: "head" test_command: "bundle exec rake test || true" - ruby: "truffleruby" test_command: "bundle exec rake test || true" - ruby: "3.2.5" test_command: "./ci/run_rubocop_specs || true" - ruby: "3.3.4" test_command: "./ci/run_rubocop_specs || true" steps: - uses: actions/checkout@v4 - name: Install Ragel run: | sudo apt-get update sudo apt-get install ragel - uses: ruby/setup-ruby@v1 with: ruby-version: ${{ matrix.ruby }} bundler-cache: true - name: Run tests run: | ${{ matrix.test_command }} parser-3.3.4.2/.gitignore000066400000000000000000000012131465510415600151510ustar00rootroot00000000000000*.gem *.rbc .bundle .config .yardoc Gemfile.lock InstalledFiles _yardoc coverage lib/bundler/man/* pkg rdoc yardoc spec/reports test/tmp test/version_tmp tmp *.output .ruby-version .ruby-gemset lib/parser/lexer-F0.rb lib/parser/lexer-F1.rb lib/parser/lexer-strings.rb lib/parser/ruby18.rb lib/parser/ruby19.rb lib/parser/ruby20.rb lib/parser/ruby21.rb lib/parser/ruby22.rb lib/parser/ruby23.rb lib/parser/ruby24.rb lib/parser/ruby25.rb lib/parser/ruby26.rb lib/parser/ruby27.rb lib/parser/ruby28.rb lib/parser/ruby30.rb lib/parser/ruby31.rb lib/parser/ruby32.rb lib/parser/ruby33.rb lib/parser/ruby34.rb lib/parser/macruby.rb lib/parser/rubymotion.rb parser-3.3.4.2/.yardopts000066400000000000000000000010221465510415600150250ustar00rootroot00000000000000./lib/parser/**/*.rb ./lib/parser.rb -m markdown -M kramdown -o ./yardoc -r ./README.md --asset ./doc/css/common.css:css/common.css --verbose --api public --exclude lib/parser/lexer-F0.rb --exclude lib/parser/lexer-F1.rb --exclude lib/parser/ruby18.rb --exclude lib/parser/ruby19.rb --exclude lib/parser/ruby20.rb --exclude lib/parser/ruby21.rb --exclude lib/parser/ruby22.rb --exclude lib/parser/ruby23.rb --exclude lib/parser/ruby24.rb --exclude lib/parser/macruby.rb --exclude lib/parser/rubymotion.rb - ./doc/*.md LICENSE.txt parser-3.3.4.2/CHANGELOG.md000066400000000000000000001503311465510415600150000ustar00rootroot00000000000000Changelog ========= Not released (2024-08-08) ------------------------- Bugs fixed: * Fix errros in the ascii specs of RuboCop (#1037) (Koichi ITO) v3.3.4.1 (2024-08-07) --------------------- API modifications: * Bump 3.2 branch to 3.2.5. 
(#1036) (Ilya Bylich) * Bump Racc to 1.8.1 (#1031) (Koichi ITO) Bugs fixed: * builder.rb: catch encoding errors when parsing invalid encoding regexp (#1033) (Earlopain) v3.3.4.0 (2024-07-10) --------------------- API modifications: * Bump maintenance branches to 3.3.4 (#1027) (Koichi ITO) v3.3.3.0 (2024-06-12) --------------------- API modifications: * Bump maintenance branches to 3.3.3 (#1023) (Koichi ITO) * Bump Racc to 1.8.0 (#1018) (Koichi ITO) v3.3.2.0 (2024-05-30) --------------------- API modifications: * Bump 3.3 branch to 3.3.2 (Ilya Bylich) * Bump 3.1 branch to 3.1.6 (#1014) (Koichi ITO) v3.3.1.0 (2024-04-26) --------------------- API modifications: * Bump parser branches to 3.0.7, 3.1.5, 3.2.4, 3.3.1 (#1011) (Ilya Bylich) * Use `require_relative` in the Parser codebase (#1003) (Koichi ITO) Features implemented: * ruby{33,34}.y: allow blocks inherit anonymous args. (#1010) (Ilya Bylich) * Raise a more specific error when encountering an unknown magic comment encoding (#999) (Earlopain) v3.3.0.5 (2024-01-21) --------------------- API modifications: * Bump 3.2 branch to 3.2.3 (#993) (Koichi ITO) v3.3.0.4 (2024-01-15) --------------------- Features implemented: * Register a Ruby 3.4 parser (#991) (Jean byroot Boussier) v3.3.0.3 (2024-01-12) --------------------- Bugs fixed: * lexer.rl: accept tabs before closing heredoc delimiter (#990) (Ilya Bylich) v3.3.0.2 (2024-01-07) --------------------- Bugs fixed: * Fix an error when using heredoc with non-word delimiters (#987) (Koichi ITO) v3.3.0.1 (2024-01-06) --------------------- Bugs fixed: * Supports Ruby 2.0+ runtime (#986) (Koichi ITO) v3.3.0.0 (2024-01-05) --------------------- API modifications: * current.rb: mark 3.3 branch as stable (#984) (Ilya Bylich) * ruby33.y: extract string_dend (#969) (Ilya Bylich) * lexer.rl: treat numparams as locals (#968) (Ilya Bylich) * ruby33.y: extract words_sep (#967) (Ilya Bylich) * literal.rb: match heredoc identifier from end of line (#965) (Ilya Bylich) * ruby33.y: extract {endless_command,endless_arg} rules (#964) (Ilya Bylich) * Bump Racc to 1.7.3 (#954) (Koichi ITO) Features implemented: * ruby33.y: reject ambiguous anonymous arguments (#983) (Ilya Bylich) * ruby33.y: extract arg_splat rule. (#981) (Ilya Bylich) * builder.rb: warn `it` in a block with no ordinary params. (#980) (Ilya Bylich) * builder.rb: extract named captures only from static regexes. (#979) (Ilya Bylich) * ruby33.y: accept expr_value in sclass definition. 
(#978) (Ilya Bylich) * ruby33.y: extract p_in_kwarg (#977) (Ilya Bylich) * ruby33.y: extract p_assoc and p_in rules (#976) (Ilya Bylich) * ruby33.y: reject invalid gvar as symbol (#974) (Ilya Bylich) * ruby33.y: properly restore in_defined flag, extract begin_defined rule (#973) (Ilya Bylich) * builder.rb: reject multi-char gvar names starting with 0 (#972) (Ilya Bylich) * ruby33.y: allow semicolon in parenthesis at the first argument of command call (#971) (Ilya Bylich) * ruby33.y: parse qualified const with brace block as a method call (#970) (Ilya Bylich) v3.2.2.4 (2023-10-04) --------------------- API modifications: * bump Racc to 1.7.1 (#930) (Koichi ITO) Bugs fixed: * Fix an error for flip-flop with beginless or endless ranges (#946) (Koichi ITO) v3.2.2.3 (2023-06-08) --------------------- API modifications: * Add Racc to runtime dependencies (#929) (Koichi ITO) v3.2.2.2 (2023-06-08) --------------------- API modifications: * bump racc to 1.7.0 (#928) (Ilya Bylich) v3.2.2.0 (2023-04-01) --------------------- API modifications: * Bump maintenance branches to 3.2.2, 3.1.4, 3.0.6, and 2.7.8 (#920) (Koichi ITO) v3.2.1.1 (2023-03-08) --------------------- API modifications: * bump racc to 1.6.2 (#916) (Ilya Bylich) * backport tests for mixed usage of anonymous kwarg/kwrestarg and forwarded-arguments (#915) (Ilya Bylich) * add tests for kwopt+kwrest arguments and forwarded_kwrestarg. (#913) (Ilya Bylich) Bugs fixed: * builders/default: allow forwarded_kwrestarg with additional kwargs (#912) (Owen Stephens) v3.2.1.0 (2023-02-09) --------------------- API modifications: * Bump maintenance branches to 3.2.1 (#908) (Koichi ITO) * lexer.rl: extract strings lexing to lexer-strings.rl (#905) (Ilya Bylich) Features implemented: * Ruby 3.3 support (#904) (Ole Friis Østergaard) v3.2.0.0 (2023-01-03) --------------------- API modifications: * lexer.rl: extract `extend_string_escaped` to a separate method (#893) (Ilya Bylich) * bump Racc to 1.6.1 (#890) (Koichi ITO) Features implemented: * lexer.rl: use Ragel -F0 on non-CRuby (#894) (Benoit Daloze) * ruby32.y: Allow optional newlines before closing parenthesis (#892) (Koichi ITO) Bugs fixed: * ruby32.y: fix a parse error when using forwarded argument with restarg (#889) (Koichi ITO) v3.1.3.0 (2022-11-25) --------------------- API modifications: * Bump maintenance branches to 3.1.3, 3.0.5, 2.7.7 (#886) (Koichi ITO) * ruby32.y: reuse nonlocal_var rule. (#879) (Ilya Bylich) * ruby32.y: reuse opt_nl rule. (#878) (Ilya Bylich) Features implemented: * ruby32.y: implement forwarded restarg and kwrestarg. (#874) (Ilya Bylich) v3.1.2.1 (2022-08-08) --------------------- Bugs fixed: * ruby31.y: fix preparing the state for one-line patterns without braces (#864) (Vladimir Dementyev) v3.1.2.0 (2022-04-12) --------------------- API modifications: * Bump maintenance branches to 3.1.2, 3.0.4, 2.7.6, and 2.6.10 (#850) (Koichi ITO) Bugs fixed: * ruby32.y: fix using p_kwnorest in f_no_kwarg (#848) (Vladimir Dementyev) v3.1.1.0 (2022-02-21) --------------------- API modifications: * Bump 3.1 branch to 3.1.1 (Koichi ITO) Features implemented: * ruby32.y: Reuse operation rule for operation2 (Koichi ITO) * ruby32.y: Reuse p_kwnorest rule for f_no_kwarg (Koichi ITO) * ruby-parse: add ability to read fragment from stdin (#844) (Jeremy Stanley) * ruby32.y: reduce p_args with p_rest (#843) (Vladimir Dementyev) * ruby32.y: branch parser (#841) (Ilya Bylich) v3.1.0.0 (2022-01-03) --------------------- API modifications: * parser/current: update for 3.1.0 release. 
(#840) (Ilya Bylich) Features implemented: * ruby31.y: accept forward arg without parentheses. (#839) (Ilya Bylich) v3.0.3.2 (2021-12-08) --------------------- Bugs fixed: * dedenter.rb: fix an encoding/squiggly heredoc regression (#836) (hmdne) v3.0.3.1 (2021-11-29) --------------------- Features implemented: * ruby31.y: parse anonymous block argument. (#833) (Ilya Bylich) * ruby31.y: parse forward argument without parentheses (#832) (Ilya Bylich) v3.0.3.0 (2021-11-25) --------------------- API modifications: * Bump maintenance branches to 3.0.3, 2.7.5, and 2.6.9 (#829) (Koichi ITO) Features implemented: * lexer.rl: parse meta-control-hex chars in regexes starting from 3.1 (#828) (Ilya Bylich) * builder.rb: warn on duplicate hash key literals. (#827) (Ilya Bylich) * ruby31.y: Allow value omission in Hash literals (#818) (Svyatoslav Kryukov) * ruby31.y: Allow omission of parentheses in one line pattern matching (#816) (Koichi ITO) * ruby31.y: Add pattern matching pin support for instance/class/global variables (#813) (Koichi ITO) Bugs fixed: * ruby31.y: handle local variables as hash labels with omitted values (#820) (Ilya Bylich) * lexer.rl: Fix handling of beginless ranges at start of line (#822) (Matijs van Zuijlen) * dedenter.rb: Fix squiggly heredoc line continuation handling (#819) (Matijs van Zuijlen) * lexer.rl: fix incompatible delimiters on percent literal (#808) (Masataka Pocke Kuwabara) v3.0.2.0 (2021-07-08) --------------------- API modifications: * Bump maintenance branches to 3.0.2, 2.7.4, and 2.6.8 (#805) (Koichi ITO) Features implemented: * lexer.rl: reject `\u` after control/meta escape chars. (#807) (Ilya Bylich) * ruby31.y: allow "command" syntax in endless method definition (#801) (Koichi ITO) v3.0.1.1 (2021-05-02) --------------------- Features implemented: * Add `associate_by_identity` as an alternate to `associate` (#798) (Marc-André Lafortune) * ruby31.y: pin operator with expression (#797) (Ilya Bylich) * ruby31.y: branch parser (#792) (Koichi ITO) Bugs fixed: * fix a false positive for endless method definition (#796) (Koichi ITO) v3.0.1.0 (2021-04-06) --------------------- API modifications: * Bump maintenance branches to 3.0.1, 2.7.3, 2.6.7, and 2.5.9 (#791) (Koichi ITO) Features implemented: * `Source::TreeRewriter#inspect` [#728] (#788) (Marc-André Lafortune) * `Source::Buffer#inspect` [#728] (#787) (Marc-André Lafortune) v3.0.0.0 (2020-12-25) --------------------- API modifications: * current.rb: bump default branch to parser/ruby30. (#782) (Ilya Bylich) * do not emit truncated parts of squiggly heredoc (#774) (Ilya Bylich) * Optimize `SourceBuffer` line and column handling (#755) (Marc-André Lafortune) Features implemented: * ruby30.y: reintroduce `expr in pat` (#777) (Ilya Bylich) * builder: emit implicit hash passed to a method call as kwargs (#769) (Ilya Bylich) * lexer.rl: use more specific warning for ambiguous slash. (#768) (Ilya Bylich) * ruby30.y: allow endless method without arglist. (#765) (Ilya Bylich) * ruby30.y: use `=>` for one-line pattern matching. (#760) (Ilya Bylich) * ruby30.y: reject setters in all endless method defs. (#758) (Ilya Bylich) Bugs fixed: * lexer.rl: don't perform lookahead after tASSOC. 
(#764) (Ilya Bylich) v2.7.2.0 (2020-10-06) --------------------- API modifications: * Bump 2.7 branch to 2.7.2 (#748) (Koichi ITO) v2.7.1.5 (2020-09-24) --------------------- API modifications: * right assignment: use existing AST node types (#738) (#739) (Marc-André Lafortune) * ruby28.y -> ruby30.y (#729) (Vladimir Dementyev) Features implemented: * Optimize Range#column_range (#741) (Marc-André Lafortune) * ruby30.y: reject endless setter. (#736) (Ilya Bylich) * ruby28.y: reject assignment to numparam. (#725) (Ilya Bylich) * emit "endless method def" as `:def` node. (#716) (Ilya Bylich) Bugs fixed: * rename 2.8 to 3.0 everywhere (#732) (Ilya Bylich) * test_current.rb: 2.8 -> 3.0 (#731) (Vladimir Dementyev) * Parser#parse returns nil instead of false if error is thrown (#722) (Marc-André Lafortune) * unify locations for normal and endless method definition (#718) (Marc-André Lafortune) * ruby27.y: accept const names as hash keys for pattern matching (#717) (Koichi ITO) * Fix locations for alias / undef nodes with internal symbols (#715) (Marc-André Lafortune) v2.7.1.4 (2020-06-19) --------------------- Features implemented: * ruby28.y: add find pattern. (#714) (Ilya Bylich) * lexer.rl: reject `->...` and `->(...)` with the same error. (#713) (Ilya Bylich) * ruby28.y: accept leading args before forward arg. (#712) (Ilya Bylich) * Added `emit_forward_arg` compatibility flag. (#710) (Ilya Bylich) * ruby28.y: include entire lambda expr in lambda rule. (#708) (Ilya Bylich) * ruby28.y: extracted excessed_comma rule. (#706) (Ilya Bylich) * Source::TreeRewriter: Improved merging and representations (#703) (Marc-André Lafortune) Bugs fixed: * ruby*.y: fixed context inside lambda args and module. (#709) (Ilya Bylich) v2.7.1.3 (2020-05-26) --------------------- API modifications: * fixed all warnings. tests are running in verbose mode now. (#685) (Ilya Bylich) Features implemented: * ruby-[parse, rewrite]: add legacy switches (#699) (Marc-André Lafortune) * Added Parser::Source::Range#to_range. (#697) (Ilya Bylich) * ruby28.y: support rescue modifier in endless method definition. (#696) (Ilya Bylich) * ruby28.y: unify kwrest and no-kwrest rules. (#694) (Ilya Bylich) * ruby28.y: add right hand assignment (#682) (Vladimir Dementyev) Bugs fixed: * fix Comment.associate for postfix conditions/loops (#688) (Marc-André Lafortune) v2.7.1.2 (2020-04-30) --------------------- Features implemented: * ruby28.y: endless method definition (#676) (Vladimir Dementyev) * ruby28.y: branch parser (#677) (Vladimir Dementyev) Bugs fixed: * ruby27.y: reject invalid lvar in pattern matching (#680) (Vladimir Dementyev) v2.7.1.1 (2020-04-15) --------------------- Features implemented: * Add Source::Range#eql? and hash (#675) (Marc-André Lafortune) * Source::TreeRewriter: Add #merge, #merge! and #empty? (#674) (Marc-André Lafortune) v2.7.1.0 (2020-04-03) --------------------- API modifications: * Bump ruby versions to 2.4.10, 2.5.8, 2.6.6, 2.7.1. (#665) (Ilya Bylich) Features implemented: * ruby27.y: allow newlines inside braced pattern. (#664) (Ilya Bylich) * ruby27.y: Allow trailing comma in hash pattern (#661) (Koichi ITO) v2.7.0.5 (2020-03-20) --------------------- Features implemented: * ruby27.y: fix array pattern with tail source map (#659) (Vladimir Dementyev) Bugs fixed: * builder.rb: fix constant_pattern source map (#660) (Vladimir Dementyev) v2.7.0.4 (2020-03-02) --------------------- Bugs fixed: * lexer.rl: allow spaces before comments-before-leading-dot. 
(#654) (Ilya Bylich) v2.7.0.2 (2020-01-08) --------------------- Bugs fixed: * lexer.rl: fix paren_nest for curly braces (#646) (Ilya Bylich) v2.7.0.1 (2019-12-30) --------------------- Bugs fixed: * dedenter.rb: prevent `ArgumentError` when processing binary en… (#642) (Koichi ITO) v2.7.0.0 (2019-12-26) --------------------- API modifications: * README.md: documented compatibility issue with EOF chars after… (#637) (Ilya Bylich) * ruby27.y: refactor logic around 'circular argument reference'(#628) (Ilya Bylich) Features implemented: * ruby27.y: added pattern matching (#574) (Ilya Bylich) * lexer.rl: parse embedded ivars/cvars starting with digit as str (#639) (Ilya Bylich) * lexer.rl: warn on `...` at EOL. (#636) (Ilya Bylich) * ruby27.y: removed opt_block_args_tail: tOROP rule. (#635) (Ilya Bylich) * ruby27.y: reverted method reference operator (added in #634) (Ilya Bylich) * ruby27.y: treat numparams as locals outside numblock. (#633) (Ilya Bylich) Bugs fixed: * dedenter.rb: fixed over-dedenting of squiggly heredocs (#641) (Ilya Bylich) * ruby27.y: added "arguments forwarding" (#625) (Ilya Bylich) * ruby27.y: reject circular argument reference. (#622) (Ilya Bylich) * ruby27.y: changed prefix of numparams to "_" (#620) (Ilya Bylich) v2.6.5.0 (2019-10-03) --------------------- API modifications: * Bump ruby versions to 2.4.9, 2.5.7 and 2.6.5. (#619) (Ilya Bylich) Features implemented: * lexer.rl: changed max numparam to `@9` (#617) (Ilya Bylich) * lexer.rl: support comments before leading dot in 27 mode. (#613) (Ilya Bylich) Bugs fixed: * lexer.rl: emit tMETHREF as tDOT+tCOLON for rubies \< 27. (#614) (Ilya Bylich) v2.6.4.1 (2019-09-12) --------------------- Bugs fixed: * lexer.rl: fix parsing of 'm a + b do end' (#605) (Ilya Bylich) v2.6.4.0 (2019-08-30) --------------------- API modifications: * Added specs for heredocs with mixed encoding. (#581) (Ilya Bylich) Features implemented: * ruby27.y: Revert "pipeline operator" (#601) (Koichi ITO) * ruby27.y: Fix parsing of mutiple assignment with rescue modifier (#600) (Koichi ITO) * ruby27.y: hoisted out f_rest_marg. (#594) (Ilya Bylich) * ruby27.y: added pipeline operator. (#592) (Ilya Bylich) * ruby27.y: reject safe navigator in LHS of mass-assignment. (#586) (Ilya Bylich) * lexer.rl: reject whitespaces in meta and control chars. (#585) (Ilya Bylich) * lexer.rl: Reject numparams as symbol literals. (#582) (Ilya Bylich) * ruby27.y: Added numbered parameters support. (#565) (Ilya Bylich) * lexer.rl: Reject \n and \r in heredoc identifiers starting from 2.7. (#575) (Ilya Bylich) Bugs fixed: * ruby-parse: print empty string when --emit-json and empty input are given. (#590) (Ilya Bylich) * AST_FORMAT: fixed documentation of the string with interpolation. (#589) (Ilya Bylich) * builder.rb, processor.rb: Changed format of the procarg0 node. (#587) (Ilya Bylich) v2.6.3.0 (2019-04-28) --------------------- Features implemented: * ruby27.y: Added beginless ranges support. (#570) (Ilya Bylich) v2.6.2.1 (2019-04-05) --------------------- API modifications: * Bump 2.4 branch to 2.4.6. (#569) (Ilya Bylich) * Lexer should know about current parsing context. (#566) (Ilya Bylich) v2.6.2.0 (2019-03-21) --------------------- API modifications: * Bump ruby versions to 2.5.5 and 2.6.2. (#563) (Ilya Bylich) * Bump Ruby version to 2.6.1. (#554) (Ilya Bylich) Features implemented: * ruby27.y: dsym should be treated as string. (#560) (Ilya Bylich) * ruby27.y: Refactored symbol rules. (#557) (Ilya Bylich) * ruby27.y: Added method reference operator. 
(#556) (Ilya Bylich) * ruby27.y: branch parser. (#546) (Ilya Bylich) v2.6.0.0 (2019-01-16) --------------------- API modifications: * 2.6.0 was released, unmark is as -dev. (#538) (Ilya Bylich) Bugs fixed: * Fix parsing of "\\\n" escaped sequences in various literals. (#539) (Ilya Bylich) v2.5.3.0 (2018-10-29) --------------------- Bugs fixed: * lexer.rl: Fix parsing of 'm :key => m do; m() do end; end'. (#526) (Ilya Bylich) * lexer.rl: Fix parsing of ambiguous 1re. (#523) (Ilya Bylich) v2.5.1.2 (2018-07-10) --------------------- Bugs fixed: * lexer.rl: Partially revert 5ba072d and properly handle 'm = -> *args do end'. (Ilya Bylich) v2.5.1.1 (2018-07-10) --------------------- Features implemented: * ruby26.y: Endless ranges support. (Ilya Bylich) Bugs fixed: * lexer.rl: Fix parsing of 'm = -> *args do end'. (Ilya Bylich) * AST::Processor: Properly recurse into "kwsplat" nodes (Nelson Elhage) * ruby24, ruby25, ruby26: Fix cmdargs after command_args followed by tLBRACE_ARG. This commit tracks upstream commit ruby/ruby@f168dbd. (Ilya Bylich) * lexer.rl: Fix parsing of `let (:a) { m do; end }`. (Ilya Bylich) v2.5.1.0 (2018-04-12) --------------------- API modifications: * Parser::Current: bump latest 2.2 branch to 2.2.10. (Ilya Bylich) Features implemented: * ruby26.y: Raise a syntax error on 'else' without 'rescue'. This commit tracks upstream commit ruby/ruby@140512d. (Ilya Bylich) Bugs fixed: * lexer.rl, Dedenter: Treat slash in heredocs as a line continuation. (Ilya Bylich) * lexer.rl: Fix parsing of `a ? b + '': nil`. (Ilya Bylich) * lexer.rl: Reject `m a: {} {}` and `m [] {}` since 25. (Ilya Bylich) * builders/default: allow class/module/dynamic constant definition in the method body if there's a sclass between them. (bug #490) (Ilya Bylich) * lexer.rl: Emit :!@ as :!, :~@ as :~. (Ilya Bylich) * parse{23,24,25}.y: use only CMDARG/COND _PUSH/POP for cmdarg/cond management. (bug #481) (Ilya Bylich) * lexer.rl: emit tSTRING_BEG and tSTRING_DBEG one by one to allow parser to properly manipulate cmdarg stack. (bug #480) (Ilya Bylich) v2.5.0.4 (2018-03-13) --------------------- Bugs fixed: * AST::Processor: handle on_index, on_indexasgn, on_lambda. (Ilya Bylich) v2.5.0.3 (2018-03-06) --------------------- Bugs fixed: * Accept `BEGIN` and `END` as correct method name (#463) (Masataka Pocke Kuwabara) * Parser::Source::Buffer: Fixed parsing of encoding comment when the file contains only shebang. (Ilya Bylich) v2.5.0.2 (2018-02-22) --------------------- Bugs fixed: * ruby24.y, ruby25.y: Replicate cmdargs handling from MRI. (#453) (Ilya Bylich) v2.5.0.1 (2018-02-21) --------------------- Features implemented: * builders/default: __ENCODING__: emit as s(:__ENCODING__) via AST opt-in. (whitequark) * ruby25.y: Extract expr_value_do rule. This commit tracks upstream commit ruby/ruby@508533f. (Ilya Bylich) * ruby25.y: Extract begin_block rule. This commit tracks upstream commit ruby/ruby@762d23c. (Ilya Bylich) * ruby25.y: Allow class and method definition in the while condition. (#432) (Ilya Bylich) * ruby25: Allow `-> do rescue; end`. (#431) (Ilya Bylich) Bugs fixed: * parser/current: latest released Ruby series is 2.5.x. (whitequark) * builders/default: x[], x[]=1: emit as s(:index), s(:indexasgn) via AST opt-in. (whitequark) * lexer.rl: "#{-> foo {}}": fix parsing of interpolated lambda with open args. (Ilya Bylich) v2.5.0.0 (2018-02-16) --------------------- API modifications: * Parser::Current: bump to 2.2.9 and 2.3.6. 
(Stan Hu) * Deprecate Parser::Rewriter (Marc-Andre Lafortune) * Deprecate Parser::Source::Rewriter (Marc-Andre Lafortune) * Change relative order of insert_after_multi and insert_before_multi for non-empty ranges (#399). (Marc-Andre Lafortune) Features implemented: * parse.y: Reject brace after literal arg. This commit tracks upstream commits ruby/ruby@9987109 and ruby/ruby@7d6965f. (Ilya Bylich) * ruby-parse: add an option for emitting AST as JSON. (Alex Rattray) * Add Parser::TreeRewriter (Marc-Andre Lafortune) * Add Parser::Rewriter#wrap to ease compatibility (Marc-Andre Lafortune) * Add Parser::Source::TreeRewriter (Marc-Andre Lafortune) * Add Range#\<=> and include Comparable (Marc-Andre Lafortune) * parse.y: disable tLPAREN_ARG state after local variable. (Ilya Bylich) * SourceBuffer#source_range (Marc-Andre Lafortune) * Range#adjust (Marc-Andre Lafortune) * Range#contains?, contained?, crossing? (Marc-Andre Lafortune) * Add Range#with. (Marc-André Lafortune) * lexer.rl: Relax restriction spaces inside "\u{...}". This commit tracks upstream commit ruby/ruby@7e8b910. (Ilya Bylich) * lexer.rl: Allow newlines in heredoc identifiers. This commit tracks upstream commit ruby/ruby@d25faa4. (Ilya Bylich) * lexer.rl: allow do after cmdarg in paren. This commit tracks upstream commit ruby/ruby@046c943. (Ilya Bylich) Bugs fixed: * Fixed magic encoding comment parsing when encoding comment is on the first line but not in the beginning. (Ilya Bylich) * lexer.rl: Parse '1if' as '1 if', '1rescue' as '1 rescue'. (Ilya Bylich) * lexer.rl: Save state before entering a block comment to enter it after =end. (Ilya Bylich) * parse.y: Prohibit return in class/module body except for singleton class. Replaced @def_level/in_def? in favor of context.indirectly_in_def?. This commit tracks upstream commit ruby/ruby@8c15f40ac. (Ilya Bylich) * lexer.rl: Emit :&& as tSYMBEG + tANDOP, :|| as tSYMBEG + tOROP. (Ilya Bylich) * ruby{24,25}.y: preserve cmdarg stack around do/end block [Bug #13073]. (Mateusz Lenik) * Parser::Lexer::State: Fixed #lexpop to match MRI behavior. (Ilya Bylich) v2.4.0.2 (2017-11-13) --------------------- API modifications: * parser/current: update for 2.3.5 release. (whitequark) v2.4.0.1 (2017-11-13) --------------------- API modifications: * parser/current: update for 2.3.4 release. (whitequark) * parser/current: update for Ruby 2.1.10 and 2.2.7. (Koichi ITO) Features implemented: * Allow rescue/else/ensure inside do/end blocks. [Feature #12906] (Ilya Bylich) * ruby25.y: branch parser. (Ilya Bylich) Bugs fixed: * Source::Comment::Associator: skip -*- magic comments -*-. (Denis Defreyne) * lexer.rl: "- 5": allow whitespace after tUNARY_NUM. (whitequark) * *ruby*.y, Builders::Default: "+5": don't lose location of the "+". (whitequark) * ruby-rewrite: allow passing paths to --load. (whitequark) * builders/default: "def x; else; end": don't crash. (whitequark) v2.4.0.0 (2017-02-07) --------------------- API modifications: * parser/current: update for the 2.4 release. (whitequark) * rubymotion.y: "a&.b": implement safe navigation operator in RubyMotion. (Mark Villacampa) Bugs fixed: * lexer.rl: "a &. b": accept &. in EXPR_ARG also. (whitequark) v2.3.3.1 (2016-12-02) --------------------- API modifications: * parser/current: update 2.2 warning to 2.2.6 (Jordan Moncharmont) v2.3.3.0 (2016-11-28) --------------------- API modifications: * parser/current: update 2.3 branch to 2.3.3. (Philip Arndt) Bugs fixed: * ruby24.y: "a += b += raise :x": fix errors with chained op-assignments. 
(whitequark) * ruby24.y: "p p{p(p);p p}, tap do end": preserve cmdarg stack. (whitequark) * ruby24.y: "a b{c d}, :e do end": go to EXPR_ENDARG after literal. (whitequark) v2.3.2.0 (2016-11-20) --------------------- API modifications: * parser/current: update 2.3 branch to 2.3.2. (whitequark) * Introduce (procarg0) node for a single required block argument. (Ilya Bylich) Bugs fixed: * {macruby,ruby{19,20,21,22,23,24}}.y: "x::A += m x": treat as constant assignment. (whitequark) * ruby24.y: "x += raise y rescue nil": bind rescue tighter than tOP_ASGN. (whitequark) * ruby24.y: "x = raise y rescue nil": bind rescue tighter than =. (whitequark) * Builders::Default: "begin; else; 1; end": fix a crash. (whitequark) v2.3.1.3 (2016-09-17) --------------------- API modifications: * parser/current: latest 2.3 release is 2.3.2. (Chris Riddoch) Features implemented: * ruby24.y: "f (g rescue nil)": allow stmt after tLPAREN_ARG. (whitequark) Bugs fixed: * ruby{18,19,20,21,22,23,24}.y: parse trailing commas as mlhs for block args "a.b{ |c,| d }" (fixes #312) (John Backus) * Builders::Default: "begin; 1; else; 2; end": do not drop else. (whitequark) * Builders::Default: "a&.b &&= c": fix safe navigation in lhs of op-assignment. (Ilya Bylich) * AST::Processor: handle "csend" as "send". (#301) (Ilya Bylich) * Parser::AST::Processor: do not spuriously modify ASTs. (Ilya Bylich) * lexer.rl: "%w\a b\": lex backslash-delimited words literals. (Masataka Kuwabara) v2.3.1.2 (2016-06-02) --------------------- API modifications: * parser/current: update 2.2 warning to 2.2.5 (#295) (Kohei Suzuki) Bugs fixed: * AST_FORMAT: Fix `kwarg` example (#294) (Magnus Holm) v2.3.1.1 (2016-06-01) --------------------- Bugs fixed: * ruby{^18}.y: "lambda{|;a|a}": treat shadowarg as local inside block. (Ilya Bylich) * Builders::Default: "foo&.bar{}": emit csend in block properly. (whitequark) * runner.rb: re-add --23 (cremno) * runner.rb: make --24 actually use the 2.4 parser (cremno) v2.3.1.0 (2016-04-27) --------------------- Features implemented: * Parser::Current: update for Ruby 2.3.1. (whitequark) * Builders::Default: allow masgn in conditional context on >=Ruby 2.4. (whitequark) * ruby24.y: branch parser. (whitequark) Bugs fixed: * lexer.rl: "def x y; y A::B, ''; end": reject X:: in EXPR_ENDFN (fixes #285). (whitequark) v2.3.0.7 (2016-03-25) --------------------- API modifications: * Source::Diagnostic: handle ranges pointing to newlines (#273). (whitequark) Features implemented: * Parser::Base#tokenize: allow recovery from syntax errors. (whitequark) * lexer.rl: "a=1; a b: 1": allow label after command clashing with local. (whitequark) * lexer.rl: "undef %s(x)": emit %s literals in expr_fname in 2.3 mode. (whitequark) Bugs fixed: * Builders::Default: reject non-UTF-8 compatible literals. (whitequark) v2.3.0.6 (2016-02-14) --------------------- Bugs fixed: * lexer.rl: fix EOF location (closes #273). (whitequark) v2.3.0.5 (2016-02-12) --------------------- Bugs fixed: * lexer.rl: "%Q{\あ}": fix encoding of UTF-8 noop escapes. (whitequark) v2.3.0.3 (2016-02-06) --------------------- API modifications: * lexer.rl: "a?=b": lex via tCHARACTER (closes #255). (whitequark) v2.3.0.2 (2016-01-24) --------------------- Bugs fixed: * Add :csend to Parser::Meta::NODE_TYPES (Markus Schirp) * lexer/dedenter: "\<\ a: {}": state after -> is ENDFN, not END (fixes #203). (whitequark) * ruby{21,22}.y: "p -> { :hello }, a: 1 do end": lexpop cmdarg. 
(whitequark) v2.2.2.6 (2015-06-30) --------------------- API modifications: * parser/current: link to README from syntax deviation warning. (whitequark) v2.3.0.pre.2 (2015-06-15) ------------------------- Bugs fixed: * {macruby,rubymotion}.rb: add to gemspec. (whitequark) v2.3.0.pre.1 (2015-06-13) ------------------------- API modifications: * ruby20.y: revert 7f7f2a45. (whitequark) Features implemented: * Add RubyMotion support. (whitequark) * Add MacRuby support. (whitequark) Bugs fixed: * lexer.rl: "[/()\\1/, ?#]": fixes #198. (whitequark) v2.2.2.5 (2015-05-25) --------------------- API modifications: * Source::Comment::Associator: rework entirely; fixes #194 (Oleg Zubchenko) Features implemented: * Source::Map: add last_line, last_column delegation (Oleg Zubchenko) * Source::Range: add last_line, last_column methods (Oleg Zubchenko) Bugs fixed: * AST::Processor: add missing on_block_pass (fixes #196) (whitequark) v2.2.2.3 (2015-05-17) --------------------- API modifications: * lexer.rl: "a?? 1 : 0": squelch "invalid character syntax" warning. (whitequark) * parser/current: bump warnings to 2.0.1, 2.1.7, 2.2.3. (whitequark) Bugs fixed: * Source::Map: do not include :node in to_hash. (whitequark) * ruby{20,21,22}.y: "p ->() do a() do end end": save cmdarg. (whitequark) v2.2.2.2 (2015-04-28) --------------------- Bugs fixed: * lexer.rl: "%r.\..", "%r'\''": leave regexp metacharacters escaped (fixes #192). (whitequark) v2.2.2.1 (2015-04-18) --------------------- Bugs fixed: * builders/default: don't falsely diagnose multiline regexps (fixes #190). (whitequark) v2.2.0.4 (2015-04-15) --------------------- Features implemented: * Add Parser::Source::Map#node. (whitequark) * Add Parser::Source::Comment.associate_locations. (kubicle) v2.2.0.1 (2014-12-27) --------------------- Bugs fixed: * lexer.rl, lexer/literal: "{'a'::": don't parse as quoted label. (Peter Zotov) * Update syntax deviation warning to reflect 2.2 release. (Peter Zotov) v2.2.0 (2014-12-25) ------------------- Bugs fixed: * lexer.rl: "{'x':1,'y':{}}": fix lex_state after tLABEL_END. (Peter Zotov) v2.2.0.pre.8 (2014-11-19) ------------------------- API modifications: * parser/current: update 2.1 to 2.1.5 (fixes #174). (Peter Zotov) v2.2.0.pre.7 (2014-11-03) ------------------------- Bugs fixed: * parser/meta: add missing nodes (fixes #171). (Peter Zotov) v2.2.0.pre.6 (2014-10-28) ------------------------- API modifications: * parser/current: latest stable 2.1.x is 2.1.4, update warning. (hirocaster) v2.2.0.pre.5 (2014-10-03) ------------------------- Features implemented: * parser/current: add syntax deviation warning for 2.1.2. (Peter Zotov) * lexer.rl, ruby22.y: "{'x':1}": add tLABEL_END. (Peter Zotov) Bugs fixed: * lexer.rl, ruby{21,22}.y: "def a b:\nreturn": fix #164. (Peter Zotov) * Fix for `ruby-rewrite` not rewriting files if any rewriter was loaded, due to it getting confused about the filename. (Jon Frisby) v2.2.0.pre.4 (2014-08-09) ------------------------- Bugs fixed: * builders/default: "not(x)": fix source map (fixes #158). (Peter Zotov) v2.2.0.pre.3 (2014-07-02) ------------------------- Features implemented: * Raise EncodingError when source includes invalid byte sequence (Yuji Nakayama) Bugs fixed: * ruby{19,20,21}.y: "x def y; z do end end": save cmdarg stack to isolate command argument state from outer scope. (Peter Zotov) * ruby{19,20,21}.y: "tap (proc do end)": allow parenthesed do-block in cmdarg. (Peter Zotov) v2.2.0.pre.2 (2014-06-14) ------------------------- Bugs fixed: * ruby22.rb: include in gemspec. 
(Peter Zotov) v2.2.0.pre.1 (2014-06-12) ------------------------- Features implemented: * Add Source::Rewriter#transaction for atomic rewrite actions (Yuji Nakayama) * Raise Parser::ClobberingError for clobbering rewrite error (Yuji Nakayama) Bugs fixed: * parser/current: fix the fallback case (refs #146). (Peter Zotov) * ruby22.y: "tap (proc do end)": allow parenthesed do-block in cmdarg. (Peter Zotov) v2.1.9 (2014-04-21) ------------------- API modifications: * Extend ast dependency to >= 1.1 \< 3.0. (Peter Zotov) * parser/current: fallback to latest released Ruby instead of raising (fixes #140). (Peter Zotov) Features implemented: * ruby-rewrite: add a --modify switch for rewriters that modify AST (fixes #143). (Peter Zotov) Bugs fixed: * lexer.rl: don't fail to parse string literals in huge files (fixes #142). (Peter Zotov) v2.1.7 (2014-03-05) ------------------- Bugs fixed: * lexer.rl: make sure all invalid escapes lead to fatal errors (fixes #136). (Peter Zotov) v2.1.6 (2014-03-04) ------------------- Features implemented: * Add the list of all node types within Parser::Meta. (Markus Schirp) v2.1.5 (2014-02-24) ------------------- Bugs fixed: * Parser::Base, ruby18.y: don't try to lookup Encoding on 1.8 (fixes #133). (Peter Zotov) v2.1.4 (2014-01-11) ------------------- Features implemented: * ruby22.y: "x def y; z do end end": save cmdarg stack to isolate command argument state from outer scope. (Peter Zotov) * Add Ruby 2.2 syntax. (Peter Zotov) Bugs fixed: * Builders::Default: "super do end": super and zsuper are like send (fixes #131). (Peter Zotov) v2.1.3 (2014-01-10) ------------------- Bugs fixed: * lexer.rl: "/\//": don't include escaped delimiter in AST in its escaped form (fixes #125). (Peter Zotov) * Builders::Default: "return x y do end": correctly build AST for keywords followed by command (closes #129). (Peter Zotov) * Fix a bug where "ambiguous first argument" diagnostic was not emitted (Yuji Nakayama) * Source::Comment::Associator: don't die while associating with "__ENCODING__". (Peter Zotov) * ruby-parse: don't die when invoked with -L -e "__ENCODING__". (Peter Zotov) * Add missing source map for match-current-line (Yuji Nakayama) v2.1.2 (2014-01-05) ------------------- Bugs fixed: * lexer.rl: in "foo!= x", foo is tIDENTIFIER, not tFID (closes #126). (Peter Zotov) v2.1.1 (2013-12-25) ------------------- API modifications: * ruby21.y: Ruby 2.1 is released already. (Peter Zotov) v2.1.0 (2013-12-25) ------------------- API modifications: * Parser::Diagnostic: expose reason symbolically (closes #115, #116). (Ian MacLeod) * lexer.rl: coerce literals to UTF-8 in ASCII-encoded files if they contain \uXXXX (Peter Zotov) Bugs fixed: * builders/default: represent heredocs with dedicated map (fixes #100). (Peter Zotov) v2.1.0.pre1 (2013-11-12) ------------------------ API modifications: * lexer.rl: correctly handle __END__ with non-whitespace after it (Peter Zotov) * lexer.rl: handle \r in middle of a line as mere whitespace (Peter Zotov) * ruby{18,19,20,21}.y, builders/default: precisely point to tUMINUS_NUM. (Peter Zotov) Features implemented: * lexer.rl, ruby21.y, builders/default: rational/complex literals. (Peter Zotov) v2.0.0 (2013-10-06) ------------------- API modifications: * Source::Rewriter: raise an exception if updates clobber each other. (Peter Zotov) * Source::Range#inspect: use full class name. (Peter Zotov) * lexer.rl: make EOF tokens actually pointing at EOF and zero-length. (Peter Zotov) * Source::Range#column_range: raise RangeError if range spans >1 line. 
(Peter Zotov) * Source::Comment::Associator: fix argument order. (Peter Zotov) Features implemented: * Source::Comment: implement #inspect. (Peter Zotov) * Backport Array#bsearch from Ruby 2.0. (Peter Zotov) v2.0.0.pre8 (2013-09-15) ------------------------ API modifications: * lexer.rl: make lexing faster and improve parsing speed by ~60%. (Peter Zotov) v2.0.0.pre7 (2013-09-10) ------------------------ Features implemented: * Parser::Base: add #parse_with_comments, #parse_file_with_comments. (Trent Ogren) * lexer.rl (Ruby 2.1): "1end": lex non-exponent `e' separate from number. (Peter Zotov) Bugs fixed: * lexer.rl: "->*{}": tLAMBEG at expr_beg (fixes #103). (Peter Zotov) * Source::Rewriter: apply actions in the insertion order. (Josh Cheek) v2.0.0.pre5 (2013-07-31) ------------------------ Bugs fixed: * Remove a forgotten require. (Peter Zotov) v2.0.0.pre4 (2013-07-31) ------------------------ API modifications: * source/comment: make #loc/#location return Source::Map for consistency (fixes #96). (Peter Zotov) Features implemented: * source/comment/associator: skip shebang and encoding line by default (fixes #95). (Peter Zotov) Bugs fixed: * ruby{19,20,21}.y, lexer.rl, builders/default: correct begin for ?a (fixes #92). (Peter Zotov) * ruby{18,19,20,21}.y, builders/default: don't add spurious begin/end for string parts (refs #92). (Peter Zotov) * Activate `diagnostics.all_errors_are_fatal` on non-MRI Rubies as a workaround (closes #93). (Peter Zotov) v2.0.0.pre3 (2013-07-26) ------------------------ API modifications: * lexer.rl: add simple explicit output encoding for strings. (Peter Zotov) Features implemented: * Source::Buffer: support for -(dos|unix|mac) and utf8-mac encodings. (Peter Zotov) * Source::Range#resize. (Peter Zotov) * Significantly improve speed for large (>100k) and very large (>1M) files. (Peter Zotov) Bugs fixed: * ruby21.y: fix typos. (Peter Zotov) * builders/default: respect regexp encoding. (Peter Zotov) * lexer.rl: literal EOF (\0, \x04, \x1a) inside literals and comments. (Peter Zotov) * lexer.rl: "meth (lambda do end)" (1.8), "f x: -> do meth do end end": expr_cmdarg. (Peter Zotov) * lexer.rl: "\<\(scope){}; scope :foo": lambda identifier leakage. (Peter Zotov) * lexer.rl: "eh ?\r\n": don't eat tEH if followed by CRLF. (Peter Zotov) * lexer.rl: "f \<\<-TABLE\ndo |a,b|\nTABLE\nend": leave FSM after lexing heredoc. (Peter Zotov) * lexer.rl: "foo %\n bar": don't % at expr_arg as tSTRING_BEG. (Peter Zotov) * lexer.rl, lexer/literal: use lexer encoding for literal buffer. (Peter Zotov) * lexer.rl: "\u{9}": one-digit braced unicode escapes. (Peter Zotov) * Source::Buffer: don't chew \r from source lines. (Peter Zotov) * builders/default: don't die in eh_keyword_map if else branch is empty. (Peter Zotov) * lexer.rl: "0777_333": octal literals with internal underscores. (Peter Zotov) * lexer.rl: "let [] {}": goto tLBRACE_ARG after any closing braces. (Peter Zotov) * lexer.rl: "while not (1) do end": emit proper kDO* when in cond/cmdarg state. (Peter Zotov) * lexer.rl: "rescue=>": correctly handle rescue+assoc at expr_beg. (Peter Zotov) * lexer.rl: "puts 123do end": only trailing `_' and `e' in number are errors. (Peter Zotov) * lexer.rl: "begin; rescue rescue1; end": accept barewords at expr_mid. (Peter Zotov) * lexer.rl: "f.x!if 1": correct modifier handling in expr_arg. (Peter Zotov) * lexer.rl: "=begin\n#=end\n=end": only recognize =end at bol. (Peter Zotov) * builders/default: don't check for duplicate arguments in 1.8 mode. 
(Peter Zotov) * Don't attempt to parse magic encoding comment in 1.8 mode. (Peter Zotov) * lexer.rl: "\777": octal literals overflow. (Peter Zotov) * lexer.rl: "foo;\n__END__", "\na:b": whitespace in expr_value. (Peter Zotov) * lexer.rl: "\xE2\x80\x99": concatenation of byte escape sequences. (Peter Zotov) * lexer.rl: "E10", "E4U": don't conflate floats and identifiers. (Peter Zotov) * lexer.rl: "foo.bar= {1=>2}": return fid, = as separate tokens in expr_dot. (Peter Zotov) * lexer.rl: "def defined?": properly return defined? in expr_fname. (Peter Zotov) * lexer.rl: "Rainbows! do end", "foo.S?": allow bareword fid in expr_beg/dot. (Peter Zotov) v2.0.0.pre2 (2013-07-11) ------------------------ Features implemented: * Allow to differentiate between __FILE__/__LINE__ and literals (closes #89). (Peter Zotov) * Add attribute `diagnostic' to Parser::SyntaxError (closes #88). (Peter Zotov) Bugs fixed: * Don't treat byte order mark as an identifier (closes #91). (Peter Zotov) v2.0.0.beta10 (2013-07-02) -------------------------- Bugs fixed: * ruby-parse, ruby-rewrite: fix require of removed compatibility shim. (Peter Zotov) * lexer.rl: "def !@; end" unary bang. (Peter Zotov) v2.0.0.beta9 (2013-06-28) ------------------------- API modifications: * ruby{18,19,20,21}.y: removed obsolete warnings and linting. (Peter Zotov) Features implemented: * builders/default: add keyword source range for BEGIN/END (fixes #85). (Peter Zotov) Bugs fixed: * lexer.rl: "t=1;(a)?t:T" context sensitivity in expr_value (fixes #87). (Peter Zotov) * lexer.rl: keywords as labels, e.g. "unless:" (fixes #83, #84). (Peter Zotov) * lexer.rl: rectify usage of c_space/c_space_nl (fixes #81). (Peter Zotov) * ruby{18,19,20,21}.y: fix warnings for class/module in method body. (Peter Zotov) * lexer.rl: fix warning for ?\s. (Peter Zotov) * lexer.rl: expr_fname emits expr_beg-like keywords (fixes #82). (Peter Zotov) * lexer.rl: get rid of harmful nondeterminism in w_space (fixes #80). (Peter Zotov) * lexer/explanation: 1.8, 1.9 compatibility (fixes #76). (Peter Zotov) v2.0.0.beta8 (2013-06-24) ------------------------- Bugs fixed: * ast/processor: add missing node handlers (Yuji Nakayama) * ast/processor: rename some obsolete node handlers (Yuji Nakayama) v2.0.0.beta7 (2013-06-22) ------------------------- API modifications: * Implement a much more sane encoding model (closes #60). (Peter Zotov) Features implemented: * builders/default: (while-post) and (until-post); (kwbegin) (fixes #70). (Peter Zotov) Bugs fixed: * builders/default: don't swallow (begin) in "if (foo); end" (fixes #75). (Peter Zotov) v2.0.0.beta6 (2013-06-17) ------------------------- API modifications: * Get rid of "synthesized (nil)". If it's not in source, it's not in AST (fixes #71). (Peter Zotov) * lexer.rl, ruby{18,19,20,21}.y: source maps for interpolation (fixes #27). (Peter Zotov) Features implemented: * ruby{18,19,20,21}.y, builders/default: lvar-injecting match (closes #69). (Peter Zotov) * builders/default: implicit matches (refs #69). (Peter Zotov) * builders/default: flip-flops (refs #69). (Peter Zotov) Bugs fixed: * lexer.rl: fix an off-by-1 error in heredoc parsing. (Peter Zotov) * lexer.rl: don't fail on "alias $a $b\n# comment\nalias $c $d". (Peter Zotov) * builders/default: fix treatment of masgn in &&/|| (refs #69). (Peter Zotov) * ruby-parse: make -L command line option work again. (Peter Zotov) * ruby{18,19,20,21}.y: begin source map for "if foo\nthen bar end" (fixes #68). 
(Peter Zotov) * Source::Comment::Associator: gracefully terminate when out of comments (fixes #67). (Peter Zotov) v2.0.0.beta5 (2013-06-08) ------------------------- Bugs fixed: * Source::Buffer: better magic encoding comment recognition (fixes #65). (Peter Zotov) * lexer.rl: "{success?: true}" (fixes #66). (Peter Zotov) * Source::Buffer: if in doubt, treat data as UTF-8 (closes #60). (Peter Zotov) v2.0.0.beta4 (2013-06-05) ------------------------- Bugs fixed: * lexer.rl: fix heredoc parsing with CRLF line endings (closes #61). (Peter Zotov) * lexer.rl: fix premature ending of heredoc "\<\=1.9), _.* args (>1.9) (fixes #5). (Peter Zotov) * builders/default: detect duplicate argument names (refs #5). (Peter Zotov) * lexer.rl: "def foo bar: 1; end" (for ruby 2.1) (fixes #15). (Peter Zotov) * ruby21.y: required keyword arguments. (Peter Zotov) Bugs fixed: * ruby20.y, ruby21.y: "foo::A += 1" and friends (scoped constant op-asgn). (Peter Zotov) v1.0.1 (2013-04-18) ------------------- Bugs fixed: * builders/default: %Q{#{1}} and friends (fixes #14). (Peter Zotov) v1.0.0 (2013-04-17) ------------------- Features implemented: * ruby20.y: "meth 1 do end.fun(bar) {}" and friends. (Peter Zotov) * ruby20.y: keyword arguments. (Peter Zotov) * ruby20.y: { **kwsplat }. (Peter Zotov) v0.9.2 (2013-04-16) ------------------- Features implemented: * lexer.rl: "-> (a) {}". (Peter Zotov) * builders/default: treat &&/|| lhs/rhs as conditional context. (Peter Zotov) * ruby20.y: "class Foo \< a:b; end". (Peter Zotov) * lexer.rl: "class \<\< a:b". (Peter Zotov) * ruby19.y, ruby20.y: "f { || a:b }". (Peter Zotov) * ruby19.y, ruby20.y: "def foo() a:b end", "def foo\n a:b end". (Peter Zotov) * lexer.rl: %i/%I. (Peter Zotov) * lexer.rl: warn at "foo **bar". (Peter Zotov) * lexer.rl: ** at expr_beg is tDSTAR. (Peter Zotov) * ruby20.y: "f {|;\nvar\n|}". (Peter Zotov) * ruby20.y: "p () {}". (Peter Zotov) * ruby20.y: "p begin 1.times do 1 end end". (Peter Zotov) * ruby20.y: better error message for BEGIN{} in a method body. (Peter Zotov) Bugs fixed: * lexer.rl, ruby18.y, ruby19.y, ruby20.y: "%W[#{a}#@b foo #{c}]". (Peter Zotov) * lexer.rl: parse "foo=1; foo / bar #/" as method call on 1.8, division on 1.9. (Peter Zotov) * ruby18.y, ruby19.y: BEGIN{} does not introduce a scope. (Peter Zotov) * lexer.rl: improve whitespace handling. (Peter Zotov) v0.9.0 (2013-04-15) ------------------- API modifications: * runtime compatibility with 1.8.7. (Peter Zotov) Features implemented: * builders/default: check for multiple assignment in conditions (fixes #4). (Peter Zotov) * builders/default: check if actual block and blockarg are passed (fixes #6). (Peter Zotov) * ruby19.y: "foo::A += m foo". (Peter Zotov) * ruby18.y, ruby19.y: "rescue without else is useless" warning. (Peter Zotov) * ruby19.y: 99.16% coverage, 100% sans error recovery. (Peter Zotov) * ruby19.y: mlhs arguments "def foo((a, *, p)) end". (Peter Zotov) * ruby19.y: "fun (1) {}" and friends. (Peter Zotov) * ruby19.y: mlhs post variables "a, *b, c = ...". (Peter Zotov) * builders/default: @@a |= 1; def f; @@a |= 1; end. (Peter Zotov) * ruby18.y: fun (&foo). (Peter Zotov) * ruby18.y: block formal arguments. 99.33% coverage. (Peter Zotov) * ruby18.y: fun(meth 1 do end); fun(1, meth 1 do end). (Peter Zotov) * ruby18.y: "meth 1 do end.fun(bar)" and friends. (Peter Zotov) * ruby18.y: foo () {}; a.foo () {}; a::foo () {}. (Peter Zotov) * ruby18.y: various call argument combinations. (Peter Zotov) * ruby18.y: foo (1, 2); foo (). (Peter Zotov) * ruby18.y: foo (1).to_i. 
(Peter Zotov) * ruby18.y: fun{}; fun(){}; fun(1){}; fun do end. (Peter Zotov) * ruby18.y: foo.fun bar. (Peter Zotov) * lexer.rl, ruby18.y: add support for cond/cmdarg stack states. (Peter Zotov) * ruby18.y: rescue. (Peter Zotov) * ruby18.y: begin end while|until (tests only). (Peter Zotov) * ruby18.y: case. (Peter Zotov) * ruby18.y: foo[m bar]. (Peter Zotov) * ruby18.y: for..in. (Peter Zotov) Bugs fixed: * lexer.rl: handle : at expr_beg as a symbol, at expr_end as tCOLON. (Peter Zotov) * lexer.rl: handle "rescue #foo\nbar". (Peter Zotov) * lexer.rl: handle "foo.#bar\nbaz". (Peter Zotov) * lexer.rl: fix location info for symbols. (Peter Zotov) * lexer.rl: handle \\ at expr_beg. (Peter Zotov) * lexer.rl: emit tCONSTANT/tIDENTIFIER/tFID in expr_dot. (Peter Zotov) * lexer.rl: correctly disambiguate "x ::Foo" as tIDENT, tCOLON3, ... (Peter Zotov) * lexer.rl: correctly disambiguate ident!= as tIDENTIFIER, tNEQ. (Peter Zotov) * lexer.rl: correctly report the %r%% tREGEXP_BEG value as %r%. (Peter Zotov) * ruby19.y: emit correct error on "nil = 1" and friends. (Peter Zotov) * ruby19.y: 1.9 permits empty symbol literals. (Peter Zotov) * ruby18.y: foo(&bar). (Peter Zotov) * lexer.rl: don't lookahead two tokens on "func %{str} do". (Peter Zotov) * lexer.rl: fix lexing of non-interp heredoc with trailing backslash. (Peter Zotov) * lexer.rl: fix erroneous number and =begin lookahead in expr_beg. (Peter Zotov) * lexer.rl: fix stack corruption. (Peter Zotov) * lexer.rl: /= at expr_beg. (Peter Zotov) * lexer.rl: class\<\= 1.1', '< 3.0' gem 'racc', '1.8.1' parser-3.3.4.2/LICENSE.txt000066400000000000000000000022451465510415600150120ustar00rootroot00000000000000Copyright (c) 2013-2016 whitequark Parts of the source are derived from ruby_parser: Copyright (c) Ryan Davis, seattle.rb MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. parser-3.3.4.2/README.md000066400000000000000000000300441465510415600144440ustar00rootroot00000000000000# Parser [![Gem Version](https://badge.fury.io/rb/parser.svg)](https://badge.fury.io/rb/parser) [![Tests](https://github.com/whitequark/parser/workflows/Tests/badge.svg?branch=master)](https://github.com/whitequark/parser/actions?query=workflow%3ATests+branch%3Amaster) _Parser_ is a production-ready Ruby parser written in pure Ruby. It recognizes as much or more code than Ripper, Melbourne, JRubyParser or ruby\_parser, and is vastly more convenient to use. You can also use [unparser](https://github.com/mbj/unparser) to produce equivalent source code from Parser's ASTs. 
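
For a quick taste of that round trip, here is a minimal sketch; it assumes the separate `unparser` gem is installed (`Unparser.unparse` belongs to that gem, not to Parser itself), and the emitted source is equivalent rather than guaranteed byte-identical:

```ruby
require 'parser/current'
require 'unparser' # provided by the separate unparser gem

ast = Parser::CurrentRuby.parse('2 + 2')
Unparser.unparse(ast) # => "2 + 2" (equivalent source for the same AST)
```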
Sponsored by [Evil Martians](http://evilmartians.com). MacRuby and RubyMotion support sponsored by [CodeClimate](http://codeclimate.com). ## Installation $ gem install parser ## Usage Load Parser (see the [backwards compatibility](#backwards-compatibility) section below for explanation of `emit_*` calls): ```ruby require 'parser/current' # opt-in to most recent AST format: Parser::Builders::Default.emit_lambda = true Parser::Builders::Default.emit_procarg0 = true Parser::Builders::Default.emit_encoding = true Parser::Builders::Default.emit_index = true Parser::Builders::Default.emit_arg_inside_procarg0 = true Parser::Builders::Default.emit_forward_arg = true Parser::Builders::Default.emit_kwargs = true Parser::Builders::Default.emit_match_pattern = true ``` Parse a chunk of code: ```ruby p Parser::CurrentRuby.parse("2 + 2") # (send # (int 2) :+ # (int 2)) ``` Access the AST's source map: ```ruby p Parser::CurrentRuby.parse("2 + 2").loc # #, # @expression=#> p Parser::CurrentRuby.parse("2 + 2").loc.selector.source # "+" ``` Traverse the AST: see the documentation for [gem ast](https://whitequark.github.io/ast/). Parse a chunk of code and display all diagnostics: ```ruby parser = Parser::CurrentRuby.new parser.diagnostics.consumer = lambda do |diag| puts diag.render end buffer = Parser::Source::Buffer.new('(string)', source: "foo *bar") p parser.parse(buffer) # (string):1:5: warning: `*' interpreted as argument prefix # foo *bar # ^ # (send nil :foo # (splat # (send nil :bar))) ``` If you reuse the same parser object for multiple `#parse` runs, you need to `#reset` it. You can also use the `ruby-parse` utility (it's bundled with the gem) to play with Parser: $ ruby-parse -L -e "2+2" (send (int 2) :+ (int 2)) 2+2 ~ selector ~~~ expression (int 2) 2+2 ~ expression (int 2) 2+2 $ ruby-parse -E -e "2+2" 2+2 ^ tINTEGER 2 expr_end [0 <= cond] [0 <= cmdarg] 2+2 ^ tPLUS "+" expr_beg [0 <= cond] [0 <= cmdarg] 2+2 ^ tINTEGER 2 expr_end [0 <= cond] [0 <= cmdarg] 2+2 ^ false "$eof" expr_end [0 <= cond] [0 <= cmdarg] (send (int 2) :+ (int 2)) ## Features * Precise source location reporting. * [Documented](doc/AST_FORMAT.md) AST format which is convenient to work with. * A simple interface and a powerful, tweakable one. * Parses 1.8, 1.9, 2.0, 2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7, 3.0, 3.1, and 3.2 syntax with backwards-compatible AST formats. * Parses MacRuby and RubyMotion syntax extensions. * [Rewriting][rewriting] support. * Parsing error recovery. * Improved [clang-like][] diagnostic messages with location information. * Written in pure Ruby, runs on MRI >=2.0.0, JRuby and Rubinius (and historically, all versions of Ruby since 1.8) * Only one runtime dependency: the [ast][] gem. * [Insane][insane-lexer] Ruby lexer rewritten from scratch in Ragel. * 100% test coverage for Bison grammars (except error recovery). * Readable, commented source code. [clang-like]: http://clang.llvm.org/diagnostics.html [ast]: https://rubygems.org/gems/ast [insane-lexer]: http://web.archive.org/web/20210621201915/http://whitequark.org/blog/2013/04/01/ruby-hacking-guide-ch-11-finite-state-lexer/ [rewriting]: http://web.archive.org/web/20220123050223/http://whitequark.org/blog/2013/04/26/lets-play-with-ruby-code/ ## Documentation Documentation for Parser is available [online](https://whitequark.github.io/parser/). ### Node names Several Parser nodes seem to be confusing enough to warrant a dedicated README section. 
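
The S-expressions shown in the subsections below can also be visited programmatically with `Parser::AST::Processor`. Here is a minimal sketch; the subclass name and the counting logic are purely illustrative:

```ruby
require 'parser/current'

# Counts every method call, i.e. every (send) node, in a parsed AST.
class SendCounter < Parser::AST::Processor
  attr_reader :count

  def initialize
    super()
    @count = 0
  end

  def on_send(node)
    @count += 1
    super # let the default handler keep walking into child nodes
  end
end

counter = SendCounter.new
counter.process(Parser::CurrentRuby.parse('foo(bar(1), baz)'))
counter.count # => 3
```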
#### (block) The `(block)` node passes a Ruby block, that is, a closure, to a method call represented by its first child, a `(send)`, `(super)` or `(zsuper)` node. To demonstrate: ```bash $ ruby-parse -e 'foo { |x| x + 2 }' (block (send nil :foo) (args (arg :x)) (send (lvar :x) :+ (int 2))) ``` #### (begin) and (kwbegin) **TL;DR: Unless you perform rewriting, treat `(begin)` and `(kwbegin)` as the same node type.** Both `(begin)` and `(kwbegin)` nodes represent compound statements, that is, several expressions which are executed sequentally and the value of the last one is the value of entire compound statement. They may take several forms in the source code: * `foo; bar`: without delimiters * `(foo; bar)`: parenthesized * `begin foo; bar; end`: grouped with `begin` keyword * `def x; foo; bar; end`: grouped inside a method definition and so on. ```bash $ ruby-parse -e '(foo; bar)' (begin (send nil :foo) (send nil :bar)) $ ruby-parse -e 'def x; foo; bar end' (def :x (args) (begin (send nil :foo) (send nil :bar))) ``` Note that, despite its name, `kwbegin` node only has tangential relation to the `begin` keyword. Normally, Parser AST is semantic, that is, if two constructs look differently but behave identically, they get parsed to the same node. However, there exists a peculiar construct called post-loop in Ruby: ``` begin body end while condition ``` This specific syntactic construct, that is, keyword `begin..end` block followed by a postfix `while`, [behaves][postloop] very unlike other similar constructs, e.g. `(body) while condition`. While the body itself is wrapped into a `while-post` node, Parser also supports rewriting, and in that context it is important to not accidentally convert one kind of loop into another. [postloop]: http://rosettacode.org/wiki/Loops/Do-while#Ruby ``` $ ruby-parse -e 'begin foo end while cond' (while-post (send nil :cond) (kwbegin (send nil :foo))) $ ruby-parse -e 'foo while cond' (while (send nil :cond) (send nil :foo)) $ ruby-parse -e '(foo) while cond' (while (send nil :cond) (begin (send nil :foo))) ``` (Parser also needs the `(kwbegin)` node type internally, and it is highly problematic to map it back to `(begin)`.) ## Backwards compatibility Parser does _not_ use semantic versioning. Parser versions are structured as `x.y.z.t`, where `x.y.z` indicates the most recent supported Ruby release (support for every Ruby release that is chronologically earlier is implied), and `t` is a monotonically increasing number. The public API of Parser as well as the AST format (as listed in the documentation) are considered stable forever, although support for old Ruby versions may be removed at some point. Sometimes it is necessary to modify the format of AST nodes that are already being emitted in a way that would break existing applications. To avoid such breakage, applications must opt-in to these modifications; without explicit opt-in, Parser will continue to emit the old AST node format. The most recent set of opt-ins is specified in the [usage section](#usage) of this README. ## Compatibility with Ruby MRI Unfortunately, Ruby MRI often changes syntax in patchlevel versions. This has happened, at least, for every release since 1.9; for example, commits [c5013452](https://github.com/ruby/ruby/commit/c501345218dc5fb0fae90d56a0c6fd19d38df5bb) and [04bb9d6b](https://github.com/ruby/ruby/commit/04bb9d6b75a55d4000700769eead5a5cb942c25b) were backported all the way from HEAD to 1.9. Moreover, there is no simple way to track these changes. 
This policy makes it all but impossible to make Parser precisely compatible with the Ruby MRI parser. Indeed, at September 2014, it would be necessary to maintain and update ten different parsers together with their lexer quirks in order to be able to emulate any given released Ruby MRI version. As a result, Parser chooses a different path: the `parser/rubyXY` parsers recognize the syntax of the latest minor version of Ruby MRI X.Y at the time of the gem release. ## Compatibility with MacRuby and RubyMotion Parser implements the MacRuby 0.12 and RubyMotion mid-2015 parsers precisely. However, the lexers of these have been forked off Ruby MRI and independently maintained for some time, and because of that, Parser may accept some code that these upstream implementations are unable to parse. ## Known issues Adding support for the following Ruby MRI features in Parser would needlessly complicate it, and as they all are very specific and rarely occurring corner cases, this is not done. Parser has been extensively tested; in particular, it parses almost entire [Rubygems][rg] corpus. For every issue, a breakdown of affected gems is offered. [rg]: https://rubygems.org ### Void value expressions Ruby MRI prohibits so-called "void value expressions". For a description of what a void value expression is, see [this gist](https://gist.github.com/JoshCheek/5625007) and [this Parser issue](https://github.com/whitequark/parser/issues/72). It is unknown whether any gems are affected by this issue. ### Syntax check of block exits Similar to "void value expression" checks Ruby MRI also checks for correct usage of `break`, `next` and `redo`, if it's used outside of a {break,next,redo}-able context Ruby returns a syntax error starting from 3.3.0. `parser` gem simply doesn't run this type of checks. It is unknown whether any gems are affected by this issue. ### Invalid characters inside comments and literals Ruby MRI permits arbitrary non-7-bit byte sequences to appear in comments, as well as in string or symbol literals in form of escape sequences, regardless of source encoding. Parser requires all source code, including the expanded escape sequences, to consist of valid byte sequences in the source encoding that are convertible to UTF-8. As of 2013-07-25, there are about 180 affected gems. ### \u escape in 1.8 mode Ruby MRI 1.8 permits to specify a bare `\u` escape sequence in a string; it treats it like `u`. Ruby MRI 1.9 and later treat `\u` as a prefix for Unicode escape sequence and do not allow it to appear bare. Parser follows 1.9+ behavior. As of 2013-07-25, affected gems are: activerdf, activerdf_net7, fastreader, gkellog-reddy. ### Dollar-dash (This one is so obscure I couldn't even think of a saner name for this issue.) Pre-2.1 Ruby allows to specify a global variable named `$-`. Ruby 2.1 and later treat it as a syntax error. Parser follows 2.1 behavior. No known code is affected by this issue. ### EOF characters after embedded documents before 2.7 Code like `"=begin\n""=end\0"` is invalid for all versions of Ruby before 2.7. Ruby 2.7 and later parses it normally. Parser follows 2.7 behavior. It is unknown whether any gems are affected by this issue. 
## Contributors * [whitequark][] * Markus Schirp ([mbj][]) * Yorick Peterse ([yorickpeterse][]) * Magnus Holm ([judofyr][]) * Bozhidar Batsov ([bbatsov][]) [whitequark]: https://github.com/whitequark [mbj]: https://github.com/mbj [yorickpeterse]: https://github.com/yorickpeterse [judofyr]: https://github.com/judofyr [bbatsov]: https://github.com/bbatsov ## Acknowledgements The lexer testsuite is derived from [ruby\_parser](https://github.com/seattlerb/ruby_parser). The Bison parser rules are derived from [Ruby MRI](https://github.com/ruby/ruby) parse.y. ## Contributing 1. Make sure you have [Ragel ~> 6.7](http://www.colm.net/open-source/ragel/) installed 2. Fork it 3. Create your feature branch (`git checkout -b my-new-feature`) 4. Commit your changes (`git commit -am 'Add some feature'`) 5. Push to the branch (`git push origin my-new-feature`) 6. Create new Pull Request parser-3.3.4.2/Rakefile000066400000000000000000000115671465510415600146430ustar00rootroot00000000000000# encoding: utf-8 # frozen_string_literal: true require 'bundler/gem_tasks' require 'rake/testtask' require 'rake/clean' require 'date' task :default => [:test] Rake::TestTask.new do |t| t.libs = %w(test/ lib/) t.test_files = FileList["test/**/test_*.rb"] t.warning = true end task :test_cov do ENV['COVERAGE'] = '1' Rake::Task['test'].invoke end task :build => [:generate_release, :changelog] GENERATED_FILES = %w(lib/parser/lexer-F0.rb lib/parser/lexer-F1.rb lib/parser/lexer-strings.rb lib/parser/ruby18.rb lib/parser/ruby19.rb lib/parser/ruby20.rb lib/parser/ruby21.rb lib/parser/ruby22.rb lib/parser/ruby23.rb lib/parser/ruby24.rb lib/parser/ruby25.rb lib/parser/ruby26.rb lib/parser/ruby27.rb lib/parser/ruby30.rb lib/parser/ruby31.rb lib/parser/ruby32.rb lib/parser/ruby33.rb lib/parser/ruby34.rb lib/parser/macruby.rb lib/parser/rubymotion.rb) CLEAN.include(GENERATED_FILES) ENCODING_COMMENT = "# -*- encoding:utf-8; warn-indent:false; frozen_string_literal: true -*-\n" desc 'Generate the Ragel lexer and Racc parser.' task :generate => GENERATED_FILES do Rake::Task[:ragel_check].invoke GENERATED_FILES.each do |filename| content = File.read(filename) content = ENCODING_COMMENT + content unless content.start_with?(ENCODING_COMMENT) File.open(filename, 'w') do |io| io.write content end end end task :regenerate => [:clean, :generate] desc 'Generate the Ragel lexer and Racc parser in release mode.' 
task :generate_release => [:clean_env, :regenerate] task :clean_env do ENV.delete 'RACC_DEBUG' end task :ragel_check do require 'cliver' Cliver.assert('ragel', '~> 6.7') end desc 'Generate YARD documentation' task :yard => :generate do sh('yard doc') end PAGES_REPO = 'git@github.com:whitequark/parser' desc "Build and deploy documentation to GitHub pages" task :pages do system "git clone #{PAGES_REPO} gh-temp/ -b gh-pages; rm gh-temp/* -rf; touch gh-temp/.nojekyll" or abort system "yardoc -o gh-temp/;" or abort system "cd gh-temp/; git add -A; git commit -m 'Updated pages.'; git push -f origin gh-pages" or abort FileUtils.rm_rf 'gh-temp' end desc 'Generate Changelog' task :changelog do fs = "\u{fffd}" format = "%d#{fs}%s#{fs}%an#{fs}%ai" # Format: version => { commit-class => changes } changelog = Hash.new do |hash, version| hash[version] = Hash.new do |hash, klass| hash[klass] = [] end end branch = `git describe HEAD --all`.strip.gsub(/.+\/([^\/]+)$/, '\1') IO.popen("git log --pretty='#{format}' " \ "remotes/origin/2.0 remotes/origin/2.1 remotes/origin/2.2 #{branch}", 'r') do |io| current_version = nil io.each_line do |line| version, message, author, date = line. match(/^(?: \((.*)\))?#{fs}(.*)#{fs}(.*)#{fs}(.*)$/o).captures date = Date.parse(date) current_version = "#{$1} (#{date})" if version =~ /(v[\d\w.]+)/ current_version = "Not released (#{date})" \ if version =~ /(^| |\/)#{Regexp.escape branch}$/ && !branch.start_with?('v') next if current_version.nil? changelog[current_version] # add a hash next if message !~ /^[+*-]/ changelog[current_version][message[0]] << "#{message[1..-1]} (#{author})" end end commit_classes = { '*' => 'API modifications:', '+' => 'Features implemented:', '-' => 'Bugs fixed:', } File.open('CHANGELOG.md', 'w') do |io| io.puts 'Changelog' io.puts '=========' io.puts changelog.each do |version, commits| next if commits.empty? io.puts version io.puts '-' * version.length io.puts commit_classes.each do |sigil, description| next unless commits[sigil].any? io.puts description commits[sigil].uniq.each do |commit| io.puts " * #{commit.gsub('<', '\<').lstrip}" end io.puts end end end sh('git commit CHANGELOG.md -m "Update changelog." || true') end file 'lib/parser/lexer-F1.rb' => 'lib/parser/lexer.rl' do |t| sh "ragel -F1 -R #{t.source} -o #{t.name}" end file 'lib/parser/lexer-F0.rb' => 'lib/parser/lexer.rl' do |t| sh "ragel -F0 -R #{t.source} -o #{t.name}" end file 'lib/parser/lexer-strings.rb' => 'lib/parser/lexer-strings.rl' do |t| sh "ragel -F0 -R #{t.source} -o #{t.name}" end rule '.rb' => '.y' do |t| opts = [ "--superclass=Parser::Base", t.source, "-o", t.name ] opts << "--no-line-convert" unless ENV['RACC_DEBUG'] opts << "--debug" if ENV['RACC_DEBUG'] sh "racc", *opts end task :test => [:generate] parser-3.3.4.2/bin/000077500000000000000000000000001465510415600137345ustar00rootroot00000000000000parser-3.3.4.2/bin/ruby-parse000077500000000000000000000002671465510415600157600ustar00rootroot00000000000000#! /usr/bin/env ruby # frozen_string_literal: true $LOAD_PATH.unshift(File.expand_path('../../lib', __FILE__)) require 'parser/runner/ruby_parse' Parser::Runner::RubyParse.go(ARGV) parser-3.3.4.2/bin/ruby-rewrite000077500000000000000000000002731465510415600163240ustar00rootroot00000000000000#! 
/usr/bin/env ruby # frozen_string_literal: true $LOAD_PATH.unshift(File.expand_path('../../lib', __FILE__)) require 'parser/runner/ruby_rewrite' Parser::Runner::RubyRewrite.go(ARGV) parser-3.3.4.2/ci/000077500000000000000000000000001465510415600135575ustar00rootroot00000000000000parser-3.3.4.2/ci/run_rubocop_specs000077500000000000000000000003701465510415600172370ustar00rootroot00000000000000#!/usr/bin/env bash set -eux bundle exec rake generate git clone https://github.com/rubocop-hq/rubocop.git --depth=1 cd rubocop export BUNDLE_GEMFILE=Gemfile echo "gem 'parser', path: '../'" > Gemfile.local bundle install bundle exec rake spec parser-3.3.4.2/doc/000077500000000000000000000000001465510415600137315ustar00rootroot00000000000000parser-3.3.4.2/doc/AST_FORMAT.md000066400000000000000000001052501465510415600157550ustar00rootroot00000000000000AST and Source Location ======================= ## Literals ### Singletons Format: ~~~ (true) "true" ~~~~ expression (false) "false" ~~~~~ expression (nil) "nil" ~~~ expression ~~~ ### Integer Format: ~~~ (int 123) "123" ~~~ expression (int -123) "-123" ^ operator ~~~ expression (int 1) "__LINE__" ~~~~~~~~ expression ~~~ ### Float Format: ~~~ (float 1.0) "1.0" ~~~ expression (float -1.0) "-1.0" ^ operator ~~~~ expression ~~~ ### Complex Format: ~~~ (complex (0+1i)) "1i" ~~ expression (complex (0+(1/1)*i)) "1ri" ~~~ expression ~~~ ### Rational Format: ~~~ (rational (2/1)) "2.0r" ~~~~ expression ~~~ ### String #### Plain Format: ~~~ (str "foo") "'foo'" ^ begin ^ end ~~~~~ expresion (string "foo.rb") "__FILE__" ~~~~~~~~ expression ~~~ #### With interpolation Format: ~~~ (dstr (str "foo") (begin (lvar bar)) (str "baz")) '"foo#{bar}baz"' ^ begin ^ end ~~~~~~~~~~~~~~ expression ^^ begin (begin) ^ end (begin) ^^^^^^ expression (begin) ~~~ #### Here document Format: ~~~ (str "foo\nbar\n") '< 2" ~~ operator ~~~~~~ expression ~~~ ##### With label (1.9) Format: ~~~ (pair (sym :answer) (int 42)) "answer: 42" ^ operator (pair) ~~~~~~ expression (sym) ~~~~~~~~~~ expression (pair) ~~~ #### With local variable Format: ~~~ (pair (sym :foo) (lvar :foo)) "{foo:}" ^ operator (pair) ~~~ expression (sym) ~~~ expression (lvar) ~~~ #### With constant Format: ~~~ (pair (sym :foo) (const nil :foo)) "{FOO:}" ^ operator (pair) ~~~ expression (const) ~~~ expression (lvar) ~~~ #### With method call Format: ~~~ (pair (sym :puts) (send nil :puts)) "{puts:}" ^ operator (pair) ~~~~ expression (sym) ~~~~ expression (send) ~~~ #### Plain Format: ~~~ (hash (pair (int 1) (int 2)) (pair (int 3) (int 4))) "{1 => 2, 3 => 4}" ^ begin ^ end ~~~~~~~~~~~~~~~~ expression ~~~ ### Kwargs Starting from Ruby 2.7 only implicit hash literals (that are not wrapped into `{ .. }`) are passed as keyword arguments. Explicit hash literals are passed as positional arguments. This is reflected in AST as `kwargs` node that is emitted only for implicit hash literals and only if `emit_kwargs` compatibility flag is enabled. Note that it can be a part of `send`, `csend`, `index` and `yield` nodes. 
Format: ~~~ (kwargs (pair (int 1) (int 2)) (kwsplat (lvar :bar)) (pair (sym :baz) (int 3))) "foo(1 => 2, **bar, baz: 3)" ~~~~~~~~~~~~~~~~~~~~~ expression ~~~ #### Keyword splat (2.0) Can also be used in argument lists: `foo(bar, **baz)` Format: ~~~ (kwsplat (lvar :foo)) "**foo" ~~ operator ~~~~~ expression ~~~ #### With interpolation (2.0) Format: ~~~ (hash (pair (sym :foo) (int 2)) (kwsplat (lvar :bar))) "{ foo: 2, **bar }" ^ begin ^ end ~~~~~~~~~~~~~~~~~ expression ~~~ ### Range #### Inclusive Format: ~~~ (irange (int 1) (int 2)) "1..2" ~~ operator ~~~~ expression ~~~ #### Exclusive Format: ~~~ (erange (int 1) (int 2)) "1...2" ~~~ operator ~~~~~ expression ~~~ ### Endless (2.6) Format: ~~~ (irange (int 1) nil) "1.." ~~ operator ~~~ expression (erange (int 1) nil) "1..." ~~~ operator ~~~~ expression ~~~ ### Beginless (2.7) Format: ~~~ (irange nil (int 1)) "..1" ~~ operator ~~~ expression (erange nil (int 1)) "...1" ~~~ operator ~~~~ expression ~~~ ## Access ### Self Format: ~~~ (self) "self" ~~~~ expression ~~~ ### Local variable Format: ~~~ (lvar :foo) "foo" ~~~ expression ~~~ ### Instance variable Format: ~~~ (ivar :@foo) "@foo" ~~~~ expression ~~~ ### Class variable Format: ~~~ (cvar :@@foo) "@@foo" ~~~~~ expression ~~~ ### Global variable #### Regular global variable Format: ~~~ (gvar :$foo) "$foo" ~~~~ expression ~~~ #### Regular expression capture groups Format: ~~~ (nth-ref 1) "$1" ~~ expression ~~~ #### Regular expression back-references Format: ~~~ (back-ref :$&) "$&" ~~ expression (back-ref :$`) "$`" (back-ref :$') "$'" (back-ref :$+) "$+" ~~~ ### Constant #### Top-level constant Format: ~~~ (const (cbase) :Foo) "::Foo" ~~~ name ~~ double_colon ~~~~~ expression ~~~ #### Scoped constant Format: ~~~ (const (lvar :a) :Foo) "a::Foo" ~~~ name ~~ double_colon ~~~~~~ expression ~~~ #### Unscoped constant Format: ~~~ (const nil :Foo) "Foo" ~~~ name ~~~ expression ~~~ ### defined? Format: ~~~ (defined? (lvar :a)) "defined? 
a" ~~~~~~~~ keyword ~~~~~~~~~~ expression "defined?(a)" ~~~~~~~~ keyword ^ begin ^ end ~~~~~~~~~~~ expression ~~~ ## Assignment ### To local variable Format: ~~~ (lvasgn :foo (lvar :bar)) "foo = bar" ^ operator ~~~~~~~~~ expression ~~~ ### To instance variable Format: ~~~ (ivasgn :@foo (lvar :bar)) "@foo = bar" ^ operator ~~~~~~~~~~ expression ~~~ ### To class variable Format: ~~~ (cvasgn :@@foo (lvar :bar)) "@@foo = bar" ^ operator ~~~~~~~~~~~ expression ~~~ ### To global variable Format: ~~~ (gvasgn :$foo (lvar :bar)) "$foo = bar" ^ operator ~~~~~~~~~~ expression ~~~ ### To constant #### Top-level constant Format: ~~~ (casgn (cbase) :Foo (int 1)) "::Foo = 1" ~~~ name ~ operator ~~~~~~~ expression ~~~ #### Scoped constant Format: ~~~ (casgn (lvar :a) :Foo (int 1)) "a::Foo = 1" ~~~ name ~ operator ~~~~~~~~ expression ~~~ #### Unscoped constant Format: ~~~ (casgn nil :Foo (int 1)) "Foo = 1" ~~~ name ~ operator ~~~~~~~ expression ~~~ ### To attribute Format: ~~~ (send (self) :foo= (int 1)) "self.foo = 1" ^ dot ~~~ selector ^ operator ~~~~~~~~~~~~ expression ~~~ ### To attribute, using "safe navigation operator" Format: ~~~ (csend (self) :foo= (int 1)) "self&.foo = 1" ^^ dot ~~~ selector ^ operator ~~~~~~~~~~~~~ expression ~~~ ### Multiple assignment #### Multiple left hand side Format: ~~~ (mlhs (lvasgn :a) (lvasgn :b)) "a, b" ~~~~ expression "(a, b)" ^ begin ^ end ~~~~~~ expression ~~~ #### Assignment Rule of thumb: every node inside `(mlhs)` is "incomplete"; to make it "complete", one could imagine that a corresponding node from the mrhs is "appended" to the node in question. This applies both to side-effect free assignments (`lvasgn`, etc) and side-effectful assignments (`send`). Format: ~~~ (masgn (mlhs (lvasgn :foo) (lvasgn :bar)) (array (int 1) (int 2))) "foo, bar = 1, 2" ^ operator ~~~~~~~~~~~~~~~ expression (masgn (mlhs (ivasgn :@a) (cvasgn :@@b)) (array (splat (lvar :c)))) "@a, @@b = *c" (masgn (mlhs (lvasgn :a) (mlhs (lvasgn :b)) (lvasgn :c)) (lvar :d)) "a, (b, c) = d" (masgn (mlhs (send (self) :a=) (send (self) :[]= (int 1))) (lvar :a)) "self.a, self[1] = a" ~~~ ### Binary operator-assignment Binary operator-assignment features the same "incomplete assignments" and "incomplete calls" as [multiple assignment](#assignment-1). #### Variable binary operator-assignment Format: ~~~ (op-asgn (lvasgn :a) :+ (int 1)) "a += 1" ~~ operator ~~~~~~ expression (op-asgn (ivasgn :a) :+ (int 1)) "@a += 1" ~~~ #### Method binary operator-assignment Format: ~~~ (op-asgn (send (ivar :@a) :b) :+ (int 1)) "@a.b += 1" ~ selector (send) ~~~~ expression (send) ~~ operator (op-asgn) ~~~~~~~~~ expression (op-asgn) (op-asgn (send (ivar :@a) :[] (int 0) (int 1))) :+ (int 1)) "@a[0, 1] += 1" ~~~~~~ selector (send) ~~~~~~~~ expression (send) ~~ operator (op-asgn) ~~~~~~~~~~~~~ expression (op-asgn) ~~~ ### Logical operator-assignment Logical operator-assignment features the same "incomplete assignments" and "incomplete calls" as [multiple assignment](#assignment-1). 
#### Variable logical operator-assignment Format: ~~~ (or-asgn (ivasgn :@a) (int 1)) "@a ||= 1" ~~~ operator ~~~~~~~~ expression (and-asgn (lvasgn :a) (int 1)) "a &&= 1" ~~~ operator ~~~~~~~ expression ~~~ #### Method logical operator-assignment Format: ~~~ (or-asgn (send (ivar :@foo) :bar) (int 1)) "@foo.bar ||= 1" ~~~ selector (send) ~~~~~~~~ expr (send) ~~~ operator (or-asgn) ~~~~~~~~~~~~~~ expression (or-asgn) (and-asgn (send (lvar :@foo) :bar) (int 1)) "foo.bar &&= 1" ~~~ selector (send) ~~~~~~~ expr (send) ~~~ operator (and-asgn) ~~~~~~~~~~~~~ expression (and-asgn) (or-asgn (send (ivar :@foo) :[] (int 1) (int 2)) (int 1)) "@foo[1, 2] ||= 1" ~~~~~~ selector (send) ~~~~~~~~~~ expr (send) ~~~ operator (or-asgn) ~~~~~~~~~~~~~~~~ expression (or-asgn) ~~~ ### Right-hand assignment Format: ~~~ (lvasgn :a (int 1)) "1 => a" ~~~~~~ expression ~ name ~~ operator ~~~ #### Multiple right-hand assignment Format: ~~~ (masgn (mlhs (lvasgn :a) (lvasgn :b)) (send (int 13) :divmod (int 5))) "13.divmod(5) => a,b" ~~~~~~~~~~~~~~~~~~~ expression ^^ operator ~~~ ## Class and module definition ### Module Format: ~~~ (module (const nil :Foo) (nil)) "module Foo; end" ~~~~~~ keyword ~~~ end ~~~ ### Class Format: ~~~ (class (const nil :Foo) (const nil :Bar) (nil)) "class Foo < Bar; end" ~~~~~ keyword ~~~ end ~ operator ~~~~~~~~~~~~~~~~~~~~ expression (class (const nil :Foo) nil (nil)) "class Foo; end" ~~~~~ keyword ~~~ end ~~~~~~~~~~~~~~ expression ~~~ ### Singleton class Format: ~~~ (sclass (lvar :a) (nil)) "class << a; end" ~~~~~ keyword ~~ operator ~~~ end ~~~~~~~~~~~~~~~ expression ~~~ ## Method (un)definition ### Instance methods Format: ~~~ (def :foo (args) nil) "def foo; end" ~~~ keyword ~~~ name ~~~ end ~~~~~~~~~~~~ expression ~~~ ### Singleton methods Format: ~~~ (defs (self) :foo (args) nil) "def self.foo; end" ~~~ keyword ~~~ name ~~~ end ~~~~~~~~~~~~~~~~~ expression ~~~ ### "Endless" method Format: ~~~ (def :foo (args) (int 42)) "def foo() = 42" ~~~ keyword ~~~ name ^ assignment ~~~~~~~~~~~~~~ expression ~~~ ### "Endless" singleton method Format: ~~~ (defs (self) :foo (args) (int 42)) "def self.foo() = 42" ~~~ keyword ~~~ name ^ assignment ~~~~~~~~~~~~~~~~~~~ expression ~~~ ### Undefinition Format: ~~~ (undef (sym :foo) (sym :bar) (dsym (str "foo") (int 1))) "undef foo :bar :"foo#{1}"" ~~~~~ keyword ~~~~~~~~~~~~~~~~~~~~~~~~~ expression ~~~ ## Aliasing ### Method aliasing Format: ~~~ (alias (sym :foo) (dsym (str "foo") (int 1))) "alias foo :"foo#{1}"" ~~~~~ keyword ~~~~~~~~~~~~~~~~~~~~ expression ~~~ ### Global variable aliasing Format: ~~~ (alias (gvar :$foo) (gvar :$bar)) "alias $foo $bar" ~~~~~ keyword ~~~~~~~~~~~~~~~ expression (alias (gvar :$foo) (back-ref :$&)) "alias $foo $&" ~~~~~ keyword ~~~~~~~~~~~~~~~ expression ~~~ ## Formal arguments Format: ~~~ (args (arg :foo)) "(foo)" ~~~~~ expression ~~~ ### Required argument Format: ~~~ (arg :foo) "foo" ~~~ expression ~~~ name ~~~ ### Optional argument Format: ~~~ (optarg :foo (int 1)) "foo = 1" ~~~~~~~ expression ^ operator ~~~ name ~~~ ### Named splat argument Format: ~~~ (restarg :foo) "*foo" ~~~~ expression ~~~ name ~~~ Begin of the `expression` points to `*`. ### Unnamed splat argument Format: ~~~ (restarg) "*" ^ expression ~~~ ### Block argument Format: ~~~ (blockarg :foo) "&foo" ~~~ name ~~~~ expression ~~~ Begin of the `expression` points to `&`. ### Anonymous block argument Format: ~~~ (blockarg nil) "&" ~ expression ~~~ ### Auto-expanding proc argument (1.9) In Ruby 1.9 and later, when a proc-like closure (i.e. 
a closure created by capturing a block or with the `proc` method, but not with the `->{}` syntax or the `lambda` method) has exactly one argument, and it is called with more than one argument, the behavior is as if the array of all arguments was instead passed as the sole argument. This behavior can be prevented by adding a comma after the sole argument (e.g. `|foo,|`). Format: ~~~ (procarg0 (arg :foo)) "|foo|" ~~~ expression (procarg0 (arg :foo) (arg :bar)) "|(foo, bar)|" ~ begin ~ end ~~~~~~~~~~ expression ~~~ ### Expression arguments Ruby 1.8 allows to use arbitrary expressions as block arguments, such as `@var` or `foo.bar`. Such expressions should be treated as if they were on the lhs of a multiple assignment. Format: ~~~ (args (arg_expr (ivasgn :@bar))) "|@bar|" (args (arg_expr (send (send nil :foo) :a=))) "|foo.a|" (args (restarg_expr (ivasgn :@bar))) "|*@bar|" (args (blockarg_expr (ivasgn :@bar))) "|&@bar|" ~~~ ### Block shadow arguments Format: ~~~ (args (shadowarg :foo) (shadowarg :bar)) "|; foo, bar|" ~~~ ### Decomposition Format: ~~~ (def :f (args (arg :a) (mlhs (arg :foo) (restarg :bar)))) "def f(a, (foo, *bar)); end" ^ begin ^ end ~~~~~~~~~~~ expression ~~~ ### Required keyword argument Format: ~~~ (kwarg :foo) "foo:" ~~~~ expression ~~~~ name ~~~ ### Optional keyword argument Format: ~~~ (kwoptarg :foo (int 1)) "foo: 1" ~~~~~~ expression ~~~~ name ~~~ ### Named keyword splat argument Format: ~~~ (kwrestarg :foo) "**foo" ~~~~~ expression ~~~ name ~~~ ### Unnamed keyword splat argument Format: ~~~ (kwrestarg) "**" ~~ expression ~~~ ### Keyword nil argument Format: ~~~ (kwnilarg) "**nil" ~~~ name ~~~~~ expression ~~~ ### Objective-C arguments MacRuby includes a few more syntactic "arguments" whose name becomes the part of the Objective-C method name, despite looking like Ruby 2.0 keyword arguments, and are thus treated differently. #### Objective-C label-like keyword argument Format: ~~~ (objc-kwarg :a :b) "a: b" ~ keyword ~ operator ~ argument ~~~~ expression ~~~ #### Objective-C pair-like keyword argument Format: ~~~ (objc-kwarg :a :b) "a => b" ~ keyword ~~ operator ~ argument ~~~~~~ expression ~~~ #### Objective-C keyword splat argument Format: ~~~ (objc-restarg (objc-kwarg :foo)) "(*a: b)" ~ objc-kwarg.keyword ~ objc-kwarg.operator ~ objc-kwarg.argument ~ operator ~~~~~ expression ~~~ Note that these splat arguments will only be parsed inside parentheses, e.g. in the following code: ~~~ def f((*a: b)); end ~~~ However, the following code results in a parse error: ~~~ def f(*a: b); end ~~~ ## Numbered parameters ### Block with numbered parameters Ruby 2.7 introduced a feature called "numbered parameters". Numbered and ordinal parameters are mutually exclusive, so if the block has only numbered parameters it also has a different AST node. Note that the second child represents a total number of numbered parameters. Format: ~~~ s(:numblock, s(:send, nil, :proc), 3, s(:send, s(:lvar, :_1), :+, s(:lvar, :_3))) "proc { _1 + _3 }" ~ begin ~ end ~~~~~~~~~~~~~~~~ expression ~~~ ## Forward arguments ### Method definition accepting only forwarding arguments Ruby 2.7 introduced a feature called "arguments forwarding". When a method takes any arguments for forwarding them in the future the whole `args` node gets replaced with `forward-args` node. 
Format if `emit_forward_arg` compatibility flag is disabled: ~~~ (def :foo (forward-args) nil) "def foo(...); end" ~ end ~ begin ~~~~~ expression ~~~ However, Ruby 3.0 added support for leading arguments before `...`, and so it can't be used as a replacement of the `(args)` node anymore. To solve it `emit_forward_arg` should be enabled. Format if `emit_forward_arg` compatibility flag is enabled: ~~~ (def :foo (args (forward-arg)) nil) "def foo(...); end" ~ begin (args) ~ end (args) ~~~~~ expression (args) ~~~ expression (forward_arg) ~~~ Note that the node is called `forward_arg` when emitted separately. ### Method call taking arguments of the currently forwarding method Format: ~~~ (send nil :foo (forwarded-args)) "foo(...)" ~~~ expression ~~~ ### Method call taking positional arguments of the currently called method Format: ~~~ (send nil :foo (forwarded-restarg)) "foo(*)" ~ expression ~~~ ### Method call taking keyword arguments of the currently called method Format: ~~~ (send nil :foo (forwarded-kwrestarg)) "foo(**)" ~~ expression ~~~ ## Send ### To self Format: ~~~ (send nil :foo (lvar :bar)) "foo(bar)" ~~~ selector ^ begin ^ end ~~~~~~~~ expression ~~~ ### To receiver Format: ~~~ (send (lvar :foo) :bar (int 1)) "foo.bar(1)" ^ dot ~~~ selector ^ begin ^ end ~~~~~~~~~~ expression (send (lvar :foo) :+ (int 1)) "foo + 1" ^ selector ~~~~~~~ expression (send (lvar :foo) :-@) "-foo" ^ selector ~~~~ expression (send (lvar :foo) :a= (int 1)) "foo.a = 1" ~ selector ^ operator ~~~~~~~~~ expression ~~~ ### To superclass Format of super with arguments: ~~~ (super (lvar :a)) "super a" ~~~~~ keyword ~~~~~~~ expression (super) "super()" ^ begin ^ end ~~~~~ keyword ~~~~~~~ expression ~~~ Format of super without arguments (**z**ero-arity): ~~~ (zsuper) "super" ~~~~~ keyword ~~~~~ expression ~~~ ### To block argument Format: ~~~ (yield (lvar :foo)) "yield(foo)" ~~~~~ keyword ^ begin ^ end ~~~~~~~~~~ expression ~~~ ### Indexing Format: ~~~ (index (lvar :foo) (int 1)) "foo[1]" ^ begin ^ end ~~~~~~ expression (indexasgn (lvar :bar) (int 1) (int 2) (lvar :baz)) "bar[1, 2] = baz" ^ begin ^ end ^ operator ~~~~~~~~~~~~~~~ expression ~~~ ### Passing a literal block ~~~ (block (send nil :foo) (args (arg :bar)) (begin ...)) "foo do |bar|; end" ~~ begin ~~~ end ~~~~~~~~~~~~~ expression ~~~ ### Passing expression as block Used when passing expression as block `foo(&bar)` ~~~ (send nil :foo (int 1) (block-pass (lvar :foo))) "foo(1, &foo)" ^ operator ~~~~ expression ~~~ ### Passing expression as anonymous block `foo(&)` ~~~ (send nil :foo (int 1) (block-pass nil)) "foo(1, &)" ^ operator ~ expression ~~~ ### "Stabby lambda" ~~~ (block (lambda) (args) nil) "-> {}" ~~ lambda.expression ~~~ ### "Safe navigation operator" ~~~ (csend (send nil :foo) :bar) "foo&.bar" ~~ dot ~~~ ### Objective-C variadic send MacRuby allows to pass a variadic amount of arguments via the last keyword "argument". Semantically, these, together with the pair value of the last pair in the hash implicitly passed as the last argument, form an array, which replaces the pair value. Despite that, the node is called `objc-varargs` to distinguish it from a literal array passed as a value. 
~~~ (send nil :foo (int 1) (hash (pair (sym :bar) (objc-varargs (int 1) (int 2) (nil))))) "foo(1, bar: 2, 3, nil)" ~~~~~~~~~ expression (array) ~~~ ## Control flow ### Logical operators #### Binary (and or && ||) Format: ~~~ (and (lvar :foo) (lvar :bar)) "foo and bar" ~~~ operator ~~~~~~~~~~~ expression ~~~ ~~~ (or (lvar :foo) (lvar :bar)) "foo or bar" ~~ operator ~~~~~~~~~~ expression ~~~ #### Unary (! not) (1.8) Format: ~~~ (not (lvar :foo)) "!foo" ^ operator "not foo" ~~~ operator ~~~ ### Branching #### Without else Format: ~~~ (if (lvar :cond) (lvar :iftrue) nil) "if cond then iftrue; end" ~~ keyword ~~~~ begin ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~ expression "if cond; iftrue; end" ~~ keyword ~~~ end ~~~~~~~~~~~~~~~~~~~~ expression "iftrue if cond" ~~ keyword ~~~~~~~~~~~~~~ expression (if (lvar :cond) nil (lvar :iftrue)) "unless cond then iftrue; end" ~~~~~~ keyword ~~~~ begin ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression "unless cond; iftrue; end" ~~~~~~ keyword ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~ expression "iftrue unless cond" ~~~~~~ keyword ~~~~~~~~~~~~~~~~~~ expression ~~~ #### With else Format: ~~~ (if (lvar :cond) (lvar :iftrue) (lvar :iffalse)) "if cond then iftrue; else; iffalse; end" ~~ keyword ~~~~ begin ~~~~ else ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression "if cond; iftrue; else; iffalse; end" ~~ keyword ~~~~ else ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression (if (lvar :cond) (lvar :iffalse) (lvar :iftrue)) "unless cond then iftrue; else; iffalse; end" ~~~~~~ keyword ~~~~ begin ~~~~ else ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression "unless cond; iftrue; else; iffalse; end" ~~~~~~ keyword ~~~~ else ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression ~~~ #### With elsif Format: ~~~ (if (lvar :cond1) (int 1) (if (lvar :cond2 (int 2) (int 3)))) "if cond1; 1; elsif cond2; 2; else 3; end" ~~ keyword (left) ~~~~~ else (left) ~~~ end (left) ~~~~~ keyword (right) ~~~~ else (right) ~~~ end (right) ~~~ #### Ternary Format: ~~~ (if (lvar :cond) (lvar :iftrue) (lvar :iffalse)) "cond ? 
iftrue : iffalse" ^ question ^ colon ~~~~~~~~~~~~~~~~~~~~~~~ expression ~~~ ### Case matching #### When clause Format: ~~~ (when (regexp "foo" (regopt)) (begin (lvar :bar))) "when /foo/ then bar" ~~~~ keyword ~~~~ begin ~~~~~~~~~~~~~~~~~~~ expression (when (int 1) (int 2) (send nil :meth)) "when 1, 2; meth" (when (int 1) (splat (lvar :foo)) (send nil :meth)) "when 1, *foo; meth" (when (splat (lvar :foo)) (send nil :meth)) "when *foo; meth" ~~~ #### Case-expression clause ##### Without else Format: ~~~ (case (lvar :foo) (when (str "bar") (lvar :bar)) nil) "case foo; when "bar"; bar; end" ~~~~ keyword ~~~ end ~~~ ##### With else Format: ~~~ (case (lvar :foo) (when (str "bar") (lvar :bar)) (lvar :baz)) "case foo; when "bar"; bar; else baz; end" ~~~~ keyword ~~~~ else ~~~ end ~~~ #### Case-conditions clause ##### Without else Format: ~~~ (case nil (when (lvar :bar) (lvar :bar)) nil) "case; when bar; bar; end" ~~~~ keyword ~~~ end ~~~ ##### With else Format: ~~~ (case nil (when (lvar :bar) (lvar :bar)) (lvar :baz)) "case; when bar; bar; else baz; end" ~~~~ keyword ~~~~ else ~~~ end (case nil (lvar :baz)) "case; else baz; end" ~~~~ keyword ~~~~ else ~~~ end ~~~ ### Looping #### With precondition Format: ~~~ (while (lvar :condition) (send nil :foo)) "while condition do foo; end" ~~~~~ keyword ~~ begin ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression "while condition; foo; end" ~~~~~ keyword ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~~ expression "foo while condition" ~~~~~ keyword ~~~~~~~~~~~~~~~~~~~ expression (until (lvar :condition) (send nil :foo)) "until condition do foo; end" ~~~~~ keyword ~~ begin ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression (until (lvar :condition) (send nil :foo)) "until condition; foo; end" ~~~~~ keyword ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~~~ expression "foo until condition" ~~~~~ keyword ~~~~~~~~~~~~~~~~~~~ expression ~~~ #### With postcondition Format: ~~~ (while-post (lvar :condition) (kwbegin (send nil :foo))) "begin; foo; end while condition" ~~~~~ begin (begin) ~~~ end (begin) ~~~~~ keyword (while-post) (until-post (lvar :condition) (kwbegin (send nil :foo))) "begin; foo; end until condition" ~~~~~ begin (begin) ~~~ end (begin) ~~~~~ keyword (until-post) ~~~ #### For-in Format: ~~~ (for (lvasgn :a) (lvar :array) (send nil :p (lvar :a))) "for a in array do p a; end" ~~~ keyword ~~ in ~~ begin ~~~ end "for a in array; p a; end" ~~~ keyword ~~ in ~~~ end (for (mlhs (lvasgn :a) (lvasgn :b)) (lvar :array) (send nil :p (lvar :a) (lvar :b))) "for a, b in array; p a, b; end" ~~~ #### Break Format: ~~~ (break (int 1)) "break 1" ~~~~~ keyword ~~~~~~~ expression ~~~ #### Next Format: ~~~ (next (int 1)) "next 1" ~~~~ keyword ~~~~~~ expression ~~~ #### Redo Format: ~~~ (redo) "redo" ~~~~ keyword ~~~~ expression ~~~ ### Return Format: ~~~ (return (lvar :foo)) "return(foo)" ~~~~~~ keyword ~~~~~~~~~~~ expression ~~~ ### Exception handling #### Rescue body Format: ~~~ (resbody (array (const nil :Exception) (const nil :A)) (lvasgn :bar) (int 1)) "rescue Exception, A => bar; 1" ~~~~~~ keyword ~~ assoc ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression "rescue Exception, A => bar then 1" ~~~~~~ keyword ~~ assoc ~~~~ begin (resbody (array (const nil :Exception)) (ivasgn :bar) (int 1)) "rescue Exception => @bar; 1" ~~~~~~ keyword ~~ assoc (resbody nil (lvasgn :bar) (int 1)) "rescue => bar; 1" ~~~~~~ keyword ~~ assoc (resbody nil nil (int 1)) "rescue; 1" ~~~~~~ keyword ~~~ #### Rescue statement ##### Without else Format: ~~~ (begin (rescue (send nil :foo) (resbody ...) (resbody ...) 
nil)) "begin; foo; rescue Exception; rescue; end" ~~~~~ begin ~~~ end ~~~~~~~~~~~~~~~~~ expression (rescue.resbody/1) ~~~~~~~ expression (rescue.resbody/2) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression (rescue) ~~~ ##### With else Format: ~~~ (begin (rescue (send nil :foo) (resbody ...) (resbody ...) (true))) "begin; foo; rescue Exception; rescue; else true end" ~~~~~ begin ~~~~ else (rescue) ~~~ end ~~~ #### Ensure statement Format: ~~~ (begin (ensure (send nil :foo) (send nil :bar)) "begin; foo; ensure; bar; end" ~~~~~ begin ~~~~~~ keyword (ensure) ~~~ end ~~~ #### Rescue with ensure Format: ~~~ (begin (ensure (rescue (send nil :foo) (resbody ...) (int 1)) (send nil :bar)) "begin; foo; rescue; nil; else; 1; ensure; bar; end" ~~~~~ begin ~~~~ else (ensure.rescue) ~~~~~~~~~~~~~~~~~~~~~ expression (rescue) ~~~~~~ keyword (ensure) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression (ensure) ~~~ end ~~~ #### Retry Format: ~~~ (retry) "retry" ~~~~~ keyword ~~~~~ expression ~~~ ### BEGIN and END Format: ~~~ (preexe (send nil :puts (str "foo"))) "BEGIN { puts "foo" }" ~~~~~ keyword ^ begin ^ end ~~~~~~~~~~~~~~~~~~~~ expression (postexe (send nil :puts (str "bar"))) "END { puts "bar" }" ~~~ keyword ^ begin ^ end ~~~~~~~~~~~~~~~~~~ expression ~~~ ## Miscellanea ### Flip-flops Format: ~~~ (iflipflop (lvar :a) (lvar :b)) "if a..b; end" ~~ operator ~~~~ expression (eflipflop (lvar :a) (lvar :b)) "if a...b; end" ~~~ operator ~~~~~ expression ~~~ ### Implicit matches Format: ~~~ (match-current-line (regexp (str "a") (regopt))) "if /a/; end" ~~~ expression ~~~ ### Local variable injecting matches Format: ~~~ (match-with-lvasgn (regexp (str "(?bar)") (regopt)) (lvar :baz)) "/(?bar)/ =~ baz" ~~ selector ~~~~~~~~~~~~~~~~~~~~~~ expression ~~~ ## Special constants ### File Format: ~~~ (__FILE__) "__FILE__" ~~~~~~~~ expression ~~~ ### Line Format: ~~~ (__LINE__) "__LINE__" ~~~~~~~~ expression ~~~ ### Encoding Format: ~~~ (__ENCODING__) "__ENCODING__" ~~~~~~~~~~~~ expression ~~~ ## Pattern matching ### Using `in` operator Ruby 2.7 throws a `NoMatchingPatternError` for `foo in bar` if given value doesn't match pattern. Format when `emit_match_pattern` compatibility attribute is disabled (the default): ~~~ (in-match (int 1) (match-var :a)) "1 in a" ~~ operator ~~~~~~ expression ~~~ Format when `emit_match_pattern` is enabled: ~~~ (match-pattern (int 1) (match-var :a)) "1 in a" ~~ operator ~~~~~~ expression ~~~ Starting from 3.0 Ruby returns `true`/`false` for the same code construction. Ruby 3.0 format (compatibility attribute has no effect): ~~~ (match-pattern-p (int 1) (match-var :a)) "1 in a" ~~ operator ~~~~~~ expression ~~~ ### Using `=>` operator This node appears in AST only starting from Ruby 3.0. Format: ~~~ (match-pattern (int 1) (match-var :a)) "1 => a" ~~ operator ~~~~~~ expression ~~~ ### Case with pattern matching #### Without else Format: ~~~ (case-match (str "str") (in-pattern (match-var :foo) (lvar :bar)) nil) "case "str"; in foo; bar; end" ~~~~ keyword ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression ~~~ #### With else Format: ~~~ (case-match, (str "str") (in-pattern (match-var :foo) (lvar :bar)) (lvar :baz)) "case "str"; in foo; bar; else; baz; end" ~~~~ keyword ~~~~ else ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression ~~~ #### With empty else Empty `else` differs from the missing (or _implicit_) `else` for pattern matching, since the latter one raises a `NoMatchingPattern` exception. Thus, we need a way to distinguish this two cases in the resulting AST. 
Format: ~~~ (case-match, (str "str") (in-pattern (match-var :foo) (lvar :bar)) (empty-else)) "case "str"; in foo; bar; else; end" ~~~~ keyword ~~~~ else ~~~ end ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression ~~~ ### In clause Format: ~~~ (in-pattern (match-var :foo) (lvar :bar)) "in foo then bar" ~~ keyword ~~~~ begin ~~~~~~~~~~~~~~~ expression ~~~ ### If guard This guard runs after matching, so it's not an `if` modifier. Format: ~~~ (in-pattern (match-var :foo) (if-guard (lvar :bar)) nil) "in foo if bar" ~~ keyword ~~~~~~ expression ~~~ ### Unless guard This guard runs after matching, so it's not an `unless` modifier. Format: ~~~ (in-pattern (match-var :foo) (unless-guard (lvar :bar)) nil) "in foo unless bar" ~~~~~~ keyword ~~~~~~~~~~ expression ~~~ ### Match variable Format: ~~~ (match-var :foo) "in foo" ~~~ name ~~~ expression ~~~ ### Match rest #### With name Format: ~~~ (match-rest (match-var :foo)) "in *foo" ~ operator ~~~~ expression ~~~ #### Without name Format: ~~~ (match-rest) "in *" ~ operator ~ expression ~~~ ### Pin operator Format: ~~~ (pin (lvar :foo)) "in ^foo" ~ selector ~~~~ expression ~~~ ### Pin operator with expression Format: ~~~ (pin (begin (send (int 2) :+ (int 2)))) "in ^(2 + 2)" ~ selector ~~~~~~~~ expression ~ begin (begin) ~ end (begin) ~~~~~~~ expression (begin) ~~~ ### Match alternative Format: ~~~ (match-alt (pin (lvar :foo)) (int 1)) "in ^foo | 1" ~ operator ~~~~~~~~ expression ~~~ ### Match with alias Format: ~~~ (match-as (int 1) (match-var :foo)) "in 1 => foo" ~~ operator ~~~~~~~~ expression ~~~ ### Match using array pattern #### Explicit Format: ~~~ (array-pattern (pin (lvar :foo)) (match-var :bar)) "in [^foo, bar]" ~ begin ~ end ~~~~~~~~~~~ expression ~~~ #### Explicit with tail Adding a trailing comma in the end works as `, *` Format: ~~~ (array-pattern-with-tail (pin (lvar :foo)) (match-var :bar)) "in [^foo, bar,]" ~ begin ~ end ~~~~~~~~~~~~ expression ~~~ #### Implicit Format: ~~~ (array-pattern (pin (lvar :foo)) (match-var :bar)) "in ^foo, bar" ~~~~~~~~~ expression ~~~ #### Implicit with tail Format: Adding a trailing comma in the end works as `, *`, so a single item match with comma gets interpreted as an array. ~~~ (array-pattern-with-tail (match-var :foo)) "in foo," ~~~~ expression ~~~ ### Matching using hash pattern #### Explicit Format: ~~~ (hash-pattern (pair (sym :a) (int 10))) "in { a: 10 }" ~ begin ~ end ~~~~~~~~~ expression ~~~ #### Implicit Format: ~~~ (hash-pattern (pair (sym :a) (int 10))) "in a: 10" ~~~~~ expression ~~~ #### Assignment using hash pattern Format: ~~~ (hash-pattern (match-var :a)) "in a:" ~ name (match-var) ~~ expression (match-var) ~~~ #### Nil hash pattern Format: ~~~ (hash-pattern (match-nil-pattern)) "in **nil" ~~~~~ expression (match-nil-pattern) ~~~ name (match-nil-pattern) ~~~ ### Matching using find pattern Format: ~~~ (find-pattern (match-rest (match-var :a)) (int 42) (match-rest)) "in [*, 42, *]" ~ begin ~ end ~~~~~~~~~~ expression ~~~ Note that it can be used as a top-level pattern only when used in a `case` statement. In that case `begin` and `end` are empty. 
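As an illustrative sketch mirroring the implicit array-pattern examples above (location annotations omitted), the bracket-less, top-level form described in this note parses to the same node:

~~~
(find-pattern (match-rest) (int 42) (match-rest))

"in *, 42, *"
~~~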
### Matching using const pattern #### With array pattern Format: ~~~ (const-pattern (const nil :X) (array-pattern (pin (lvar :foo)) (match-var :bar))) "in X[^foo bar]" ~ begin (const-pattern) ~ end (const-pattern) ~~~~~~~~~~~~ expression (const-pattern) ~ name (const-pattern.const) ~ expression (const-pattern.const) ~~~ #### With hash pattern Format: ~~~ (const-pattern (const nil :X) (hash-pattern (match-var :foo) (match-var :bar))) "in X[foo:, bar:]" ~ begin (const-pattern) ~ end (const-pattern) ~~~~~~~~~~~~~ expression (const-pattern) ~ name (const-pattern.const) ~ expression (const-pattern.const) ~~~ #### With array pattern without elements Format: ~~~ (const-pattern (const nil :X) (array-pattern)) "in X[]" ~ begin (const-pattern) ~ end (const-pattern) ~~~ expression (const-pattern) ~ name (const-pattern.const) ~ expression (const-pattern.const) ~~ expression (const-pattern.array_pattern) ~~~ #### With find pattern Format: ~~~ (const-pattern (const nil :X) (find-pattern (match-rest) (int 42) (match-rest))) "in X[*, 42, *]" ~ begin ~ end ~~~~~~~~~~~ expression ~~~ parser-3.3.4.2/doc/CUSTOMIZATION.md000066400000000000000000000023031465510415600163210ustar00rootroot00000000000000# Customizing Parsers While the default setup of the parsers provided by this Gem should be suitable for most some developers might want to change parts of it. An example would be the use of a custom class for nodes instead of `Parser::AST::Node`. Customizing the AST is done by creating a custom builder class and passing it to the constructor method of a parser. The default setup comes down to the following: builder = Parser::Builders::Default.new parser = Parser::Ruby19.new(builder) When creating your own builder class it's best to subclass the default one so that you don't have to redefine every used method again: class MyBuilder < Parser::Builders::Default end builder = MyBuilder.new parser = Parser::Ruby19.new(builder) ## Custom Node Classes To use a custom node class you have to override the method `Parser::Builders::Default#n`: class MyBuilder < Parser::Builders::Default def n(type, children, location) return MyNodeClass.new(type, children, :location => location) end end Note that the used class (and corresponding instance) must be compatible with `Parser::AST::Node` so it's best to subclass it and override/add code where needed. parser-3.3.4.2/doc/INTERNALS.md000066400000000000000000000016631465510415600156200ustar00rootroot00000000000000Entry points ------------ Parser should be kept as slim as possible. This includes not loading any potentially large files when they are likely to be unused in practice. Parser has five main (classes of) `require` entry points: * `require 'parser'`. Main entry point, requires all classes which are used across the entire library. * `require 'parser/rubyXX'`. Version-specific entry point. Can raise a NotImplementedError if current Ruby runtime is unable to parse the requested Ruby version. * `require 'parser/all'`. Requires all available parsers for released versions of Ruby. Can raise NotImplementedError. * `require 'parser/runner'`. Requires all the stuff which is useful for command-line tools but not otherwise. * `require 'parser/runner/X'`. Runner-specific entry point. All non-main entry points internally `require 'parser'`. Additionally, all runner-specific entry points internally `requre 'parser/runner'`. 
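As a minimal, illustrative sketch of the guidance above (the constant names assume the usual `Parser::RubyXX` naming; adjust to the version you target), a tool can guard a version-specific entry point and fall back to the parser matching the running Ruby:

    begin
      require 'parser/ruby33'        # version-specific entry point
      parser_class = Parser::Ruby33
    rescue NotImplementedError
      # The current Ruby runtime cannot parse the requested version;
      # fall back to the runtime's own version.
      require 'parser/current'
      parser_class = Parser::CurrentRuby
    end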
parser-3.3.4.2/doc/css/000077500000000000000000000000001465510415600145215ustar00rootroot00000000000000parser-3.3.4.2/doc/css/.gitkeep000066400000000000000000000000001465510415600161400ustar00rootroot00000000000000parser-3.3.4.2/doc/css/common.css000066400000000000000000000021111465510415600165160ustar00rootroot00000000000000body { font-size: 14px; line-height: 1.6; margin: 0 auto; max-width: 960px; } p code { background: #f2f2f2; padding-left: 3px; padding-right: 3px; } pre.code { font-size: 13px; line-height: 1.4; } /** * YARD uses generic table styles, using a special class means those tables * don't get messed up. */ .table { border: 1px solid #ccc; border-right: none; border-collapse: separate; border-spacing: 0; text-align: left; } .table.full { width: 100%; } .table .field_name { min-width: 160px; } .table thead tr th.no_sort:first-child { width: 25px; } .table thead tr th, .table tbody tr td { border-bottom: 1px solid #ccc; border-right: 1px solid #ccc; min-width: 20px; padding: 8px 5px; text-align: left; vertical-align: top; } .table tbody tr:last-child td { border-bottom: none; } .table tr:nth-child(odd) td { background: #f9f9f9; } parser-3.3.4.2/lib/000077500000000000000000000000001465510415600137325ustar00rootroot00000000000000parser-3.3.4.2/lib/gauntlet_parser.rb000066400000000000000000000051001465510415600174520ustar00rootroot00000000000000# frozen_string_literal: true require 'gauntlet' require_relative 'parser/all' require 'shellwords' class ParserGauntlet < Gauntlet RUBY20 = 'ruby' RUBY19 = 'ruby1.9.1' RUBY18 = '/opt/rubies/ruby-1.8.7-p370/bin/ruby' def try(parser, ruby, file, show_ok: false) try_ruby = lambda do |e| Process.spawn(%{#{ruby} -c #{Shellwords.escape file}}, :err => '/dev/null', :out => '/dev/null') _, status = Process.wait2 if status.success? # Bug in Parser. puts "Parser bug." @result[file] = { parser.to_s => "#{e.class}: #{e.to_s}" } else # No, this file is not Ruby. yield if block_given? end end begin parser.parse_file(file) rescue Parser::SyntaxError => e if e.diagnostic.location.resize(2).is?('<%') puts "ERb." return end try_ruby.call(e) rescue ArgumentError, RegexpError, Encoding::UndefinedConversionError => e puts "#{file}: #{e.class}: #{e.to_s}" try_ruby.call(e) rescue Interrupt raise rescue Exception => e puts "Parser bug: #{file} #{e.class}: #{e.to_s}" @result[file] = { parser.to_s => "#{e.class}: #{e.to_s}" } else puts "Ok." if show_ok end end def parse(name) puts "GEM: #{name}" @result = {} if ENV.include?('FAST') total_size = Dir["**/*.rb"].map(&File.method(:size)).reduce(:+) if total_size > 300_000 puts "Skip." return end end Dir["**/*.rb"].each do |file| next if File.directory? file try(Parser::Ruby20, RUBY20, file) do puts "Trying 1.9:" try(Parser::Ruby19, RUBY19, file, show_ok: true) do puts "Trying 1.8:" try(Parser::Ruby18, RUBY18, file, show_ok: true) do puts "Invalid syntax." end end end end @result end def run(name) data[name] = parse(name) self.dirty = true end def should_skip?(name) data[name] == {} end def load_yaml(*) data = super @was_errors = data.count { |_name, errs| errs != {} } data end def shutdown super errors = data.count { |_name, errs| errs != {} } total = data.count percent = "%.5f" % [100 - errors.to_f / total * 100] puts "!!! was: #{@was_errors} now: #{errors} total: #{total} frac: #{percent}%" end end filter = ARGV.shift filter = Regexp.new filter if filter gauntlet = ParserGauntlet.new if ENV.include? 
'UPDATE' gauntlet.source_index gauntlet.update_gem_tarballs end gauntlet.run_the_gauntlet filter parser-3.3.4.2/lib/parser.rb000066400000000000000000000054121465510415600155550ustar00rootroot00000000000000# frozen_string_literal: true if RUBY_VERSION =~ /^1\.[89]\./ require_relative 'parser/version' raise LoadError, <<-UNSUPPORTED_VERSION_MSG parser v#{Parser::VERSION} cannot run on Ruby #{RUBY_VERSION}. Please upgrade to Ruby 2.0.0 or higher, or use an older version of the parser gem. UNSUPPORTED_VERSION_MSG end require 'set' require 'racc/parser' require 'ast' ## # @api public # module Parser require_relative 'parser/version' require_relative 'parser/messages' require_relative 'parser/deprecation' module AST require_relative 'parser/ast/node' require_relative 'parser/ast/processor' require_relative 'parser/meta' end module Source require_relative 'parser/source/buffer' require_relative 'parser/source/range' require_relative 'parser/source/comment' require_relative 'parser/source/comment/associator' require_relative 'parser/source/rewriter' require_relative 'parser/source/rewriter/action' require_relative 'parser/source/tree_rewriter' require_relative 'parser/source/tree_rewriter/action' require_relative 'parser/source/map' require_relative 'parser/source/map/operator' require_relative 'parser/source/map/collection' require_relative 'parser/source/map/constant' require_relative 'parser/source/map/variable' require_relative 'parser/source/map/keyword' require_relative 'parser/source/map/definition' require_relative 'parser/source/map/method_definition' require_relative 'parser/source/map/send' require_relative 'parser/source/map/index' require_relative 'parser/source/map/condition' require_relative 'parser/source/map/ternary' require_relative 'parser/source/map/for' require_relative 'parser/source/map/rescue_body' require_relative 'parser/source/map/heredoc' require_relative 'parser/source/map/objc_kwarg' end require_relative 'parser/syntax_error' require_relative 'parser/clobbering_error' require_relative 'parser/unknown_encoding_in_magic_comment_error' require_relative 'parser/diagnostic' require_relative 'parser/diagnostic/engine' require_relative 'parser/static_environment' if RUBY_ENGINE == 'truffleruby' require_relative 'parser/lexer-F0' else require_relative 'parser/lexer-F1' end require_relative 'parser/lexer-strings' require_relative 'parser/lexer/literal' require_relative 'parser/lexer/stack_state' require_relative 'parser/lexer/dedenter' module Builders require_relative 'parser/builders/default' end require_relative 'parser/context' require_relative 'parser/max_numparam_stack' require_relative 'parser/current_arg_stack' require_relative 'parser/variables_stack' require_relative 'parser/base' require_relative 'parser/rewriter' require_relative 'parser/tree_rewriter' end parser-3.3.4.2/lib/parser/000077500000000000000000000000001465510415600152265ustar00rootroot00000000000000parser-3.3.4.2/lib/parser/all.rb000066400000000000000000000006451465510415600163300ustar00rootroot00000000000000# frozen_string_literal: true require_relative 'ruby18' require_relative 'ruby19' require_relative 'ruby20' require_relative 'ruby21' require_relative 'ruby22' require_relative 'ruby23' require_relative 'ruby24' require_relative 'ruby25' require_relative 'ruby26' require_relative 'ruby27' require_relative 'ruby30' require_relative 'ruby31' require_relative 'ruby32' require_relative 'ruby33' require_relative 'ruby34' 
parser-3.3.4.2/lib/parser/ast/000077500000000000000000000000001465510415600160155ustar00rootroot00000000000000parser-3.3.4.2/lib/parser/ast/node.rb000066400000000000000000000020141465510415600172640ustar00rootroot00000000000000# frozen_string_literal: true module Parser module AST ## # {Parser::AST::Node} contains information about a single AST node and its # child nodes. It extends the basic [AST::Node](https://www.rubydoc.info/gems/ast/AST/Node) # class provided by gem [ast](https://www.rubydoc.info/gems/ast). # # @api public # # @!attribute [r] location # Source map for this Node. # @return [Parser::Source::Map] # class Node < ::AST::Node attr_reader :location alias loc location ## # Assigns various properties to this AST node. Currently only the # location can be set. # # @param [Hash] properties # @option properties [Parser::Source::Map] :location Location information # of the node. # def assign_properties(properties) if (location = properties[:location]) location = location.dup if location.frozen? location.node = self @location = location end end end end end parser-3.3.4.2/lib/parser/ast/processor.rb000066400000000000000000000212471465510415600203670ustar00rootroot00000000000000# frozen_string_literal: true module Parser module AST ## # @api public # class Processor include ::AST::Processor::Mixin def process_regular_node(node) node.updated(nil, process_all(node)) end alias on_dstr process_regular_node alias on_dsym process_regular_node alias on_regexp process_regular_node alias on_xstr process_regular_node alias on_splat process_regular_node alias on_kwsplat process_regular_node alias on_array process_regular_node alias on_pair process_regular_node alias on_hash process_regular_node alias on_kwargs process_regular_node alias on_irange process_regular_node alias on_erange process_regular_node def on_var(node) node end # @private def process_variable_node(node) on_var(node) end alias on_lvar process_variable_node alias on_ivar process_variable_node alias on_gvar process_variable_node alias on_cvar process_variable_node alias on_back_ref process_variable_node alias on_nth_ref process_variable_node def on_vasgn(node) name, value_node = *node if !value_node.nil? node.updated(nil, [ name, process(value_node) ]) else node end end # @private def process_var_asgn_node(node) on_vasgn(node) end alias on_lvasgn process_var_asgn_node alias on_ivasgn process_var_asgn_node alias on_gvasgn process_var_asgn_node alias on_cvasgn process_var_asgn_node alias on_and_asgn process_regular_node alias on_or_asgn process_regular_node def on_op_asgn(node) var_node, method_name, value_node = *node node.updated(nil, [ process(var_node), method_name, process(value_node) ]) end alias on_mlhs process_regular_node alias on_masgn process_regular_node def on_const(node) scope_node, name = *node node.updated(nil, [ process(scope_node), name ]) end def on_casgn(node) scope_node, name, value_node = *node if !value_node.nil? node.updated(nil, [ process(scope_node), name, process(value_node) ]) else node.updated(nil, [ process(scope_node), name ]) end end alias on_args process_regular_node def on_argument(node) arg_name, value_node = *node if !value_node.nil? 
node.updated(nil, [ arg_name, process(value_node) ]) else node end end # @private def process_argument_node(node) on_argument(node) end alias on_arg process_argument_node alias on_optarg process_argument_node alias on_restarg process_argument_node alias on_blockarg process_argument_node alias on_shadowarg process_argument_node alias on_kwarg process_argument_node alias on_kwoptarg process_argument_node alias on_kwrestarg process_argument_node alias on_forward_arg process_argument_node def on_procarg0(node) if node.children[0].is_a?(Symbol) # This branch gets executed when the builder # is not configured to emit and 'arg' inside 'procarg0', i.e. when # Parser::Builders::Default.emit_arg_inside_procarg0 # is set to false. # # If this flag is set to true this branch is unreachable. # s(:procarg0, :a) on_argument(node) else # s(:procarg0, s(:arg, :a), s(:arg, :b)) process_regular_node(node) end end alias on_arg_expr process_regular_node alias on_restarg_expr process_regular_node alias on_blockarg_expr process_regular_node alias on_block_pass process_regular_node alias on_forwarded_restarg process_regular_node alias on_forwarded_kwrestarg process_regular_node alias on_module process_regular_node alias on_class process_regular_node alias on_sclass process_regular_node def on_def(node) name, args_node, body_node = *node node.updated(nil, [ name, process(args_node), process(body_node) ]) end def on_defs(node) definee_node, name, args_node, body_node = *node node.updated(nil, [ process(definee_node), name, process(args_node), process(body_node) ]) end alias on_undef process_regular_node alias on_alias process_regular_node def on_send(node) receiver_node, method_name, *arg_nodes = *node receiver_node = process(receiver_node) if receiver_node node.updated(nil, [ receiver_node, method_name, *process_all(arg_nodes) ]) end alias on_csend on_send alias on_index process_regular_node alias on_indexasgn process_regular_node alias on_block process_regular_node alias on_lambda process_regular_node def on_numblock(node) method_call, max_numparam, body = *node node.updated(nil, [ process(method_call), max_numparam, process(body) ]) end alias on_while process_regular_node alias on_while_post process_regular_node alias on_until process_regular_node alias on_until_post process_regular_node alias on_for process_regular_node alias on_return process_regular_node alias on_break process_regular_node alias on_next process_regular_node alias on_redo process_regular_node alias on_retry process_regular_node alias on_super process_regular_node alias on_yield process_regular_node alias on_defined? 
process_regular_node alias on_not process_regular_node alias on_and process_regular_node alias on_or process_regular_node alias on_if process_regular_node alias on_when process_regular_node alias on_case process_regular_node alias on_iflipflop process_regular_node alias on_eflipflop process_regular_node alias on_match_current_line process_regular_node alias on_match_with_lvasgn process_regular_node alias on_resbody process_regular_node alias on_rescue process_regular_node alias on_ensure process_regular_node alias on_begin process_regular_node alias on_kwbegin process_regular_node alias on_preexe process_regular_node alias on_postexe process_regular_node alias on_case_match process_regular_node alias on_in_match process_regular_node alias on_match_pattern process_regular_node alias on_match_pattern_p process_regular_node alias on_in_pattern process_regular_node alias on_if_guard process_regular_node alias on_unless_guard process_regular_node alias on_match_var process_variable_node alias on_match_rest process_regular_node alias on_pin process_regular_node alias on_match_alt process_regular_node alias on_match_as process_regular_node alias on_array_pattern process_regular_node alias on_array_pattern_with_tail process_regular_node alias on_hash_pattern process_regular_node alias on_const_pattern process_regular_node alias on_find_pattern process_regular_node # @private def process_variable_node(node) warn 'Parser::AST::Processor#process_variable_node is deprecated as a' \ ' public API and will be removed. Please use ' \ 'Parser::AST::Processor#on_var instead.' on_var(node) end # @private def process_var_asgn_node(node) warn 'Parser::AST::Processor#process_var_asgn_node is deprecated as a' \ ' public API and will be removed. Please use ' \ 'Parser::AST::Processor#on_vasgn instead.' on_vasgn(node) end # @private def process_argument_node(node) warn 'Parser::AST::Processor#process_argument_node is deprecated as a' \ ' public API and will be removed. Please use ' \ 'Parser::AST::Processor#on_argument instead.' on_argument(node) end def on_empty_else(node) node end end end end parser-3.3.4.2/lib/parser/base.rb000066400000000000000000000177341465510415600165010ustar00rootroot00000000000000# frozen_string_literal: true module Parser ## # Base class for version-specific parsers. # # @api public # # @!attribute [r] diagnostics # @return [Parser::Diagnostic::Engine] # # @!attribute [r] static_env # @return [Parser::StaticEnvironment] # class Base < Racc::Parser ## # Parses a string of Ruby code and returns the AST. If the source # cannot be parsed, {SyntaxError} is raised and a diagnostic is # printed to `stderr`. # # @example # Parser::Base.parse('puts "hello"') # # @param [String] string The block of code to parse. # @param [String] file The name of the file the code originated from. # @param [Numeric] line The initial line number. # @return [Parser::AST::Node] # def self.parse(string, file='(string)', line=1) parser = default_parser source_buffer = setup_source_buffer(file, line, string, parser.default_encoding) parser.parse(source_buffer) end ## # Parses a string of Ruby code and returns the AST and comments. If the # source cannot be parsed, {SyntaxError} is raised and a diagnostic is # printed to `stderr`. # # @example # Parser::Base.parse_with_comments('puts "hello"') # # @param [String] string The block of code to parse. # @param [String] file The name of the file the code originated from. # @param [Numeric] line The initial line number. 
# @return [Array] # def self.parse_with_comments(string, file='(string)', line=1) parser = default_parser source_buffer = setup_source_buffer(file, line, string, parser.default_encoding) parser.parse_with_comments(source_buffer) end ## # Parses Ruby source code by reading it from a file. If the source # cannot be parsed, {SyntaxError} is raised and a diagnostic is # printed to `stderr`. # # @param [String] filename Path to the file to parse. # @return [Parser::AST::Node] # @see #parse # def self.parse_file(filename) parse(File.read(filename), filename) end ## # Parses Ruby source code by reading it from a file and returns the AST and # comments. If the source cannot be parsed, {SyntaxError} is raised and a # diagnostic is printed to `stderr`. # # @param [String] filename Path to the file to parse. # @return [Array] # @see #parse # def self.parse_file_with_comments(filename) parse_with_comments(File.read(filename), filename) end ## # @return [Parser::Base] parser with the default options set. # def self.default_parser parser = new parser.diagnostics.all_errors_are_fatal = true parser.diagnostics.ignore_warnings = true parser.diagnostics.consumer = lambda do |diagnostic| $stderr.puts(diagnostic.render) end parser end def self.setup_source_buffer(file, line, string, encoding) string = string.dup.force_encoding(encoding) source_buffer = Source::Buffer.new(file, line) if name == 'Parser::Ruby18' source_buffer.raw_source = string else source_buffer.source = string end source_buffer end private_class_method :setup_source_buffer attr_reader :lexer attr_reader :diagnostics attr_reader :builder attr_reader :static_env attr_reader :source_buffer attr_reader :context attr_reader :max_numparam_stack attr_reader :current_arg_stack attr_reader :pattern_variables attr_reader :pattern_hash_keys ## # @param [Parser::Builders::Default] builder The AST builder to use. # def initialize(builder=Parser::Builders::Default.new) @diagnostics = Diagnostic::Engine.new @static_env = StaticEnvironment.new # Stack that holds current parsing context @context = Context.new # Maximum numbered parameters stack @max_numparam_stack = MaxNumparamStack.new # Current argument names stack @current_arg_stack = CurrentArgStack.new # Stack of set of variables used in the current pattern @pattern_variables = VariablesStack.new # Stack of set of keys used in the current hash in pattern matchinig @pattern_hash_keys = VariablesStack.new @lexer = Lexer.new(version) @lexer.diagnostics = @diagnostics @lexer.static_env = @static_env @lexer.context = @context @builder = builder @builder.parser = self # Last emitted token @last_token = nil if self.class::Racc_debug_parser && ENV['RACC_DEBUG'] @yydebug = true end reset end ## # Resets the state of the parser. # def reset @source_buffer = nil @lexer.reset @static_env.reset @context.reset @current_arg_stack.reset @pattern_variables.reset @pattern_hash_keys.reset self end ## # Parses a source buffer and returns the AST, or `nil` in case of a non fatal error. # # @param [Parser::Source::Buffer] source_buffer The source buffer to parse. # @return [Parser::AST::Node, nil] # def parse(source_buffer) @lexer.source_buffer = source_buffer @source_buffer = source_buffer do_parse || nil # Force `false` to `nil`, see https://github.com/ruby/racc/pull/136 ensure # Don't keep references to the source file. @source_buffer = nil @lexer.source_buffer = nil end ## # Parses a source buffer and returns the AST and the source code comments. 
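#
# @example Illustrative sketch (not part of the original docs; the buffer name and source below are made up)
#   buffer = Parser::Source::Buffer.new('(example)', source: 'a = 1')
#   ast, comments = Parser::CurrentRuby.default_parser.parse_with_comments(buffer)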
# # @see #parse # @see Parser::Source::Comment#associate # @return [Array] # def parse_with_comments(source_buffer) @lexer.comments = [] [ parse(source_buffer), @lexer.comments ] ensure @lexer.comments = nil end ## # Parses a source buffer and returns the AST, the source code comments, # and the tokens emitted by the lexer. In case of a fatal error, a {SyntaxError} # is raised, unless `recover` is true. In case of an error # (non-fatal or recovered), `nil` is returned instead of the AST, and # comments as well as tokens are only returned up to the location of # the error. # # Currently, token stream format returned by #tokenize is not documented, # but is considered part of a public API and only changed according # to Semantic Versioning. # # However, note that the exact token composition of various constructs # might vary. For example, a string `"foo"` is represented equally well # by `:tSTRING_BEG " :tSTRING_CONTENT foo :tSTRING_END "` and # `:tSTRING "foo"`; such details must not be relied upon. # # @param [Parser::Source::Buffer] source_buffer # @param [Boolean] recover If true, recover from syntax errors. False by default. # @return [Array] # def tokenize(source_buffer, recover=false) @lexer.tokens = [] @lexer.comments = [] begin ast = parse(source_buffer) rescue Parser::SyntaxError raise if !recover end [ ast, @lexer.comments, @lexer.tokens ] ensure @lexer.tokens = nil @lexer.comments = nil end private def next_token token = @lexer.advance @last_token = token token end def check_kwarg_name(name_t) case name_t[0] when /^[a-z_]/ # OK when /^[A-Z]/ diagnostic :error, :argument_const, nil, name_t end end def diagnostic(level, reason, arguments, location_t, highlights_ts=[]) _, location = location_t highlights = highlights_ts.map do |token| _, range = token range end @diagnostics.process( Diagnostic.new(level, reason, arguments, location, highlights)) if level == :error yyerror end end def on_error(error_token_id, error_value, value_stack) token_name = token_to_str(error_token_id) _, location = error_value @diagnostics.process(Diagnostic.new( :error, :unexpected_token, { :token => token_name }, location)) end end end parser-3.3.4.2/lib/parser/builders/000077500000000000000000000000001465510415600170375ustar00rootroot00000000000000parser-3.3.4.2/lib/parser/builders/default.rb000066400000000000000000001735371465510415600210300ustar00rootroot00000000000000# frozen_string_literal: true module Parser ## # Default AST builder. Uses {AST::Node}s. # class Builders::Default class << self ## # AST compatibility attribute; since `-> {}` is not semantically # equivalent to `lambda {}`, all new code should set this attribute # to true. # # If set to false (the default), `-> {}` is emitted as # `s(:block, s(:send, nil, :lambda), s(:args), nil)`. # # If set to true, `-> {}` is emitted as # `s(:block, s(:lambda), s(:args), nil)`. # # @return [Boolean] attr_accessor :emit_lambda end @emit_lambda = false class << self ## # AST compatibility attribute; block arguments of `m { |a| }` are # not semantically equivalent to block arguments of `m { |a,| }` or `m { |a, b| }`, # all new code should set this attribute to true. # # If set to false (the default), arguments of `m { |a| }` are emitted as # `s(:args, s(:arg, :a))`. # # If set to true, arguments of `m { |a| }` are emitted as # `s(:args, s(:procarg0, :a)). 
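#
# @example Opting in (illustrative; the docs above advise new code to enable this)
#   Parser::Builders::Default.emit_procarg0 = true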
# # @return [Boolean] attr_accessor :emit_procarg0 end @emit_procarg0 = false class << self ## # AST compatibility attribute; locations of `__ENCODING__` are not the same # as locations of `Encoding::UTF_8` causing problems during rewriting, # all new code should set this attribute to true. # # If set to false (the default), `__ENCODING__` is emitted as # ` s(:const, s(:const, nil, :Encoding), :UTF_8)`. # # If set to true, `__ENCODING__` is emitted as # `s(:__ENCODING__)`. # # @return [Boolean] attr_accessor :emit_encoding end @emit_encoding = false class << self ## # AST compatibility attribute; indexed assignment, `x[] = 1`, is not # semantically equivalent to calling the method directly, `x.[]=(1)`. # Specifically, in the former case, the expression's value is always 1, # and in the latter case, the expression's value is the return value # of the `[]=` method. # # If set to false (the default), `self[1]` is emitted as # `s(:send, s(:self), :[], s(:int, 1))`, and `self[1] = 2` is # emitted as `s(:send, s(:self), :[]=, s(:int, 1), s(:int, 2))`. # # If set to true, `self[1]` is emitted as # `s(:index, s(:self), s(:int, 1))`, and `self[1] = 2` is # emitted as `s(:indexasgn, s(:self), s(:int, 1), s(:int, 2))`. # # @return [Boolean] attr_accessor :emit_index end @emit_index = false class << self ## # AST compatibility attribute; causes a single non-mlhs # block argument to be wrapped in s(:procarg0). # # If set to false (the default), block arguments `|a|` are emitted as # `s(:args, s(:procarg0, :a))` # # If set to true, block arguments `|a|` are emitted as # `s(:args, s(:procarg0, s(:arg, :a))` # # @return [Boolean] attr_accessor :emit_arg_inside_procarg0 end @emit_arg_inside_procarg0 = false class << self ## # AST compatibility attribute; arguments forwarding initially # didn't have support for leading arguments # (i.e. `def m(a, ...); end` was a syntax error). However, Ruby 3.0 # added support for any number of arguments in front of the `...`. # # If set to false (the default): # 1. `def m(...) end` is emitted as # s(:def, :m, s(:forward_args), nil) # 2. `def m(a, b, ...) end` is emitted as # s(:def, :m, # s(:args, s(:arg, :a), s(:arg, :b), s(:forward_arg))) # # If set to true it uses a single format: # 1. `def m(...) end` is emitted as # s(:def, :m, s(:args, s(:forward_arg))) # 2. `def m(a, b, ...) end` is emitted as # s(:def, :m, s(:args, s(:arg, :a), s(:arg, :b), s(:forward_arg))) # # It does't matter that much on 2.7 (because there can't be any leading arguments), # but on 3.0 it should be better enabled to use a single AST format. # # @return [Boolean] attr_accessor :emit_forward_arg end @emit_forward_arg = false class << self ## # AST compatibility attribute; Starting from Ruby 2.7 keyword arguments # of method calls that are passed explicitly as a hash (i.e. with curly braces) # are treated as positional arguments and Ruby 2.7 emits a warning on such method # call. Ruby 3.0 given an ArgumentError. 
# # If set to false (the default) the last hash argument is emitted as `hash`: # # ``` # (send nil :foo # (hash # (pair # (sym :bar) # (int 42)))) # ``` # # If set to true it is emitted as `kwargs`: # # ``` # (send nil :foo # (kwargs # (pair # (sym :bar) # (int 42)))) # ``` # # Note that `kwargs` node is just a replacement for `hash` argument, # so if there's are multiple arguments (or a `kwsplat`) all of them # are wrapped into `kwargs` instead of `hash`: # # ``` # (send nil :foo # (kwargs # (pair # (sym :a) # (int 42)) # (kwsplat # (send nil :b)) # (pair # (sym :c) # (int 10)))) # ``` attr_accessor :emit_kwargs end @emit_kwargs = false class << self ## # AST compatibility attribute; Starting from 3.0 Ruby returns # true/false from single-line pattern matching with `in` keyword. # # Before 3.0 there was an exception if given value doesn't match pattern. # # NOTE: This attribute affects only Ruby 2.7 grammar. # 3.0 grammar always emits `match_pattern`/`match_pattern_p` # # If compatibility attribute set to false `foo in bar` is emitted as `in_match`: # # ``` # (in-match # (send nil :foo) # (match-var :bar)) # ``` # # If set to true it's emitted as `match_pattern_p`: # ``` # (match-pattern-p # (send nil :foo) # (match-var :bar)) # ``` attr_accessor :emit_match_pattern end @emit_match_pattern = false class << self ## # @api private def modernize @emit_lambda = true @emit_procarg0 = true @emit_encoding = true @emit_index = true @emit_arg_inside_procarg0 = true @emit_forward_arg = true @emit_kwargs = true @emit_match_pattern = true end end ## # @api private attr_accessor :parser ## # If set to true (the default), `__FILE__` and `__LINE__` are transformed to # literal nodes. For example, `s(:str, "lib/foo.rb")` and `s(:int, 10)`. # # If set to false, `__FILE__` and `__LINE__` are emitted as-is, # i.e. as `s(:__FILE__)` and `s(:__LINE__)` nodes. # # Source maps are identical in both cases. # # @return [Boolean] attr_accessor :emit_file_line_as_literals ## # Initializes attributes: # # * `emit_file_line_as_literals`: `true` def initialize @emit_file_line_as_literals = true end # @!parse private # # Literals # # Singletons def nil(nil_t) n0(:nil, token_map(nil_t)) end def true(true_t) n0(:true, token_map(true_t)) end def false(false_t) n0(:false, token_map(false_t)) end # Numerics def integer(integer_t) numeric(:int, integer_t) end def float(float_t) numeric(:float, float_t) end def rational(rational_t) numeric(:rational, rational_t) end def complex(complex_t) numeric(:complex, complex_t) end def numeric(kind, token) n(kind, [ value(token) ], Source::Map::Operator.new(nil, loc(token))) end private :numeric def unary_num(unary_t, numeric) value, = *numeric operator_loc = loc(unary_t) case value(unary_t) when '+' value = +value when '-' value = -value end numeric.updated(nil, [ value ], :location => Source::Map::Operator.new( operator_loc, operator_loc.join(numeric.loc.expression))) end def __LINE__(__LINE__t) n0(:__LINE__, token_map(__LINE__t)) end # Strings def string(string_t) n(:str, [ string_value(string_t) ], delimited_string_map(string_t)) end def string_internal(string_t) n(:str, [ string_value(string_t) ], unquoted_map(string_t)) end def string_compose(begin_t, parts, end_t) if collapse_string_parts?(parts) if begin_t.nil? && end_t.nil? 
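# With no delimiter tokens and a single collapsible part, reuse that part node unchanged.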
parts.first else n(:str, parts.first.children, string_map(begin_t, parts, end_t)) end else n(:dstr, [ *parts ], string_map(begin_t, parts, end_t)) end end def character(char_t) n(:str, [ string_value(char_t) ], prefix_string_map(char_t)) end def __FILE__(__FILE__t) n0(:__FILE__, token_map(__FILE__t)) end # Symbols def symbol(symbol_t) n(:sym, [ string_value(symbol_t).to_sym ], prefix_string_map(symbol_t)) end def symbol_internal(symbol_t) n(:sym, [ string_value(symbol_t).to_sym ], unquoted_map(symbol_t)) end def symbol_compose(begin_t, parts, end_t) if collapse_string_parts?(parts) str = parts.first n(:sym, [ str.children.first.to_sym ], collection_map(begin_t, str.loc.expression, end_t)) elsif @parser.version == 18 && parts.empty? diagnostic :error, :empty_symbol, nil, loc(begin_t).join(loc(end_t)) else n(:dsym, [ *parts ], collection_map(begin_t, parts, end_t)) end end # Executable strings def xstring_compose(begin_t, parts, end_t) n(:xstr, [ *parts ], string_map(begin_t, parts, end_t)) end # Indented (interpolated, noninterpolated, executable) strings def dedent_string(node, dedent_level) if !dedent_level.nil? dedenter = Lexer::Dedenter.new(dedent_level) case node.type when :str str = node.children.first dedenter.dedent(str) when :dstr, :xstr children = node.children.map do |str_node| if str_node.type == :str str = str_node.children.first dedenter.dedent(str) next nil if str.empty? else dedenter.interrupt end str_node end node = node.updated(nil, children.compact) end end node end # Regular expressions def regexp_options(regopt_t) options = value(regopt_t). each_char.sort.uniq. map(&:to_sym) n(:regopt, options, token_map(regopt_t)) end def regexp_compose(begin_t, parts, end_t, options) begin static_regexp(parts, options) rescue RegexpError, Encoding::UndefinedConversionError => e diagnostic :error, :invalid_regexp, { :message => e.message }, loc(begin_t).join(loc(end_t)) end n(:regexp, (parts << options), regexp_map(begin_t, end_t, options)) end # Arrays def array(begin_t, elements, end_t) n(:array, elements, collection_map(begin_t, elements, end_t)) end def splat(star_t, arg=nil) if arg.nil? n0(:splat, unary_op_map(star_t)) else n(:splat, [ arg ], unary_op_map(star_t, arg)) end end def word(parts) if collapse_string_parts?(parts) parts.first else n(:dstr, [ *parts ], collection_map(nil, parts, nil)) end end def words_compose(begin_t, parts, end_t) n(:array, [ *parts ], collection_map(begin_t, parts, end_t)) end def symbols_compose(begin_t, parts, end_t) parts = parts.map do |part| case part.type when :str value, = *part part.updated(:sym, [ value.to_sym ]) when :dstr part.updated(:dsym) else part end end n(:array, [ *parts ], collection_map(begin_t, parts, end_t)) end # Hashes def pair(key, assoc_t, value) n(:pair, [ key, value ], binary_op_map(key, assoc_t, value)) end def pair_list_18(list) if list.size % 2 != 0 diagnostic :error, :odd_hash, nil, list.last.loc.expression else list. 
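# Even-length list: consecutive (key, value) elements are folded into (pair ...) nodes.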
each_slice(2).map do |key, value| n(:pair, [ key, value ], binary_op_map(key, nil, value)) end end end def pair_keyword(key_t, value) key_map, pair_map = pair_keyword_map(key_t, value) key = n(:sym, [ value(key_t).to_sym ], key_map) n(:pair, [ key, value ], pair_map) end def pair_quoted(begin_t, parts, end_t, value) end_t, pair_map = pair_quoted_map(begin_t, end_t, value) key = symbol_compose(begin_t, parts, end_t) n(:pair, [ key, value ], pair_map) end def pair_label(key_t) key_l = loc(key_t) value_l = key_l.adjust(end_pos: -1) label = value(key_t) value = if label =~ /\A[[:lower:]]/ n(:ident, [ label.to_sym ], Source::Map::Variable.new(value_l)) else n(:const, [ nil, label.to_sym ], Source::Map::Constant.new(nil, value_l, value_l)) end pair_keyword(key_t, accessible(value)) end def kwsplat(dstar_t, arg) n(:kwsplat, [ arg ], unary_op_map(dstar_t, arg)) end def associate(begin_t, pairs, end_t) key_set = Set.new pairs.each do |pair| next unless pair.type.eql?(:pair) key, = *pair case key.type when :sym, :str, :int, :float when :rational, :complex, :regexp next unless @parser.version >= 31 else next end unless key_set.add?(key) diagnostic :warning, :duplicate_hash_key, nil, key.loc.expression end end n(:hash, [ *pairs ], collection_map(begin_t, pairs, end_t)) end # Ranges def range_inclusive(lhs, dot2_t, rhs) n(:irange, [ lhs, rhs ], range_map(lhs, dot2_t, rhs)) end def range_exclusive(lhs, dot3_t, rhs) n(:erange, [ lhs, rhs ], range_map(lhs, dot3_t, rhs)) end # # Access # def self(token) n0(:self, token_map(token)) end def ident(token) n(:ident, [ value(token).to_sym ], variable_map(token)) end def ivar(token) n(:ivar, [ value(token).to_sym ], variable_map(token)) end def gvar(token) gvar_name = value(token) if gvar_name.start_with?('$0') && gvar_name.length > 2 diagnostic :error, :gvar_name, { :name => gvar_name }, loc(token) end n(:gvar, [ gvar_name.to_sym ], variable_map(token)) end def cvar(token) n(:cvar, [ value(token).to_sym ], variable_map(token)) end def back_ref(token) n(:back_ref, [ value(token).to_sym ], token_map(token)) end def nth_ref(token) n(:nth_ref, [ value(token) ], token_map(token)) end def accessible(node) case node.type when :__FILE__ if @emit_file_line_as_literals n(:str, [ node.loc.expression.source_buffer.name ], node.loc.dup) else node end when :__LINE__ if @emit_file_line_as_literals n(:int, [ node.loc.expression.line ], node.loc.dup) else node end when :__ENCODING__ if !self.class.emit_encoding n(:const, [ n(:const, [ nil, :Encoding], nil), :UTF_8 ], node.loc.dup) else node end when :ident name, = *node if %w[? !].any? { |c| name.to_s.end_with?(c) } diagnostic :error, :invalid_id_to_get, { :identifier => name.to_s }, node.loc.expression end # Numbered parameters are not declared anywhere, # so they take precedence over method calls in numblock contexts if @parser.version >= 27 && @parser.try_declare_numparam(node) return node.updated(:lvar) end unless @parser.static_env.declared?(name) if @parser.version == 33 && name == :it && @parser.context.in_block && !@parser.max_numparam_stack.has_ordinary_params? 
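# Ruby 3.3: a bare `it` inside a block with no ordinary parameters is flagged as
# ambiguous before falling through to an ordinary method-call node below.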
diagnostic :warning, :ambiguous_it_call, nil, node.loc.expression end return n(:send, [ nil, name ], var_send_map(node)) end if name.to_s == parser.current_arg_stack.top diagnostic :error, :circular_argument_reference, { :var_name => name.to_s }, node.loc.expression end node.updated(:lvar) else node end end def const(name_t) n(:const, [ nil, value(name_t).to_sym ], constant_map(nil, nil, name_t)) end def const_global(t_colon3, name_t) cbase = n0(:cbase, token_map(t_colon3)) n(:const, [ cbase, value(name_t).to_sym ], constant_map(cbase, t_colon3, name_t)) end def const_fetch(scope, t_colon2, name_t) n(:const, [ scope, value(name_t).to_sym ], constant_map(scope, t_colon2, name_t)) end def __ENCODING__(__ENCODING__t) n0(:__ENCODING__, token_map(__ENCODING__t)) end # # Assignment # def assignable(node) case node.type when :cvar node.updated(:cvasgn) when :ivar node.updated(:ivasgn) when :gvar node.updated(:gvasgn) when :const if @parser.context.in_def diagnostic :error, :dynamic_const, nil, node.loc.expression end node.updated(:casgn) when :ident name, = *node var_name = node.children[0].to_s name_loc = node.loc.expression check_assignment_to_numparam(var_name, name_loc) check_reserved_for_numparam(var_name, name_loc) @parser.static_env.declare(name) node.updated(:lvasgn) when :match_var name, = *node var_name = node.children[0].to_s name_loc = node.loc.expression check_assignment_to_numparam(var_name, name_loc) check_reserved_for_numparam(var_name, name_loc) node when :nil, :self, :true, :false, :__FILE__, :__LINE__, :__ENCODING__ diagnostic :error, :invalid_assignment, nil, node.loc.expression when :back_ref, :nth_ref diagnostic :error, :backref_assignment, nil, node.loc.expression end end def const_op_assignable(node) node.updated(:casgn) end def assign(lhs, eql_t, rhs) (lhs << rhs).updated(nil, nil, :location => lhs.loc. with_operator(loc(eql_t)). with_expression(join_exprs(lhs, rhs))) end def op_assign(lhs, op_t, rhs) case lhs.type when :gvasgn, :ivasgn, :lvasgn, :cvasgn, :casgn, :send, :csend, :index operator = value(op_t)[0..-1].to_sym source_map = lhs.loc. with_operator(loc(op_t)). 
with_expression(join_exprs(lhs, rhs)) if lhs.type == :index lhs = lhs.updated(:indexasgn) end case operator when :'&&' n(:and_asgn, [ lhs, rhs ], source_map) when :'||' n(:or_asgn, [ lhs, rhs ], source_map) else n(:op_asgn, [ lhs, operator, rhs ], source_map) end when :back_ref, :nth_ref diagnostic :error, :backref_assignment, nil, lhs.loc.expression end end def multi_lhs(begin_t, items, end_t) n(:mlhs, [ *items ], collection_map(begin_t, items, end_t)) end def multi_assign(lhs, eql_t, rhs) n(:masgn, [ lhs, rhs ], binary_op_map(lhs, eql_t, rhs)) end # # Class and module definition # def def_class(class_t, name, lt_t, superclass, body, end_t) n(:class, [ name, superclass, body ], module_definition_map(class_t, name, lt_t, end_t)) end def def_sclass(class_t, lshft_t, expr, body, end_t) n(:sclass, [ expr, body ], module_definition_map(class_t, nil, lshft_t, end_t)) end def def_module(module_t, name, body, end_t) n(:module, [ name, body ], module_definition_map(module_t, name, nil, end_t)) end # # Method (un)definition # def def_method(def_t, name_t, args, body, end_t) check_reserved_for_numparam(value(name_t), loc(name_t)) n(:def, [ value(name_t).to_sym, args, body ], definition_map(def_t, nil, name_t, end_t)) end def def_endless_method(def_t, name_t, args, assignment_t, body) check_reserved_for_numparam(value(name_t), loc(name_t)) n(:def, [ value(name_t).to_sym, args, body ], endless_definition_map(def_t, nil, name_t, assignment_t, body)) end def def_singleton(def_t, definee, dot_t, name_t, args, body, end_t) validate_definee(definee) check_reserved_for_numparam(value(name_t), loc(name_t)) n(:defs, [ definee, value(name_t).to_sym, args, body ], definition_map(def_t, dot_t, name_t, end_t)) end def def_endless_singleton(def_t, definee, dot_t, name_t, args, assignment_t, body) validate_definee(definee) check_reserved_for_numparam(value(name_t), loc(name_t)) n(:defs, [ definee, value(name_t).to_sym, args, body ], endless_definition_map(def_t, dot_t, name_t, assignment_t, body)) end def undef_method(undef_t, names) n(:undef, [ *names ], keyword_map(undef_t, nil, names, nil)) end def alias(alias_t, to, from) n(:alias, [ to, from ], keyword_map(alias_t, nil, [to, from], nil)) end # # Formal arguments # def args(begin_t, args, end_t, check_args=true) args = check_duplicate_args(args) if check_args validate_no_forward_arg_after_restarg(args) map = collection_map(begin_t, args, end_t) if !self.class.emit_forward_arg && args.length == 1 && args[0].type == :forward_arg n(:forward_args, [], map) else n(:args, args, map) end end def numargs(max_numparam) n(:numargs, [ max_numparam ], nil) end def forward_only_args(begin_t, dots_t, end_t) if self.class.emit_forward_arg arg = forward_arg(dots_t) n(:args, [ arg ], collection_map(begin_t, [ arg ], end_t)) else n(:forward_args, [], collection_map(begin_t, token_map(dots_t), end_t)) end end def forward_arg(dots_t) n(:forward_arg, [], token_map(dots_t)) end def arg(name_t) check_reserved_for_numparam(value(name_t), loc(name_t)) n(:arg, [ value(name_t).to_sym ], variable_map(name_t)) end def optarg(name_t, eql_t, value) check_reserved_for_numparam(value(name_t), loc(name_t)) n(:optarg, [ value(name_t).to_sym, value ], variable_map(name_t). with_operator(loc(eql_t)). 
with_expression(loc(name_t).join(value.loc.expression))) end def restarg(star_t, name_t=nil) if name_t check_reserved_for_numparam(value(name_t), loc(name_t)) n(:restarg, [ value(name_t).to_sym ], arg_prefix_map(star_t, name_t)) else n0(:restarg, arg_prefix_map(star_t)) end end def kwarg(name_t) check_reserved_for_numparam(value(name_t), loc(name_t)) n(:kwarg, [ value(name_t).to_sym ], kwarg_map(name_t)) end def kwoptarg(name_t, value) check_reserved_for_numparam(value(name_t), loc(name_t)) n(:kwoptarg, [ value(name_t).to_sym, value ], kwarg_map(name_t, value)) end def kwrestarg(dstar_t, name_t=nil) if name_t check_reserved_for_numparam(value(name_t), loc(name_t)) n(:kwrestarg, [ value(name_t).to_sym ], arg_prefix_map(dstar_t, name_t)) else n0(:kwrestarg, arg_prefix_map(dstar_t)) end end def kwnilarg(dstar_t, nil_t) n0(:kwnilarg, arg_prefix_map(dstar_t, nil_t)) end def shadowarg(name_t) check_reserved_for_numparam(value(name_t), loc(name_t)) n(:shadowarg, [ value(name_t).to_sym ], variable_map(name_t)) end def blockarg(amper_t, name_t) if !name_t.nil? check_reserved_for_numparam(value(name_t), loc(name_t)) end arg_name = name_t ? value(name_t).to_sym : nil n(:blockarg, [ arg_name ], arg_prefix_map(amper_t, name_t)) end def procarg0(arg) if self.class.emit_procarg0 if arg.type == :arg && self.class.emit_arg_inside_procarg0 n(:procarg0, [ arg ], Source::Map::Collection.new(nil, nil, arg.location.expression)) else arg.updated(:procarg0) end else arg end end # Ruby 1.8 block arguments def arg_expr(expr) if expr.type == :lvasgn expr.updated(:arg) else n(:arg_expr, [ expr ], expr.loc.dup) end end def restarg_expr(star_t, expr=nil) if expr.nil? n0(:restarg, token_map(star_t)) elsif expr.type == :lvasgn expr.updated(:restarg) else n(:restarg_expr, [ expr ], expr.loc.dup) end end def blockarg_expr(amper_t, expr) if expr.type == :lvasgn expr.updated(:blockarg) else n(:blockarg_expr, [ expr ], expr.loc.dup) end end # MacRuby Objective-C arguments def objc_kwarg(kwname_t, assoc_t, name_t) kwname_l = loc(kwname_t) if assoc_t.nil? # a: b, not a => b kwname_l = kwname_l.resize(kwname_l.size - 1) operator_l = kwname_l.end.resize(1) else operator_l = loc(assoc_t) end n(:objc_kwarg, [ value(kwname_t).to_sym, value(name_t).to_sym ], Source::Map::ObjcKwarg.new(kwname_l, operator_l, loc(name_t), kwname_l.join(loc(name_t)))) end def objc_restarg(star_t, name=nil) if name.nil? n0(:restarg, arg_prefix_map(star_t)) elsif name.type == :arg # regular restarg name.updated(:restarg, nil, { :location => name.loc.with_operator(loc(star_t)) }) else # restarg with objc_kwarg inside n(:objc_restarg, [ name ], unary_op_map(star_t, name)) end end # # Method calls # def call_type_for_dot(dot_t) if !dot_t.nil? && value(dot_t) == :anddot :csend else # This case is a bit tricky. ruby23.y returns the token tDOT with # the value :dot, and the token :tANDDOT with the value :anddot. # # But, ruby{18..22}.y (which unconditionally expect tDOT) just # return "." there, since they are to be kept close to the corresponding # Ruby MRI grammars. # # Thankfully, we don't have to care. :send end end def forwarded_args(dots_t) n(:forwarded_args, [], token_map(dots_t)) end def forwarded_restarg(star_t) n(:forwarded_restarg, [], token_map(star_t)) end def forwarded_kwrestarg(dstar_t) n(:forwarded_kwrestarg, [], token_map(dstar_t)) end def call_method(receiver, dot_t, selector_t, lparen_t=nil, args=[], rparen_t=nil) type = call_type_for_dot(dot_t) if self.class.emit_kwargs rewrite_hash_args_to_kwargs(args) end if selector_t.nil? 
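# No selector token means the bare-proc call syntax `foo.(args)`, which is emitted
# with the implicit :call selector.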
n(type, [ receiver, :call, *args ], send_map(receiver, dot_t, nil, lparen_t, args, rparen_t)) else n(type, [ receiver, value(selector_t).to_sym, *args ], send_map(receiver, dot_t, selector_t, lparen_t, args, rparen_t)) end end def call_lambda(lambda_t) if self.class.emit_lambda n0(:lambda, expr_map(loc(lambda_t))) else n(:send, [ nil, :lambda ], send_map(nil, nil, lambda_t)) end end def block(method_call, begin_t, args, body, end_t) _receiver, _selector, *call_args = *method_call if method_call.type == :yield diagnostic :error, :block_given_to_yield, nil, method_call.loc.keyword, [loc(begin_t)] end last_arg = call_args.last if last_arg && (last_arg.type == :block_pass || last_arg.type == :forwarded_args) diagnostic :error, :block_and_blockarg, nil, last_arg.loc.expression, [loc(begin_t)] end if args.type == :numargs block_type = :numblock args = args.children[0] else block_type = :block end if [:send, :csend, :index, :super, :zsuper, :lambda].include?(method_call.type) n(block_type, [ method_call, args, body ], block_map(method_call.loc.expression, begin_t, end_t)) else # Code like "return foo 1 do end" is reduced in a weird sequence. # Here, method_call is actually (return). actual_send, = *method_call block = n(block_type, [ actual_send, args, body ], block_map(actual_send.loc.expression, begin_t, end_t)) n(method_call.type, [ block ], method_call.loc.with_expression(join_exprs(method_call, block))) end end def block_pass(amper_t, arg) n(:block_pass, [ arg ], unary_op_map(amper_t, arg)) end def objc_varargs(pair, rest_of_varargs) value, first_vararg = *pair vararg_array = array(nil, [ first_vararg, *rest_of_varargs ], nil). updated(:objc_varargs) pair.updated(nil, [ value, vararg_array ], { :location => pair.loc.with_expression( pair.loc.expression.join(vararg_array.loc.expression)) }) end def attr_asgn(receiver, dot_t, selector_t) method_name = (value(selector_t) + '=').to_sym type = call_type_for_dot(dot_t) # Incomplete method call. n(type, [ receiver, method_name ], send_map(receiver, dot_t, selector_t)) end def index(receiver, lbrack_t, indexes, rbrack_t) if self.class.emit_kwargs rewrite_hash_args_to_kwargs(indexes) end if self.class.emit_index n(:index, [ receiver, *indexes ], index_map(receiver, lbrack_t, rbrack_t)) else n(:send, [ receiver, :[], *indexes ], send_index_map(receiver, lbrack_t, rbrack_t)) end end def index_asgn(receiver, lbrack_t, indexes, rbrack_t) if self.class.emit_index n(:indexasgn, [ receiver, *indexes ], index_map(receiver, lbrack_t, rbrack_t)) else # Incomplete method call. 
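# (the assigned value is appended later by #assign / #op_assign, as with #attr_asgn)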
n(:send, [ receiver, :[]=, *indexes ], send_index_map(receiver, lbrack_t, rbrack_t)) end end def binary_op(receiver, operator_t, arg) source_map = send_binary_op_map(receiver, operator_t, arg) if @parser.version == 18 operator = value(operator_t) if operator == '!=' method_call = n(:send, [ receiver, :==, arg ], source_map) elsif operator == '!~' method_call = n(:send, [ receiver, :=~, arg ], source_map) end if %w(!= !~).include?(operator) return n(:not, [ method_call ], expr_map(source_map.expression)) end end n(:send, [ receiver, value(operator_t).to_sym, arg ], source_map) end def match_op(receiver, match_t, arg) source_map = send_binary_op_map(receiver, match_t, arg) if (regexp = static_regexp_node(receiver)) regexp.names.each do |name| @parser.static_env.declare(name) end n(:match_with_lvasgn, [ receiver, arg ], source_map) else n(:send, [ receiver, :=~, arg ], source_map) end end def unary_op(op_t, receiver) case value(op_t) when '+', '-' method = value(op_t) + '@' else method = value(op_t) end n(:send, [ receiver, method.to_sym ], send_unary_op_map(op_t, receiver)) end def not_op(not_t, begin_t=nil, receiver=nil, end_t=nil) if @parser.version == 18 n(:not, [ check_condition(receiver) ], unary_op_map(not_t, receiver)) else if receiver.nil? nil_node = n0(:begin, collection_map(begin_t, nil, end_t)) n(:send, [ nil_node, :'!' ], send_unary_op_map(not_t, nil_node)) else n(:send, [ check_condition(receiver), :'!' ], send_map(nil, nil, not_t, begin_t, [receiver], end_t)) end end end # # Control flow # # Logical operations: and, or def logical_op(type, lhs, op_t, rhs) n(type, [ lhs, rhs ], binary_op_map(lhs, op_t, rhs)) end # Conditionals def condition(cond_t, cond, then_t, if_true, else_t, if_false, end_t) n(:if, [ check_condition(cond), if_true, if_false ], condition_map(cond_t, cond, then_t, if_true, else_t, if_false, end_t)) end def condition_mod(if_true, if_false, cond_t, cond) n(:if, [ check_condition(cond), if_true, if_false ], keyword_mod_map(if_true || if_false, cond_t, cond)) end def ternary(cond, question_t, if_true, colon_t, if_false) n(:if, [ check_condition(cond), if_true, if_false ], ternary_map(cond, question_t, if_true, colon_t, if_false)) end # Case matching def when(when_t, patterns, then_t, body) children = patterns << body n(:when, children, keyword_map(when_t, then_t, children, nil)) end def case(case_t, expr, when_bodies, else_t, else_body, end_t) n(:case, [ expr, *(when_bodies << else_body)], condition_map(case_t, expr, nil, nil, else_t, else_body, end_t)) end # Loops def loop(type, keyword_t, cond, do_t, body, end_t) n(type, [ check_condition(cond), body ], keyword_map(keyword_t, do_t, nil, end_t)) end def loop_mod(type, body, keyword_t, cond) if body.type == :kwbegin type = :"#{type}_post" end n(type, [ check_condition(cond), body ], keyword_mod_map(body, keyword_t, cond)) end def for(for_t, iterator, in_t, iteratee, do_t, body, end_t) n(:for, [ iterator, iteratee, body ], for_map(for_t, in_t, do_t, end_t)) end # Keywords def keyword_cmd(type, keyword_t, lparen_t=nil, args=[], rparen_t=nil) if type == :yield && args.count > 0 last_arg = args.last if last_arg.type == :block_pass diagnostic :error, :block_given_to_yield, nil, loc(keyword_t), [last_arg.loc.expression] end end if %i[yield super].include?(type) && self.class.emit_kwargs rewrite_hash_args_to_kwargs(args) end n(type, args, keyword_map(keyword_t, lparen_t, args, rparen_t)) end # BEGIN, END def preexe(preexe_t, lbrace_t, compstmt, rbrace_t) n(:preexe, [ compstmt ], keyword_map(preexe_t, lbrace_t, [], 
rbrace_t)) end def postexe(postexe_t, lbrace_t, compstmt, rbrace_t) n(:postexe, [ compstmt ], keyword_map(postexe_t, lbrace_t, [], rbrace_t)) end # Exception handling def rescue_body(rescue_t, exc_list, assoc_t, exc_var, then_t, compound_stmt) n(:resbody, [ exc_list, exc_var, compound_stmt ], rescue_body_map(rescue_t, exc_list, assoc_t, exc_var, then_t, compound_stmt)) end def begin_body(compound_stmt, rescue_bodies=[], else_t=nil, else_=nil, ensure_t=nil, ensure_=nil) if rescue_bodies.any? if else_t compound_stmt = n(:rescue, [ compound_stmt, *(rescue_bodies + [ else_ ]) ], eh_keyword_map(compound_stmt, nil, rescue_bodies, else_t, else_)) else compound_stmt = n(:rescue, [ compound_stmt, *(rescue_bodies + [ nil ]) ], eh_keyword_map(compound_stmt, nil, rescue_bodies, nil, nil)) end elsif else_t statements = [] if !compound_stmt.nil? if compound_stmt.type == :begin statements += compound_stmt.children else statements.push(compound_stmt) end end statements.push( n(:begin, [ else_ ], collection_map(else_t, [ else_ ], nil))) compound_stmt = n(:begin, statements, collection_map(nil, statements, nil)) end if ensure_t compound_stmt = n(:ensure, [ compound_stmt, ensure_ ], eh_keyword_map(compound_stmt, ensure_t, [ ensure_ ], nil, nil)) end compound_stmt end # # Expression grouping # def compstmt(statements) case when statements.none? nil when statements.one? statements.first else n(:begin, statements, collection_map(nil, statements, nil)) end end def begin(begin_t, body, end_t) if body.nil? # A nil expression: `()'. n0(:begin, collection_map(begin_t, nil, end_t)) elsif body.type == :mlhs || (body.type == :begin && body.loc.begin.nil? && body.loc.end.nil?) # Synthesized (begin) from compstmt "a; b" or (mlhs) # from multi_lhs "(a, b) = *foo". n(body.type, body.children, collection_map(begin_t, body.children, end_t)) else n(:begin, [ body ], collection_map(begin_t, [ body ], end_t)) end end def begin_keyword(begin_t, body, end_t) if body.nil? # A nil expression: `begin end'. n0(:kwbegin, collection_map(begin_t, nil, end_t)) elsif (body.type == :begin && body.loc.begin.nil? && body.loc.end.nil?) # Synthesized (begin) from compstmt "a; b". 
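# e.g. `begin a; b end` keeps both statements as direct children of the (kwbegin) node.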
n(:kwbegin, body.children, collection_map(begin_t, body.children, end_t)) else n(:kwbegin, [ body ], collection_map(begin_t, [ body ], end_t)) end end # # PATTERN MATCHING # def case_match(case_t, expr, in_bodies, else_t, else_body, end_t) else_body = n(:empty_else, nil, token_map(else_t)) if else_t && !else_body n(:case_match, [ expr, *(in_bodies << else_body)], condition_map(case_t, expr, nil, nil, else_t, else_body, end_t)) end def in_match(lhs, in_t, rhs) n(:in_match, [lhs, rhs], binary_op_map(lhs, in_t, rhs)) end def match_pattern(lhs, match_t, rhs) n(:match_pattern, [lhs, rhs], binary_op_map(lhs, match_t, rhs)) end def match_pattern_p(lhs, match_t, rhs) n(:match_pattern_p, [lhs, rhs], binary_op_map(lhs, match_t, rhs)) end def in_pattern(in_t, pattern, guard, then_t, body) children = [pattern, guard, body] n(:in_pattern, children, keyword_map(in_t, then_t, children.compact, nil)) end def if_guard(if_t, if_body) n(:if_guard, [if_body], guard_map(if_t, if_body)) end def unless_guard(unless_t, unless_body) n(:unless_guard, [unless_body], guard_map(unless_t, unless_body)) end def match_var(name_t) name = value(name_t).to_sym name_l = loc(name_t) check_lvar_name(name, name_l) check_duplicate_pattern_variable(name, name_l) @parser.static_env.declare(name) n(:match_var, [ name ], variable_map(name_t)) end def match_hash_var(name_t) name = value(name_t).to_sym expr_l = loc(name_t) name_l = expr_l.adjust(end_pos: -1) check_lvar_name(name, name_l) check_duplicate_pattern_variable(name, name_l) @parser.static_env.declare(name) n(:match_var, [ name ], Source::Map::Variable.new(name_l, expr_l)) end def match_hash_var_from_str(begin_t, strings, end_t) if strings.length > 1 diagnostic :error, :pm_interp_in_var_name, nil, loc(begin_t).join(loc(end_t)) end string = strings[0] case string.type when :str # MRI supports plain strings in hash pattern matching name, = *string name_l = string.loc.expression check_lvar_name(name, name_l) check_duplicate_pattern_variable(name, name_l) @parser.static_env.declare(name) if (begin_l = string.loc.begin) # exclude beginning of the string from the location of the variable name_l = name_l.adjust(begin_pos: begin_l.length) end if (end_l = string.loc.end) # exclude end of the string from the location of the variable name_l = name_l.adjust(end_pos: -end_l.length) end expr_l = loc(begin_t).join(string.loc.expression).join(loc(end_t)) n(:match_var, [ name.to_sym ], Source::Map::Variable.new(name_l, expr_l)) when :begin match_hash_var_from_str(begin_t, string.children, end_t) else # we only can get here if there is an interpolation, e.g., ``in "#{ a }":` diagnostic :error, :pm_interp_in_var_name, nil, loc(begin_t).join(loc(end_t)) end end def match_rest(star_t, name_t = nil) if name_t.nil? n0(:match_rest, unary_op_map(star_t)) else name = match_var(name_t) n(:match_rest, [ name ], unary_op_map(star_t, name)) end end def hash_pattern(lbrace_t, kwargs, rbrace_t) args = check_duplicate_args(kwargs) n(:hash_pattern, args, collection_map(lbrace_t, args, rbrace_t)) end def array_pattern(lbrack_t, elements, rbrack_t) return n(:array_pattern, nil, collection_map(lbrack_t, [], rbrack_t)) if elements.nil? trailing_comma = false node_elements = elements.map do |element| if element.type == :match_with_trailing_comma trailing_comma = true element.children.first else trailing_comma = false element end end node_type = trailing_comma ? 
:array_pattern_with_tail : :array_pattern n(node_type, node_elements, collection_map(lbrack_t, elements, rbrack_t)) end def find_pattern(lbrack_t, elements, rbrack_t) n(:find_pattern, elements, collection_map(lbrack_t, elements, rbrack_t)) end def match_with_trailing_comma(match, comma_t) n(:match_with_trailing_comma, [ match ], expr_map(match.loc.expression.join(loc(comma_t)))) end def const_pattern(const, ldelim_t, pattern, rdelim_t) n(:const_pattern, [const, pattern], Source::Map::Collection.new( loc(ldelim_t), loc(rdelim_t), const.loc.expression.join(loc(rdelim_t)) ) ) end def pin(pin_t, var) n(:pin, [ var ], send_unary_op_map(pin_t, var)) end def match_alt(left, pipe_t, right) source_map = binary_op_map(left, pipe_t, right) n(:match_alt, [ left, right ], source_map) end def match_as(value, assoc_t, as) source_map = binary_op_map(value, assoc_t, as) n(:match_as, [ value, as ], source_map) end def match_nil_pattern(dstar_t, nil_t) n0(:match_nil_pattern, arg_prefix_map(dstar_t, nil_t)) end def match_pair(label_type, label, value) if label_type == :label check_duplicate_pattern_key(label[0], label[1]) pair_keyword(label, value) else begin_t, parts, end_t = label label_loc = loc(begin_t).join(loc(end_t)) # quoted label like "label": value if (var_name = static_string(parts)) check_duplicate_pattern_key(var_name, label_loc) else diagnostic :error, :pm_interp_in_var_name, nil, label_loc end pair_quoted(begin_t, parts, end_t, value) end end def match_label(label_type, label) if label_type == :label match_hash_var(label) else # quoted label like "label": value begin_t, strings, end_t = label match_hash_var_from_str(begin_t, strings, end_t) end end private # # VERIFICATION # def check_condition(cond) case cond.type when :masgn if @parser.version <= 23 diagnostic :error, :masgn_as_condition, nil, cond.loc.expression else cond end when :begin if cond.children.count == 1 cond.updated(nil, [ check_condition(cond.children.last) ]) else cond end when :and, :or lhs, rhs = *cond if @parser.version == 18 cond else cond.updated(cond.type, [ check_condition(lhs), check_condition(rhs) ]) end when :irange, :erange lhs, rhs = *cond type = case cond.type when :irange then :iflipflop when :erange then :eflipflop end lhs_condition = check_condition(lhs) unless lhs.nil? rhs_condition = check_condition(rhs) unless rhs.nil? return cond.updated(type, [ lhs_condition, rhs_condition ]) when :regexp n(:match_current_line, [ cond ], expr_map(cond.loc.expression)) else cond end end def check_duplicate_args(args, map={}) args.each do |this_arg| case this_arg.type when :arg, :optarg, :restarg, :blockarg, :kwarg, :kwoptarg, :kwrestarg, :shadowarg check_duplicate_arg(this_arg, map) when :procarg0 if this_arg.children[0].is_a?(Symbol) # s(:procarg0, :a) check_duplicate_arg(this_arg, map) else # s(:procarg0, s(:arg, :a), ...) check_duplicate_args(this_arg.children, map) end when :mlhs check_duplicate_args(this_arg.children, map) end end end def check_duplicate_arg(this_arg, map={}) this_name, = *this_arg that_arg = map[this_name] that_name, = *that_arg if that_arg.nil? map[this_name] = this_arg elsif arg_name_collides?(this_name, that_name) diagnostic :error, :duplicate_argument, nil, this_arg.loc.name, [ that_arg.loc.name ] end end def validate_no_forward_arg_after_restarg(args) restarg = nil forward_arg = nil args.each do |arg| case arg.type when :restarg then restarg = arg when :forward_arg then forward_arg = arg end end if !forward_arg.nil? && !restarg.nil? 
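# e.g. `def m(*rest, ...); end` is rejected: argument forwarding may not follow a rest argument.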
diagnostic :error, :forward_arg_after_restarg, nil, forward_arg.loc.expression, [restarg.loc.expression] end end def check_assignment_to_numparam(name, loc) # MRI < 2.7 treats numbered parameters as regular variables # and so it's allowed to perform assignments like `_1 = 42`. return if @parser.version < 27 assigning_to_numparam = @parser.context.in_dynamic_block? && name =~ /\A_([1-9])\z/ && @parser.max_numparam_stack.has_numparams? if assigning_to_numparam diagnostic :error, :cant_assign_to_numparam, { :name => name }, loc end end def check_reserved_for_numparam(name, loc) # MRI < 3.0 accepts assignemnt to variables like _1 # if it's not a numbered parameter. MRI 3.0 and newer throws an error. return if @parser.version < 30 if name =~ /\A_([1-9])\z/ diagnostic :error, :reserved_for_numparam, { :name => name }, loc end end def arg_name_collides?(this_name, that_name) case @parser.version when 18 this_name == that_name when 19 # Ignore underscore. this_name != :_ && this_name == that_name else # Ignore everything beginning with underscore. this_name && this_name[0] != '_' && this_name == that_name end end def check_lvar_name(name, loc) if name =~ /\A[[[:lower:]]_][[[:alnum:]]_]*\z/ # OK else diagnostic :error, :lvar_name, { name: name }, loc end end def check_duplicate_pattern_variable(name, loc) return if name.to_s.start_with?('_') if @parser.pattern_variables.declared?(name) diagnostic :error, :duplicate_variable_name, { name: name.to_s }, loc end @parser.pattern_variables.declare(name) end def check_duplicate_pattern_key(name, loc) if @parser.pattern_hash_keys.declared?(name) diagnostic :error, :duplicate_pattern_key, { name: name.to_s }, loc end @parser.pattern_hash_keys.declare(name) end # # SOURCE MAPS # def n(type, children, source_map) AST::Node.new(type, children, :location => source_map) end def n0(type, source_map) n(type, [], source_map) end def join_exprs(left_expr, right_expr) left_expr.loc.expression. join(right_expr.loc.expression) end def token_map(token) Source::Map.new(loc(token)) end def delimited_string_map(string_t) str_range = loc(string_t) begin_l = str_range.with(end_pos: str_range.begin_pos + 1) end_l = str_range.with(begin_pos: str_range.end_pos - 1) Source::Map::Collection.new(begin_l, end_l, loc(string_t)) end def prefix_string_map(symbol) str_range = loc(symbol) begin_l = str_range.with(end_pos: str_range.begin_pos + 1) Source::Map::Collection.new(begin_l, nil, loc(symbol)) end def unquoted_map(token) Source::Map::Collection.new(nil, nil, loc(token)) end def pair_keyword_map(key_t, value_e) key_range = loc(key_t) key_l = key_range.adjust(end_pos: -1) colon_l = key_range.with(begin_pos: key_range.end_pos - 1) [ # key map Source::Map::Collection.new(nil, nil, key_l), # pair map Source::Map::Operator.new(colon_l, key_range.join(value_e.loc.expression)) ] end def pair_quoted_map(begin_t, end_t, value_e) end_l = loc(end_t) quote_l = end_l.with(begin_pos: end_l.end_pos - 2, end_pos: end_l.end_pos - 1) colon_l = end_l.with(begin_pos: end_l.end_pos - 1) [ # modified end token [ value(end_t), quote_l ], # pair map Source::Map::Operator.new(colon_l, loc(begin_t).join(value_e.loc.expression)) ] end def expr_map(loc) Source::Map.new(loc) end def collection_map(begin_t, parts, end_t) if begin_t.nil? || end_t.nil? if parts.any? expr_l = join_exprs(parts.first, parts.last) elsif !begin_t.nil? expr_l = loc(begin_t) elsif !end_t.nil? 
expr_l = loc(end_t) end else expr_l = loc(begin_t).join(loc(end_t)) end Source::Map::Collection.new(loc(begin_t), loc(end_t), expr_l) end def string_map(begin_t, parts, end_t) if begin_t && value(begin_t).start_with?('<<') if parts.any? expr_l = join_exprs(parts.first, parts.last) else expr_l = loc(end_t).begin end Source::Map::Heredoc.new(loc(begin_t), expr_l, loc(end_t)) else collection_map(begin_t, parts, end_t) end end def regexp_map(begin_t, end_t, options_e) Source::Map::Collection.new(loc(begin_t), loc(end_t), loc(begin_t).join(options_e.loc.expression)) end def constant_map(scope, colon2_t, name_t) if scope.nil? expr_l = loc(name_t) else expr_l = scope.loc.expression.join(loc(name_t)) end Source::Map::Constant.new(loc(colon2_t), loc(name_t), expr_l) end def variable_map(name_t) Source::Map::Variable.new(loc(name_t)) end def binary_op_map(left_e, op_t, right_e) Source::Map::Operator.new(loc(op_t), join_exprs(left_e, right_e)) end def unary_op_map(op_t, arg_e=nil) if arg_e.nil? expr_l = loc(op_t) else expr_l = loc(op_t).join(arg_e.loc.expression) end Source::Map::Operator.new(loc(op_t), expr_l) end def range_map(start_e, op_t, end_e) if start_e && end_e expr_l = join_exprs(start_e, end_e) elsif start_e expr_l = start_e.loc.expression.join(loc(op_t)) elsif end_e expr_l = loc(op_t).join(end_e.loc.expression) end Source::Map::Operator.new(loc(op_t), expr_l) end def arg_prefix_map(op_t, name_t=nil) if name_t.nil? expr_l = loc(op_t) else expr_l = loc(op_t).join(loc(name_t)) end Source::Map::Variable.new(loc(name_t), expr_l) end def kwarg_map(name_t, value_e=nil) label_range = loc(name_t) name_range = label_range.adjust(end_pos: -1) if value_e expr_l = loc(name_t).join(value_e.loc.expression) else expr_l = loc(name_t) end Source::Map::Variable.new(name_range, expr_l) end def module_definition_map(keyword_t, name_e, operator_t, end_t) if name_e name_l = name_e.loc.expression end Source::Map::Definition.new(loc(keyword_t), loc(operator_t), name_l, loc(end_t)) end def definition_map(keyword_t, operator_t, name_t, end_t) Source::Map::MethodDefinition.new(loc(keyword_t), loc(operator_t), loc(name_t), loc(end_t), nil, nil) end def endless_definition_map(keyword_t, operator_t, name_t, assignment_t, body_e) body_l = body_e.loc.expression Source::Map::MethodDefinition.new(loc(keyword_t), loc(operator_t), loc(name_t), nil, loc(assignment_t), body_l) end def send_map(receiver_e, dot_t, selector_t, begin_t=nil, args=[], end_t=nil) if receiver_e begin_l = receiver_e.loc.expression elsif selector_t begin_l = loc(selector_t) end if end_t end_l = loc(end_t) elsif args.any? end_l = args.last.loc.expression elsif selector_t end_l = loc(selector_t) end Source::Map::Send.new(loc(dot_t), loc(selector_t), loc(begin_t), loc(end_t), begin_l.join(end_l)) end def var_send_map(variable_e) Source::Map::Send.new(nil, variable_e.loc.expression, nil, nil, variable_e.loc.expression) end def send_binary_op_map(lhs_e, selector_t, rhs_e) Source::Map::Send.new(nil, loc(selector_t), nil, nil, join_exprs(lhs_e, rhs_e)) end def send_unary_op_map(selector_t, arg_e) if arg_e.nil? 
expr_l = loc(selector_t) else expr_l = loc(selector_t).join(arg_e.loc.expression) end Source::Map::Send.new(nil, loc(selector_t), nil, nil, expr_l) end def index_map(receiver_e, lbrack_t, rbrack_t) Source::Map::Index.new(loc(lbrack_t), loc(rbrack_t), receiver_e.loc.expression.join(loc(rbrack_t))) end def send_index_map(receiver_e, lbrack_t, rbrack_t) Source::Map::Send.new(nil, loc(lbrack_t).join(loc(rbrack_t)), nil, nil, receiver_e.loc.expression.join(loc(rbrack_t))) end def block_map(receiver_l, begin_t, end_t) Source::Map::Collection.new(loc(begin_t), loc(end_t), receiver_l.join(loc(end_t))) end def keyword_map(keyword_t, begin_t, args, end_t) args ||= [] if end_t end_l = loc(end_t) elsif args.any? && !args.last.nil? end_l = args.last.loc.expression elsif args.any? && args.count > 1 end_l = args[-2].loc.expression else end_l = loc(keyword_t) end Source::Map::Keyword.new(loc(keyword_t), loc(begin_t), loc(end_t), loc(keyword_t).join(end_l)) end def keyword_mod_map(pre_e, keyword_t, post_e) Source::Map::Keyword.new(loc(keyword_t), nil, nil, join_exprs(pre_e, post_e)) end def condition_map(keyword_t, cond_e, begin_t, body_e, else_t, else_e, end_t) if end_t end_l = loc(end_t) elsif else_e && else_e.loc.expression end_l = else_e.loc.expression elsif loc(else_t) end_l = loc(else_t) elsif body_e && body_e.loc.expression end_l = body_e.loc.expression elsif loc(begin_t) end_l = loc(begin_t) else end_l = cond_e.loc.expression end Source::Map::Condition.new(loc(keyword_t), loc(begin_t), loc(else_t), loc(end_t), loc(keyword_t).join(end_l)) end def ternary_map(begin_e, question_t, mid_e, colon_t, end_e) Source::Map::Ternary.new(loc(question_t), loc(colon_t), join_exprs(begin_e, end_e)) end def for_map(keyword_t, in_t, begin_t, end_t) Source::Map::For.new(loc(keyword_t), loc(in_t), loc(begin_t), loc(end_t), loc(keyword_t).join(loc(end_t))) end def rescue_body_map(keyword_t, exc_list_e, assoc_t, exc_var_e, then_t, compstmt_e) end_l = compstmt_e.loc.expression if compstmt_e end_l = loc(then_t) if end_l.nil? && then_t end_l = exc_var_e.loc.expression if end_l.nil? && exc_var_e end_l = exc_list_e.loc.expression if end_l.nil? && exc_list_e end_l = loc(keyword_t) if end_l.nil? Source::Map::RescueBody.new(loc(keyword_t), loc(assoc_t), loc(then_t), loc(keyword_t).join(end_l)) end def eh_keyword_map(compstmt_e, keyword_t, body_es, else_t, else_e) if compstmt_e.nil? if keyword_t.nil? begin_l = body_es.first.loc.expression else begin_l = loc(keyword_t) end else begin_l = compstmt_e.loc.expression end if else_t if else_e.nil? end_l = loc(else_t) else end_l = else_e.loc.expression end elsif !body_es.last.nil? end_l = body_es.last.loc.expression else end_l = loc(keyword_t) end Source::Map::Condition.new(loc(keyword_t), nil, loc(else_t), nil, begin_l.join(end_l)) end def guard_map(keyword_t, guard_body_e) keyword_l = loc(keyword_t) guard_body_l = guard_body_e.loc.expression Source::Map::Keyword.new(keyword_l, nil, nil, keyword_l.join(guard_body_l)) end # # HELPERS # # Extract a static string from e.g. a regular expression, # honoring the fact that MRI expands interpolations like #{""} # at parse time. def static_string(nodes) nodes.map do |node| case node.type when :str node.children[0] when :begin if (string = static_string(node.children)) string else return nil end else return nil end end.join end def static_regexp(parts, options) source = static_string(parts) return nil if source.nil? 
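# Re-encode the assembled source according to the regexp's encoding flag (u/e/s/n)
# before handing it to Regexp.new.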
source = case when options.children.include?(:u) source.encode(Encoding::UTF_8) when options.children.include?(:e) source.encode(Encoding::EUC_JP) when options.children.include?(:s) source.encode(Encoding::WINDOWS_31J) when options.children.include?(:n) source.encode(Encoding::BINARY) else source end Regexp.new(source, (Regexp::EXTENDED if options.children.include?(:x))) end def static_regexp_node(node) if node.type == :regexp if @parser.version >= 33 && node.children[0..-2].any? { |child| child.type != :str } return nil end parts, options = node.children[0..-2], node.children[-1] static_regexp(parts, options) end end def collapse_string_parts?(parts) parts.one? && [:str, :dstr].include?(parts.first.type) end def value(token) token[0] end def string_value(token) unless token[0].valid_encoding? diagnostic(:error, :invalid_encoding, nil, token[1]) end token[0] end def loc(token) # Pass through `nil`s and return nil for tNL. token[1] if token && token[0] end def diagnostic(type, reason, arguments, location, highlights=[]) @parser.diagnostics.process( Diagnostic.new(type, reason, arguments, location, highlights)) if type == :error @parser.send :yyerror end end def validate_definee(definee) case definee.type when :int, :str, :dstr, :sym, :dsym, :regexp, :array, :hash diagnostic :error, :singleton_literal, nil, definee.loc.expression false else true end end def rewrite_hash_args_to_kwargs(args) if args.any? && kwargs?(args.last) # foo(..., bar: baz) args[args.length - 1] = args[args.length - 1].updated(:kwargs) elsif args.length > 1 && args.last.type == :block_pass && kwargs?(args[args.length - 2]) # foo(..., bar: baz, &blk) args[args.length - 2] = args[args.length - 2].updated(:kwargs) end end def kwargs?(node) node.type == :hash && node.loc.begin.nil? && node.loc.end.nil? end end end parser-3.3.4.2/lib/parser/clobbering_error.rb000066400000000000000000000005131465510415600210710ustar00rootroot00000000000000# frozen_string_literal: true module Parser ## # {Parser::ClobberingError} is raised when {Parser::Source::Rewriter} # detects a clobbering rewrite action. This class inherits {RuntimeError} # rather than {StandardError} for backward compatibility. # # @api public # class ClobberingError < RuntimeError end end parser-3.3.4.2/lib/parser/color.rb000066400000000000000000000011631465510415600166720ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Color def self.color(str, code, bold: false) return str unless STDOUT.tty? code = Array(code) code.unshift(1) if bold "\e[#{code.join(';')}m#{str}\e[0m" end def self.red(str, bold: false) color(str, 31, bold: bold) end def self.green(str, bold: false) color(str, 32, bold: bold) end def self.yellow(str, bold: false) color(str, 33, bold: bold) end def self.magenta(str, bold: false) color(str, 35, bold: bold) end def self.underline(str) color(str, 4) end end end parser-3.3.4.2/lib/parser/context.rb000066400000000000000000000023111465510415600172340ustar00rootroot00000000000000# frozen_string_literal: true module Parser # Context of parsing that is represented by a stack of scopes. 
# # Supported states: # + :class - in the class body (class A; end) # + :module - in the module body (module M; end) # + :sclass - in the singleton class body (class << obj; end) # + :def - in the method body (def m; end) # + :defs - in the singleton method body (def self.m; end) # + :def_open_args - in the arglist of the method definition # keep in mind that it's set **only** after reducing the first argument, # if you need to handle the first argument check `lex_state == expr_fname` # + :block - in the block body (tap {}) # + :lambda - in the lambda body (-> {}) # class Context FLAGS = %i[ in_defined in_kwarg in_argdef in_def in_class in_block in_lambda ] def initialize reset end def reset @in_defined = false @in_kwarg = false @in_argdef = false @in_def = false @in_class = false @in_block = false @in_lambda = false end attr_accessor(*FLAGS) def in_dynamic_block? in_block || in_lambda end end end parser-3.3.4.2/lib/parser/current.rb000066400000000000000000000064431465510415600172440ustar00rootroot00000000000000# frozen_string_literal: true module Parser class << self def warn_syntax_deviation(feature, version) warn "warning: parser/current is loading #{feature}, which recognizes " \ "#{version}-compliant syntax, but you are running #{RUBY_VERSION}.\n" \ "Please see https://github.com/whitequark/parser#compatibility-with-ruby-mri." end private :warn_syntax_deviation end case RUBY_VERSION when /^2\.0\./ current_version = '2.0.0' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby20', current_version end require_relative 'ruby20' CurrentRuby = Ruby20 when /^2\.1\./ current_version = '2.1.10' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby21', current_version end require_relative 'ruby21' CurrentRuby = Ruby21 when /^2\.2\./ current_version = '2.2.10' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby22', current_version end require_relative 'ruby22' CurrentRuby = Ruby22 when /^2\.3\./ current_version = '2.3.8' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby23', current_version end require_relative 'ruby23' CurrentRuby = Ruby23 when /^2\.4\./ current_version = '2.4.10' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby24', current_version end require_relative 'ruby24' CurrentRuby = Ruby24 when /^2\.5\./ current_version = '2.5.9' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby25', current_version end require_relative 'ruby25' CurrentRuby = Ruby25 when /^2\.6\./ current_version = '2.6.10' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby26', current_version end require_relative 'ruby26' CurrentRuby = Ruby26 when /^2\.7\./ current_version = '2.7.8' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby27', current_version end require_relative 'ruby27' CurrentRuby = Ruby27 when /^3\.0\./ current_version = '3.0.7' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby30', current_version end require_relative 'ruby30' CurrentRuby = Ruby30 when /^3\.1\./ current_version = '3.1.6' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby31', current_version end require_relative 'ruby31' CurrentRuby = Ruby31 when /^3\.2\./ current_version = '3.2.5' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby32', current_version end require_relative 'ruby32' CurrentRuby = Ruby32 when /^3\.3\./ current_version = '3.3.4' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby33', current_version end 
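# Typical consumer usage of this file (an illustrative sketch; the source string is made up):
#
#   require 'parser/current'
#   Parser::CurrentRuby.parse('1 + 1') # => s(:send, s(:int, 1), :+, s(:int, 1))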
require_relative 'ruby33' CurrentRuby = Ruby33 when /^3\.4\./ current_version = '3.4.0' if RUBY_VERSION != current_version warn_syntax_deviation 'parser/ruby34', current_version end require_relative 'ruby34' CurrentRuby = Ruby34 else # :nocov: # Keep this in sync with released Ruby. warn_syntax_deviation 'parser/ruby33', '3.3.x' require_relative 'ruby33' CurrentRuby = Ruby33 end end parser-3.3.4.2/lib/parser/current_arg_stack.rb000066400000000000000000000013151465510415600212530ustar00rootroot00000000000000# frozen_string_literal: true module Parser # Stack that holds names of current arguments, # i.e. while parsing # def m1(a = (def m2(b = def m3(c = 1); end); end)); end # ^ # stack is [:a, :b, :c] # # Emulates `p->cur_arg` in MRI's parse.y # # @api private # class CurrentArgStack attr_reader :stack def initialize @stack = [] freeze end def empty? @stack.size == 0 end def push(value) @stack << value end def set(value) @stack[@stack.length - 1] = value end def pop @stack.pop end def reset @stack.clear end def top @stack.last end end end parser-3.3.4.2/lib/parser/deprecation.rb000066400000000000000000000003721465510415600200520ustar00rootroot00000000000000# frozen_string_literal: true module Parser ## # @api private # module Deprecation attr_writer :warned_of_deprecation def warn_of_deprecation @warned_of_deprecation ||= warn(self::DEPRECATION_WARNING) || true end end end parser-3.3.4.2/lib/parser/diagnostic.rb000066400000000000000000000103631465510415600177020ustar00rootroot00000000000000# frozen_string_literal: true module Parser ## # @api public # # @!attribute [r] level # @see LEVELS # @return [Symbol] diagnostic level # # @!attribute [r] reason # @see Parser::MESSAGES # @return [Symbol] reason for error # # @!attribute [r] arguments # @see Parser::MESSAGES # @return [Symbol] extended arguments that describe the error # # @!attribute [r] message # @return [String] error message # # @!attribute [r] location # Main error-related source range. # @return [Parser::Source::Range] # # @!attribute [r] highlights # Supplementary error-related source ranges. # @return [Array] # class Diagnostic ## # Collection of the available diagnostic levels. # # @return [Array] # LEVELS = [:note, :warning, :error, :fatal].freeze attr_reader :level, :reason, :arguments attr_reader :location, :highlights ## # @param [Symbol] level # @param [Symbol] reason # @param [Hash] arguments # @param [Parser::Source::Range] location # @param [Array] highlights # def initialize(level, reason, arguments, location, highlights=[]) unless LEVELS.include?(level) raise ArgumentError, "Diagnostic#level must be one of #{LEVELS.join(', ')}; " \ "#{level.inspect} provided." end raise 'Expected a location' unless location @level = level @reason = reason @arguments = (arguments || {}).dup.freeze @location = location @highlights = highlights.dup.freeze freeze end ## # @return [String] the rendered message. # def message Messages.compile(@reason, @arguments) end ## # Renders the diagnostic message as a clang-like diagnostic. 
# # @example # diagnostic.render # => # # [ # # "(fragment:0):1:5: error: unexpected token $end", # # "foo +", # # " ^" # # ] # # @return [Array] # def render if @location.line == @location.last_line || @location.is?("\n") ["#{@location}: #{@level}: #{message}"] + render_line(@location) else # multi-line diagnostic first_line = first_line_only(@location) last_line = last_line_only(@location) num_lines = (@location.last_line - @location.line) + 1 buffer = @location.source_buffer last_lineno, last_column = buffer.decompose_position(@location.end_pos) ["#{@location}-#{last_lineno}:#{last_column}: #{@level}: #{message}"] + render_line(first_line, num_lines > 2, false) + render_line(last_line, false, true) end end private ## # Renders one source line in clang diagnostic style, with highlights. # # @return [Array] # def render_line(range, ellipsis=false, range_end=false) source_line = range.source_line highlight_line = ' ' * source_line.length @highlights.each do |highlight| line_range = range.source_buffer.line_range(range.line) if highlight = highlight.intersect(line_range) highlight_line[highlight.column_range] = '~' * highlight.size end end if range.is?("\n") highlight_line += "^" else if !range_end && range.size >= 1 highlight_line[range.column_range] = '^' + '~' * (range.size - 1) else highlight_line[range.column_range] = '~' * range.size end end highlight_line += '...' if ellipsis [source_line, highlight_line]. map { |line| "#{range.source_buffer.name}:#{range.line}: #{line}" } end ## # If necessary, shrink a `Range` so as to include only the first line. # # @return [Parser::Source::Range] # def first_line_only(range) if range.line != range.last_line range.resize(range.source =~ /\n/) else range end end ## # If necessary, shrink a `Range` so as to include only the last line. # # @return [Parser::Source::Range] # def last_line_only(range) if range.line != range.last_line range.adjust(begin_pos: range.source =~ /[^\n]*\z/) else range end end end end parser-3.3.4.2/lib/parser/diagnostic/000077500000000000000000000000001465510415600173525ustar00rootroot00000000000000parser-3.3.4.2/lib/parser/diagnostic/engine.rb000066400000000000000000000047511465510415600211530ustar00rootroot00000000000000# frozen_string_literal: true module Parser ## # {Parser::Diagnostic::Engine} provides a basic API for dealing with # diagnostics by delegating them to registered consumers. # # @example # buffer = Parser::Source::Buffer.new(__FILE__, source: 'foobar') # # consumer = lambda do |diagnostic| # puts diagnostic.message # end # # engine = Parser::Diagnostic::Engine.new(consumer) # diagnostic = Parser::Diagnostic.new( # :warning, :unexpected_token, { :token => 'abc' }, buffer, 1..2) # # engine.process(diagnostic) # => "unexpected token abc" # # @api public # # @!attribute [rw] consumer # @return [#call(Diagnostic)] # # @!attribute [rw] all_errors_are_fatal # When set to `true` any error that is encountered will result in # {Parser::SyntaxError} being raised. # @return [Boolean] # # @!attribute [rw] ignore_warnings # When set to `true` warnings will be ignored. # @return [Boolean] # class Diagnostic::Engine attr_accessor :consumer attr_accessor :all_errors_are_fatal attr_accessor :ignore_warnings ## # @param [#call(Diagnostic)] consumer # def initialize(consumer=nil) @consumer = consumer @all_errors_are_fatal = false @ignore_warnings = false end ## # Processes a `diagnostic`: # * Passes the diagnostic to the consumer, if it's not a warning when # `ignore_warnings` is set. 
# * After that, raises {Parser::SyntaxError} when `all_errors_are_fatal` # is set to true. # # @param [Parser::Diagnostic] diagnostic # @return [Parser::Diagnostic::Engine] # @see ignore? # @see raise? # def process(diagnostic) if ignore?(diagnostic) # do nothing elsif @consumer @consumer.call(diagnostic) end if raise?(diagnostic) raise Parser::SyntaxError, diagnostic end self end protected ## # Checks whether `diagnostic` should be ignored. # # @param [Parser::Diagnostic] diagnostic # @return [Boolean] # def ignore?(diagnostic) @ignore_warnings && diagnostic.level == :warning end ## # Checks whether `diagnostic` should be raised as an exception. # # @param [Parser::Diagnostic] diagnostic # @return [Boolean] # def raise?(diagnostic) (@all_errors_are_fatal && diagnostic.level == :error) || diagnostic.level == :fatal end end end parser-3.3.4.2/lib/parser/lexer-strings.rl000066400000000000000000000655371465510415600204130ustar00rootroot00000000000000%%machine lex; # % fix highlighting class Parser::LexerStrings %% write data nofinal; # % ESCAPES = { ?a.ord => "\a", ?b.ord => "\b", ?e.ord => "\e", ?f.ord => "\f", ?n.ord => "\n", ?r.ord => "\r", ?s.ord => "\s", ?t.ord => "\t", ?v.ord => "\v", ?\\.ord => "\\" }.freeze REGEXP_META_CHARACTERS = Regexp.union(*"\\$()*+.<>?[]^{|}".chars).freeze attr_accessor :herebody_s # Set by "main" lexer attr_accessor :source_buffer, :source_pts def initialize(lexer, version) @lexer = lexer @version = version @_lex_actions = if self.class.respond_to?(:_lex_actions, true) self.class.send :_lex_actions else [] end reset end def reset @cs = self.class.lex_en_unknown @literal_stack = [] @escape_s = nil # starting position of current sequence @escape = nil # last escaped sequence, as string @herebody_s = nil # starting position of current heredoc line # After encountering the closing line of <<~SQUIGGLY_HEREDOC, # we store the indentation level and give it out to the parser # on request. It is not possible to infer indentation level just # from the AST because escape sequences such as `\ ` or `\t` are # expanded inside the lexer, but count as non-whitespace for # indentation purposes. @dedent_level = nil end LEX_STATES = { :interp_string => lex_en_interp_string, :interp_words => lex_en_interp_words, :plain_string => lex_en_plain_string, :plain_words => lex_en_plain_string, } def advance(p) # Ugly, but dependent on Ragel output. Consider refactoring it somehow. klass = self.class _lex_trans_keys = klass.send :_lex_trans_keys _lex_key_spans = klass.send :_lex_key_spans _lex_index_offsets = klass.send :_lex_index_offsets _lex_indicies = klass.send :_lex_indicies _lex_trans_targs = klass.send :_lex_trans_targs _lex_trans_actions = klass.send :_lex_trans_actions _lex_to_state_actions = klass.send :_lex_to_state_actions _lex_from_state_actions = klass.send :_lex_from_state_actions _lex_eof_trans = klass.send :_lex_eof_trans _lex_actions = @_lex_actions pe = source_pts.size + 2 eof = pe %% write exec; # % # Ragel creates a local variable called `testEof` but it doesn't use # it in any assignment. This dead code is here to swallow the warning. # It has no runtime cost because Ruby doesn't produce any instructions from it. 
if false testEof end [p, @root_lexer_state] end def read_character_constant(p) @cs = self.class.lex_en_character advance(p) end # # === LITERAL STACK === # def push_literal(*args) new_literal = Parser::Lexer::Literal.new(self, *args) @literal_stack.push(new_literal) @cs = next_state_for_literal(new_literal) end def next_state_for_literal(literal) if literal.words? && literal.backslash_delimited? if literal.interpolate? self.class.lex_en_interp_backslash_delimited_words else self.class.lex_en_plain_backslash_delimited_words end elsif literal.words? && !literal.backslash_delimited? if literal.interpolate? self.class.lex_en_interp_words else self.class.lex_en_plain_words end elsif !literal.words? && literal.backslash_delimited? if literal.interpolate? self.class.lex_en_interp_backslash_delimited else self.class.lex_en_plain_backslash_delimited end else if literal.interpolate? self.class.lex_en_interp_string else self.class.lex_en_plain_string end end end def continue_lexing(current_literal) @cs = next_state_for_literal(current_literal) end def literal @literal_stack.last end def pop_literal old_literal = @literal_stack.pop @dedent_level = old_literal.dedent_level if old_literal.type == :tREGEXP_BEG @root_lexer_state = @lexer.class.lex_en_inside_string # Fetch modifiers. self.class.lex_en_regexp_modifiers else @root_lexer_state = @lexer.class.lex_en_expr_end # Do nothing, yield to main lexer nil end end def close_interp_on_current_literal(p) current_literal = literal if current_literal if current_literal.end_interp_brace_and_try_closing if version?(18, 19) emit(:tRCURLY, '}'.freeze, p - 1, p) @lexer.cond.lexpop @lexer.cmdarg.lexpop else emit(:tSTRING_DEND, '}'.freeze, p - 1, p) end if current_literal.saved_herebody_s @herebody_s = current_literal.saved_herebody_s end continue_lexing(current_literal) return true end end end def dedent_level # We erase @dedent_level as a precaution to avoid accidentally # using a stale value. dedent_level, @dedent_level = @dedent_level, nil dedent_level end # This hook is triggered by "main" lexer on every newline character def on_newline(p) # After every heredoc was parsed, @herebody_s contains the # position of next token after all heredocs. if @herebody_s p = @herebody_s @herebody_s = nil end p end protected def eof_codepoint?(point) [0x04, 0x1a, 0x00].include? point end def version?(*versions) versions.include?(@version) end def tok(s = @ts, e = @te) @source_buffer.slice(s, e - s) end def range(s = @ts, e = @te) Parser::Source::Range.new(@source_buffer, s, e) end def emit(type, value = tok, s = @ts, e = @te) @lexer.send(:emit, type, value, s, e) end def diagnostic(type, reason, arguments=nil, location=range, highlights=[]) @lexer.send(:diagnostic, type, reason, arguments, location, highlights) end def cond @lexer.cond end def emit_invalid_escapes? # always true for old Rubies return true if @version < 32 # in "?\u123" case we don't push any literals # but we always emit invalid escapes return true if literal.nil? # Ruby >= 32, regexp, exceptional case !literal.regexp? end # String escaping def extend_string_escaped current_literal = literal # Get the first character after the backslash. escaped_char = source_buffer.slice(@escape_s, 1).chr if current_literal.munge_escape? escaped_char # If this particular literal uses this character as an opening # or closing delimiter, it is an escape sequence for that # particular character. Write it without the backslash. if current_literal.regexp? 
&& REGEXP_META_CHARACTERS.match(escaped_char) # Regular expressions should include escaped delimiters in their # escaped form, except when the escaped character is # a closing delimiter but not a regexp metacharacter. # # The backslash itself cannot be used as a closing delimiter # at the same time as an escape symbol, but it is always munged, # so this branch also executes for the non-closing-delimiter case # for the backslash. current_literal.extend_string(tok, @ts, @te) else current_literal.extend_string(escaped_char, @ts, @te) end else # It does not. So this is an actual escape sequence, yay! if current_literal.squiggly_heredoc? && escaped_char == "\n".freeze # Squiggly heredocs like # <<~-HERE # 1\ # 2 # HERE # treat '\' as a line continuation, but still dedent the body, so the heredoc above becomes "12\n". # This information is emitted as is, without escaping, # later this escape sequence (\\\n) gets handled manually in the Lexer::Dedenter current_literal.extend_string(tok, @ts, @te) elsif current_literal.supports_line_continuation_via_slash? && escaped_char == "\n".freeze # Heredocs, regexp and a few other types of literals support line # continuation via \\\n sequence. The code like # "a\ # b" # must be parsed as "ab" current_literal.extend_string(tok.gsub("\\\n".freeze, ''.freeze), @ts, @te) elsif current_literal.regexp? && @version >= 31 && %w[c C m M].include?(escaped_char) # Ruby >= 3.1 escapes \c- and \m chars, that's the only escape sequence # supported by regexes so far, so it needs a separate branch. current_literal.extend_string(@escape, @ts, @te) elsif current_literal.regexp? # Regular expressions should include escape sequences in their # escaped form. On the other hand, escaped newlines are removed (in cases like "\\C-\\\n\\M-x") current_literal.extend_string(tok.gsub("\\\n".freeze, ''.freeze), @ts, @te) else current_literal.extend_string(@escape || tok, @ts, @te) end end end def extend_interp_code(current_literal) current_literal.flush_string current_literal.extend_content emit(:tSTRING_DBEG, '#{'.freeze) if current_literal.heredoc? current_literal.saved_herebody_s = @herebody_s @herebody_s = nil end current_literal.start_interp_brace @lexer.command_start = true end def extend_interp_digit_var if @version >= 27 literal.extend_string(tok, @ts, @te) else message = tok.start_with?('#@@') ? :cvar_name : :ivar_name diagnostic :error, message, { :name => tok(@ts + 1, @te) }, range(@ts + 1, @te) end end def extend_string_eol_check_eof(current_literal, pe) if @te == pe diagnostic :fatal, :string_eof, nil, range(current_literal.str_s, current_literal.str_s + 1) end end def extend_string_eol_heredoc_line line = tok(@herebody_s, @ts).gsub(/\r+$/, ''.freeze) if version?(18, 19, 20) # See ruby:c48b4209c line = line.gsub(/\r.*$/, ''.freeze) end line end def extend_string_eol_heredoc_intertwined(p) if @herebody_s # This is a regular literal intertwined with a heredoc. Like: # # p <<-foo+"1 # bar # foo # 2" # # which, incidentally, evaluates to "bar\n1\n2". p = @herebody_s - 1 @herebody_s = nil end p end def extend_string_eol_words(current_literal, p) if current_literal.words? && !eof_codepoint?(source_pts[p]) current_literal.extend_space @ts, @te else # A literal newline is appended if the heredoc was _not_ closed # this time (see fbreak above). See also Literal#nest_and_try_closing # for rationale of calling #flush_string here. 
current_literal.extend_string tok, @ts, @te current_literal.flush_string end end def extend_string_slice_end(lookahead) # tLABEL_END is only possible in non-cond context on >= 2.2 if @version >= 22 && !cond.active? lookahead = source_buffer.slice(@te, 2) end lookahead end def extend_string_for_token_range(current_literal, string) current_literal.extend_string(string, @ts, @te) end def encode_escape(ord) ord.chr.force_encoding(source_buffer.source.encoding) end def unescape_char(p) codepoint = source_pts[p - 1] if @version >= 30 && (codepoint == 117 || codepoint == 85) # 'u' or 'U' diagnostic :fatal, :invalid_escape end if (@escape = ESCAPES[codepoint]).nil? @escape = encode_escape(source_buffer.slice(p - 1, 1)) end end def unicode_points(p) @escape = "" codepoints = tok(@escape_s + 2, p - 1) codepoint_s = @escape_s + 2 if @version < 24 if codepoints.start_with?(" ") || codepoints.start_with?("\t") diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s + 2, @escape_s + 3) end if spaces_p = codepoints.index(/[ \t]{2}/) diagnostic :fatal, :invalid_unicode_escape, nil, range(codepoint_s + spaces_p + 1, codepoint_s + spaces_p + 2) end if codepoints.end_with?(" ") || codepoints.end_with?("\t") diagnostic :fatal, :invalid_unicode_escape, nil, range(p - 1, p) end end codepoints.scan(/([0-9a-fA-F]+)|([ \t]+)/).each do |(codepoint_str, spaces)| if spaces codepoint_s += spaces.length else codepoint = codepoint_str.to_i(16) if codepoint >= 0x110000 diagnostic :error, :unicode_point_too_large, nil, range(codepoint_s, codepoint_s + codepoint_str.length) break end @escape += codepoint.chr(Encoding::UTF_8) codepoint_s += codepoint_str.length end end end def read_post_meta_or_ctrl_char(p) @escape = source_buffer.slice(p - 1, 1).chr if @version >= 27 && ((0..8).include?(@escape.ord) || (14..31).include?(@escape.ord)) diagnostic :fatal, :invalid_escape end end def extend_interp_var(current_literal) current_literal.flush_string current_literal.extend_content emit(:tSTRING_DVAR, nil, @ts, @ts + 1) @ts end def emit_interp_var(interp_var_kind) case interp_var_kind when :cvar @lexer.send(:emit_class_var, @ts + 1, @te) when :ivar @lexer.send(:emit_instance_var, @ts + 1, @te) when :gvar @lexer.send(:emit_global_var, @ts + 1, @te) end end def encode_escaped_char(p) @escape = encode_escape(tok(p - 2, p).to_i(16)) end def slash_c_char @escape = encode_escape(@escape[0].ord & 0x9f) end def slash_m_char @escape = encode_escape(@escape[0].ord | 0x80) end def emit_character_constant value = @escape || tok(@ts + 1) if version?(18) emit(:tINTEGER, value.getbyte(0)) else emit(:tCHARACTER, value) end end def check_ambiguous_slash(tm) if tok(tm, tm + 1) == '/'.freeze # Ambiguous regexp literal. if @version < 30 diagnostic :warning, :ambiguous_literal, nil, range(tm, tm + 1) else diagnostic :warning, :ambiguous_regexp, nil, range(tm, tm + 1) end end end def check_invalid_escapes(p) if emit_invalid_escapes? diagnostic :fatal, :invalid_unicode_escape, nil, range(@escape_s - 1, p) end end ESCAPE_WHITESPACE = { " " => '\s', "\r" => '\r', "\n" => '\n', "\t" => '\t', "\v" => '\v', "\f" => '\f' } %%{ # % access @; getkey (source_pts[p] || 0); # TODO: extract into shared included lexer # # === CHARACTER CLASSES === # # Pay close attention to the differences between c_any and any. # c_any does not include EOF and so will cause incorrect behavior # for machine subtraction (any-except rules) and default transitions # for scanners. action do_nl { # Record position of a newline for precise location reporting on tNL # tokens. 
# # This action is embedded directly into c_nl, as it is idempotent and # there are no cases when we need to skip it. @newline_s = p } c_nl = '\n' $ do_nl; c_space = [ \t\r\f\v]; c_space_nl = c_space | c_nl; c_eof = 0x04 | 0x1a | 0 | zlen; # ^D, ^Z, \0, EOF c_eol = c_nl | c_eof; c_any = any - c_eof; c_nl_zlen = c_nl | zlen; c_line = any - c_nl_zlen; c_ascii = 0x00..0x7f; c_unicode = c_any - c_ascii; c_upper = [A-Z]; c_lower = [a-z_] | c_unicode; c_alpha = c_lower | c_upper; c_alnum = c_alpha | [0-9]; bareword = c_alpha c_alnum*; # TODO: move to shared included lexer # # Interpolated variables via "#@foo" / "#$foo" global_var = '$' ( bareword | digit+ | [`'+~*$&?!@/\\;,.=:<>"] # ` | '-' c_alnum ) ; # Ruby accepts (and fails on) variables with leading digit # in literal context, but not in unquoted symbol body. class_var_v = '@@' c_alnum+; instance_var_v = '@' c_alnum+; # # === ESCAPE SEQUENCE PARSING === # # Escape parsing code is a Ragel pattern, not a scanner, and therefore # it shouldn't directly raise errors or perform other actions with side effects. # In reality this would probably just mess up error reporting in pathological # cases, through. # The amount of code required to parse \M\C stuff correctly is ridiculous. escaped_nl = "\\" c_nl; action unicode_points { unicode_points(p) } action unescape_char { unescape_char(p) } action invalid_complex_escape { diagnostic :fatal, :invalid_escape } action read_post_meta_or_ctrl_char { read_post_meta_or_ctrl_char(p) } action slash_c_char { slash_c_char } action slash_m_char { slash_m_char } maybe_escaped_char = ( '\\' c_any %unescape_char | '\\x' xdigit{1,2} % { encode_escaped_char(p) } %slash_c_char | ( c_any - [\\] ) %read_post_meta_or_ctrl_char ); maybe_escaped_ctrl_char = ( # why?! '\\' c_any %unescape_char %slash_c_char | '?' % { @escape = "\x7f" } | '\\x' xdigit{1,2} % { encode_escaped_char(p) } %slash_c_char | ( c_any - [\\?] ) %read_post_meta_or_ctrl_char %slash_c_char ); escape = ( # \377 [0-7]{1,3} % { @escape = encode_escape(tok(@escape_s, p).to_i(8) % 0x100) } # \xff | 'x' xdigit{1,2} % { @escape = encode_escape(tok(@escape_s + 1, p).to_i(16)) } # %q[\x] | 'x' ( c_any - xdigit ) % { diagnostic :fatal, :invalid_hex_escape, nil, range(@escape_s - 1, p + 2) } # \u263a | 'u' xdigit{4} % { @escape = tok(@escape_s + 1, p).to_i(16).chr(Encoding::UTF_8) } # \u123 | 'u' xdigit{0,3} % { check_invalid_escapes(p) } # u{not hex} or u{} | 'u{' ( c_any - xdigit - [ \t}] )* '}' % { check_invalid_escapes(p) } # \u{ \t 123 \t 456 \t\t } | 'u{' [ \t]* ( xdigit{1,6} [ \t]+ )* ( ( xdigit{1,6} [ \t]* '}' %unicode_points ) | ( xdigit* ( c_any - xdigit - [ \t}] )+ '}' | ( c_any - [ \t}] )* c_eof | xdigit{7,} ) % { diagnostic :fatal, :unterminated_unicode, nil, range(p - 1, p) } ) # \C-\a \cx | ( 'C-' | 'c' ) escaped_nl? maybe_escaped_ctrl_char # \M-a | 'M-' escaped_nl? maybe_escaped_char %slash_m_char # \C-\M-f \M-\cf \c\M-f | ( ( 'C-' | 'c' ) escaped_nl? '\\M-' | 'M-\\' escaped_nl? ( 'C-' | 'c' ) ) escaped_nl? maybe_escaped_ctrl_char %slash_m_char | 'C' c_any %invalid_complex_escape | 'M' c_any %invalid_complex_escape | ( 'M-\\C' | 'C-\\M' ) c_any %invalid_complex_escape | ( c_any - [0-7xuCMc] ) %unescape_char | c_eof % { diagnostic :fatal, :escape_eof, nil, range(p - 1, p) } ); # Use rules in form of `e_bs escape' when you need to parse a sequence. e_bs = '\\' % { @escape_s = p @escape = nil }; # # === STRING AND HEREDOC PARSING === # # Heredoc parsing is quite a complex topic. First, consider that heredocs # can be arbitrarily nested. 
For example: # # puts < 2.7 # If interpolated instance/class variable starts with a digit we parse it as a plain substring # However, "#$1" is still a regular interpolation interp_digit_var = '#' ('@' | '@@') digit c_alpha*; action extend_interp_digit_var { extend_interp_digit_var } # Interpolations with code blocks must match nested curly braces, as # interpolation ending is ambiguous with a block ending. So, every # opening and closing brace should be matched with e_[lr]brace rules, # which automatically perform the counting. # # Note that interpolations can themselves be nested, so brace balance # is tied to the innermost literal. # # Also note that literals themselves should not use e_[lr]brace rules # when matching their opening and closing delimiters, as the amount of # braces inside the characters of a string literal is independent. interp_code = '#{'; action extend_interp_code { current_literal = literal extend_interp_code(current_literal) @root_lexer_state = @lexer.class.lex_en_expr_value; fbreak; } # Actual string parsers are simply combined from the primitives defined # above. interp_words := |* interp_code => extend_interp_code; interp_digit_var => extend_interp_digit_var; interp_var => extend_interp_var; e_bs escape => extend_string_escaped; c_space+ => extend_string_space; c_eol => extend_string_eol; c_any => extend_string; *|; interp_string := |* interp_code => extend_interp_code; interp_digit_var => extend_interp_digit_var; interp_var => extend_interp_var; e_bs escape => extend_string_escaped; c_eol => extend_string_eol; c_any => extend_string; *|; plain_words := |* e_bs c_any => extend_string_escaped; c_space+ => extend_string_space; c_eol => extend_string_eol; c_any => extend_string; *|; plain_string := |* '\\' c_nl => extend_string_eol; e_bs c_any => extend_string_escaped; c_eol => extend_string_eol; c_any => extend_string; *|; interp_backslash_delimited := |* interp_code => extend_interp_code; interp_digit_var => extend_interp_digit_var; interp_var => extend_interp_var; c_eol => extend_string_eol; c_any => extend_string; *|; plain_backslash_delimited := |* c_eol => extend_string_eol; c_any => extend_string; *|; interp_backslash_delimited_words := |* interp_code => extend_interp_code; interp_digit_var => extend_interp_digit_var; interp_var => extend_interp_var; c_space+ => extend_string_space; c_eol => extend_string_eol; c_any => extend_string; *|; plain_backslash_delimited_words := |* c_space+ => extend_string_space; c_eol => extend_string_eol; c_any => extend_string; *|; regexp_modifiers := |* [A-Za-z]+ => { unknown_options = tok.scan(/[^imxouesn]/) if unknown_options.any? diagnostic :error, :regexp_options, { :options => unknown_options.join } end emit(:tREGEXP_OPT) @root_lexer_state = @lexer.class.lex_en_expr_end; fbreak; }; any => { emit(:tREGEXP_OPT, tok(@ts, @te - 1), @ts, @te - 1) fhold; @root_lexer_state = @lexer.class.lex_en_expr_end; fbreak; }; *|; character := |* # # AMBIGUOUS TERNARY OPERATOR # # Character constant, like ?a, ?\n, ?\u1000, and so on # Don't accept \u escape with multiple codepoints, like \u{1 2 3} '?' ( e_bs ( escape - ( '\u{' (xdigit+ [ \t]+)+ xdigit+ '}' )) | (c_any - c_space_nl - e_bs) % { @escape = nil } ) => { emit_character_constant @root_lexer_state = @lexer.class.lex_en_expr_end; fbreak; }; '?' 
c_space_nl => { escape = ESCAPE_WHITESPACE[source_buffer.slice(@ts + 1, 1)] diagnostic :warning, :invalid_escape_use, { :escape => escape }, range p = @ts - 1 @root_lexer_state = @lexer.class.lex_en_expr_end; fbreak; }; # f ?aa : b: Disambiguate with a character literal. '?' [A-Za-z_] bareword => { p = @ts - 1 @root_lexer_state = @lexer.class.lex_en_expr_end; fbreak; }; *|; unknown := |* c_any => { raise 'bug' }; *|; }%% # % end parser-3.3.4.2/lib/parser/lexer.rl000066400000000000000000001526151465510415600167160ustar00rootroot00000000000000%%machine lex; # % fix highlighting # # === BEFORE YOU START === # # Read the Ruby Hacking Guide chapter 11, available in English at # http://whitequark.org/blog/2013/04/01/ruby-hacking-guide-ch-11-finite-state-lexer/ # # Remember two things about Ragel scanners: # # 1) Longest match wins. # # 2) If two matches have the same length, the first # in source code wins. # # General rules of making Ragel and Bison happy: # # * `p` (position) and `@te` contain the index of the character # they're pointing to ("current"), plus one. `@ts` contains the index # of the corresponding character. The code for extracting matched token is: # # @source_buffer.slice(@ts...@te) # # * If your input is `foooooooobar` and the rule is: # # 'f' 'o'+ # # the result will be: # # foooooooobar # ^ ts=0 ^ p=te=9 # # * A Ragel lexer action should not emit more than one token, unless # you know what you are doing. # # * All Ragel commands (fnext, fgoto, ...) end with a semicolon. # # * If an action emits the token and transitions to another state, use # these Ragel commands: # # emit($whatever) # fnext $next_state; fbreak; # # If you perform `fgoto` in an action which does not emit a token nor # rewinds the stream pointer, the parser's side-effectful, # context-sensitive lookahead actions will break in a hard to detect # and debug way. # # * If an action does not emit a token: # # fgoto $next_state; # # * If an action features lookbehind, i.e. matches characters with the # intent of passing them to another action: # # p = @ts - 1 # fgoto $next_state; # # or, if the lookbehind consists of a single character: # # fhold; fgoto $next_state; # # * Ragel merges actions. So, if you have `e_lparen = '(' %act` and # `c_lparen = '('` and a lexer action `e_lparen | c_lparen`, the result # _will_ invoke the action `act`. # # e_something stands for "something with **e**mbedded action". # # * EOF is explicit and is matched by `c_eof`. 
If you want to introspect # the state of the lexer, add this rule to the state: # # c_eof => do_eof; # # * If you proceed past EOF, the lexer will complain: # # NoMethodError: undefined method `ord' for nil:NilClass # class Parser::Lexer %% write data nofinal; # % attr_reader :source_buffer attr_accessor :diagnostics attr_accessor :static_env attr_accessor :force_utf32 attr_accessor :cond, :cmdarg, :context, :command_start attr_accessor :tokens, :comments attr_reader :paren_nest, :cmdarg_stack, :cond_stack, :lambda_stack, :version def initialize(version) @version = version @static_env = nil @context = nil @tokens = nil @comments = nil @_lex_actions = if self.class.respond_to?(:_lex_actions, true) self.class.send :_lex_actions else [] end @emit_integer = lambda { |chars, p| emit(:tINTEGER, chars); p } @emit_rational = lambda { |chars, p| emit(:tRATIONAL, Rational(chars)); p } @emit_imaginary = lambda { |chars, p| emit(:tIMAGINARY, Complex(0, chars)); p } @emit_imaginary_rational = lambda { |chars, p| emit(:tIMAGINARY, Complex(0, Rational(chars))); p } @emit_integer_re = lambda { |chars, p| emit(:tINTEGER, chars, @ts, @te - 2); p - 2 } @emit_integer_if = lambda { |chars, p| emit(:tINTEGER, chars, @ts, @te - 2); p - 2 } @emit_integer_rescue = lambda { |chars, p| emit(:tINTEGER, chars, @ts, @te - 6); p - 6 } @emit_float = lambda { |chars, p| emit(:tFLOAT, Float(chars)); p } @emit_imaginary_float = lambda { |chars, p| emit(:tIMAGINARY, Complex(0, Float(chars))); p } @emit_float_if = lambda { |chars, p| emit(:tFLOAT, Float(chars), @ts, @te - 2); p - 2 } @emit_float_rescue = lambda { |chars, p| emit(:tFLOAT, Float(chars), @ts, @te - 6); p - 6 } reset end def reset(reset_state=true) # Ragel state: if reset_state # Unit tests set state prior to resetting lexer. @cs = self.class.lex_en_line_begin @cond = StackState.new('cond') @cmdarg = StackState.new('cmdarg') @cond_stack = [] @cmdarg_stack = [] end @force_utf32 = false # Set to true by some tests @source_pts = nil # @source as a codepoint array @p = 0 # stream position (saved manually in #advance) @ts = nil # token start @te = nil # token end @act = 0 # next action @stack = [] # state stack @top = 0 # state stack top pointer # Lexer state: @token_queue = [] @eq_begin_s = nil # location of last encountered =begin @sharp_s = nil # location of last encountered # @newline_s = nil # location of last encountered newline @num_base = nil # last numeric base @num_digits_s = nil # starting position of numeric digits @num_suffix_s = nil # starting position of numeric suffix @num_xfrm = nil # numeric suffix-induced transformation # Ruby 1.9 ->() lambdas emit a distinct token if do/{ is # encountered after a matching closing parenthesis. @paren_nest = 0 @lambda_stack = [] # If the lexer is in `command state' (aka expr_value) # at the entry to #advance, it will transition to expr_cmdarg # instead of expr_arg at certain points. @command_start = true # State before =begin / =end block comment @cs_before_block_comment = self.class.lex_en_line_begin @strings = Parser::LexerStrings.new(self, @version) end def source_buffer=(source_buffer) @source_buffer = source_buffer if @source_buffer source = @source_buffer.source if source.encoding == Encoding::UTF_8 @source_pts = source.unpack('U*') else @source_pts = source.unpack('C*') end if @source_pts[0] == 0xfeff # Skip byte order mark. 
@p = 1 end else @source_pts = nil end @strings.source_buffer = @source_buffer @strings.source_pts = @source_pts end def encoding @source_buffer.source.encoding end LEX_STATES = { :line_begin => lex_en_line_begin, :expr_dot => lex_en_expr_dot, :expr_fname => lex_en_expr_fname, :expr_value => lex_en_expr_value, :expr_beg => lex_en_expr_beg, :expr_mid => lex_en_expr_mid, :expr_arg => lex_en_expr_arg, :expr_cmdarg => lex_en_expr_cmdarg, :expr_end => lex_en_expr_end, :expr_endarg => lex_en_expr_endarg, :expr_endfn => lex_en_expr_endfn, :expr_labelarg => lex_en_expr_labelarg, :inside_string => lex_en_inside_string } def state LEX_STATES.invert.fetch(@cs, @cs) end def state=(state) @cs = LEX_STATES.fetch(state) end def push_cmdarg @cmdarg_stack.push(@cmdarg) @cmdarg = StackState.new("cmdarg.#{@cmdarg_stack.count}") end def pop_cmdarg @cmdarg = @cmdarg_stack.pop end def push_cond @cond_stack.push(@cond) @cond = StackState.new("cond.#{@cond_stack.count}") end def pop_cond @cond = @cond_stack.pop end def dedent_level @strings.dedent_level end # Return next token: [type, value]. def advance unless @token_queue.empty? return @token_queue.shift end # Ugly, but dependent on Ragel output. Consider refactoring it somehow. klass = self.class _lex_trans_keys = klass.send :_lex_trans_keys _lex_key_spans = klass.send :_lex_key_spans _lex_index_offsets = klass.send :_lex_index_offsets _lex_indicies = klass.send :_lex_indicies _lex_trans_targs = klass.send :_lex_trans_targs _lex_trans_actions = klass.send :_lex_trans_actions _lex_to_state_actions = klass.send :_lex_to_state_actions _lex_from_state_actions = klass.send :_lex_from_state_actions _lex_eof_trans = klass.send :_lex_eof_trans _lex_actions = @_lex_actions pe = @source_pts.size + 2 p, eof = @p, pe cmd_state = @command_start @command_start = false %% write exec; # % # Ragel creates a local variable called `testEof` but it doesn't use # it in any assignment. This dead code is here to swallow the warning. # It has no runtime cost because Ruby doesn't produce any instructions from it. if false testEof end @p = p if @token_queue.any? @token_queue.shift elsif @cs == klass.lex_error [ false, [ '$error'.freeze, range(p - 1, p) ] ] else eof = @source_pts.size [ false, [ '$eof'.freeze, range(eof, eof) ] ] end end protected def version?(*versions) versions.include?(@version) end def stack_pop @top -= 1 @stack[@top] end def tok(s = @ts, e = @te) @source_buffer.slice(s, e - s) end def range(s = @ts, e = @te) Parser::Source::Range.new(@source_buffer, s, e) end def emit(type, value = tok, s = @ts, e = @te) token = [ type, [ value, range(s, e) ] ] @token_queue.push(token) @tokens.push(token) if @tokens token end def emit_table(table, s = @ts, e = @te) value = tok(s, e) emit(table[value], value, s, e) end def emit_do(do_block=false) if @cond.active? emit(:kDO_COND, 'do'.freeze) elsif @cmdarg.active? || do_block emit(:kDO_BLOCK, 'do'.freeze) else emit(:kDO, 'do'.freeze) end end def arg_or_cmdarg(cmd_state) if cmd_state self.class.lex_en_expr_cmdarg else self.class.lex_en_expr_arg end end def emit_comment(s = @ts, e = @te) if @comments @comments.push(Parser::Source::Comment.new(range(s, e))) end if @tokens @tokens.push([ :tCOMMENT, [ tok(s, e), range(s, e) ] ]) end nil end def emit_comment_from_range(p, pe) emit_comment(@sharp_s, p == pe ? 
p - 2 : p) end def diagnostic(type, reason, arguments=nil, location=range, highlights=[]) @diagnostics.process( Parser::Diagnostic.new(type, reason, arguments, location, highlights)) end def e_lbrace @cond.push(false); @cmdarg.push(false) current_literal = @strings.literal if current_literal current_literal.start_interp_brace end end def numeric_literal_int digits = tok(@num_digits_s, @num_suffix_s) if digits.end_with? '_'.freeze diagnostic :error, :trailing_in_number, { :character => '_'.freeze }, range(@te - 1, @te) elsif digits.empty? && @num_base == 8 && version?(18) # 1.8 did not raise an error on 0o. digits = '0'.freeze elsif digits.empty? diagnostic :error, :empty_numeric elsif @num_base == 8 && (invalid_idx = digits.index(/[89]/)) invalid_s = @num_digits_s + invalid_idx diagnostic :error, :invalid_octal, nil, range(invalid_s, invalid_s + 1) end digits end def on_newline(p) @strings.on_newline(p) end def check_ambiguous_slash(tm) if tok(tm, tm + 1) == '/'.freeze # Ambiguous regexp literal. if @version < 30 diagnostic :warning, :ambiguous_literal, nil, range(tm, tm + 1) else diagnostic :warning, :ambiguous_regexp, nil, range(tm, tm + 1) end end end def emit_global_var(ts = @ts, te = @te) if tok(ts, te) =~ /^\$([1-9][0-9]*)$/ emit(:tNTH_REF, tok(ts + 1, te).to_i, ts, te) elsif tok =~ /^\$([&`'+])$/ emit(:tBACK_REF, tok(ts, te), ts, te) else emit(:tGVAR, tok(ts, te), ts, te) end end def emit_class_var(ts = @ts, te = @te) if tok(ts, te) =~ /^@@[0-9]/ diagnostic :error, :cvar_name, { :name => tok(ts, te) } end emit(:tCVAR, tok(ts, te), ts, te) end def emit_instance_var(ts = @ts, te = @te) if tok(ts, te) =~ /^@[0-9]/ diagnostic :error, :ivar_name, { :name => tok(ts, te) } end emit(:tIVAR, tok(ts, te), ts, te) end def emit_rbrace_rparen_rbrack emit_table(PUNCTUATION) if @version < 24 @cond.lexpop @cmdarg.lexpop else @cond.pop @cmdarg.pop end end def emit_colon_with_digits(p, tm, diag_msg) if @version >= 27 diagnostic :error, diag_msg, { name: tok(tm, @te) }, range(tm, @te) else emit(:tCOLON, tok(@ts, @ts + 1), @ts, @ts + 1) p = @ts end p end def emit_singleton_class emit(:kCLASS, 'class'.freeze, @ts, @ts + 5) emit(:tLSHFT, '<<'.freeze, @te - 2, @te) end # Mapping of strings to parser tokens. PUNCTUATION = { '=' => :tEQL, '&' => :tAMPER2, '|' => :tPIPE, '!' => :tBANG, '^' => :tCARET, '+' => :tPLUS, '-' => :tMINUS, '*' => :tSTAR2, '/' => :tDIVIDE, '%' => :tPERCENT, '~' => :tTILDE, ',' => :tCOMMA, ';' => :tSEMI, '.' => :tDOT, '..' => :tDOT2, '...' => :tDOT3, '[' => :tLBRACK2, ']' => :tRBRACK, '(' => :tLPAREN2, ')' => :tRPAREN, '?' => :tEH, ':' => :tCOLON, '&&' => :tANDOP, '||' => :tOROP, '-@' => :tUMINUS, '+@' => :tUPLUS, '~@' => :tTILDE, '**' => :tPOW, '->' => :tLAMBDA, '=~' => :tMATCH, '!~' => :tNMATCH, '==' => :tEQ, '!=' => :tNEQ, '>' => :tGT, '>>' => :tRSHFT, '>=' => :tGEQ, '<' => :tLT, '<<' => :tLSHFT, '<=' => :tLEQ, '=>' => :tASSOC, '::' => :tCOLON2, '===' => :tEQQ, '<=>' => :tCMP, '[]' => :tAREF, '[]=' => :tASET, '{' => :tLCURLY, '}' => :tRCURLY, '`' => :tBACK_REF2, '!@' => :tBANG, '&.' => :tANDDOT, } PUNCTUATION_BEGIN = { '&' => :tAMPER, '*' => :tSTAR, '**' => :tDSTAR, '+' => :tUPLUS, '-' => :tUMINUS, '::' => :tCOLON3, '(' => :tLPAREN, '{' => :tLBRACE, '[' => :tLBRACK, } KEYWORDS = { 'if' => :kIF_MOD, 'unless' => :kUNLESS_MOD, 'while' => :kWHILE_MOD, 'until' => :kUNTIL_MOD, 'rescue' => :kRESCUE_MOD, 'defined?' 
=> :kDEFINED, 'BEGIN' => :klBEGIN, 'END' => :klEND, } KEYWORDS_BEGIN = { 'if' => :kIF, 'unless' => :kUNLESS, 'while' => :kWHILE, 'until' => :kUNTIL, 'rescue' => :kRESCUE, 'defined?' => :kDEFINED, 'BEGIN' => :klBEGIN, 'END' => :klEND, } ESCAPE_WHITESPACE = { " " => '\s', "\r" => '\r', "\n" => '\n', "\t" => '\t', "\v" => '\v', "\f" => '\f' } %w(class module def undef begin end then elsif else ensure case when for break next redo retry in do return yield super self nil true false and or not alias __FILE__ __LINE__ __ENCODING__).each do |keyword| KEYWORDS_BEGIN[keyword] = KEYWORDS[keyword] = :"k#{keyword.upcase}" end %%{ # % access @; getkey (@source_pts[p] || 0); # === CHARACTER CLASSES === # # Pay close attention to the differences between c_any and any. # c_any does not include EOF and so will cause incorrect behavior # for machine subtraction (any-except rules) and default transitions # for scanners. action do_nl { # Record position of a newline for precise location reporting on tNL # tokens. # # This action is embedded directly into c_nl, as it is idempotent and # there are no cases when we need to skip it. @newline_s = p } c_nl = '\n' $ do_nl; c_space = [ \t\r\f\v]; c_space_nl = c_space | c_nl; c_eof = 0x04 | 0x1a | 0 | zlen; # ^D, ^Z, \0, EOF c_eol = c_nl | c_eof; c_any = any - c_eof; c_nl_zlen = c_nl | zlen; c_line = any - c_nl_zlen; c_ascii = 0x00..0x7f; c_unicode = c_any - c_ascii; c_upper = [A-Z]; c_lower = [a-z_] | c_unicode; c_alpha = c_lower | c_upper; c_alnum = c_alpha | [0-9]; action do_eof { # Sit at EOF indefinitely. #advance would return $eof each time. # This allows to feed the lexer more data if needed; this is only used # in tests. # # Note that this action is not embedded into e_eof like e_nl and e_bs # below. This is due to the fact that scanner state at EOF is observed # by tests, and encapsulating it in a rule would break the introspection. fhold; fbreak; } # # === TOKEN DEFINITIONS === # # All operators are punctuation. There is more to punctuation # than just operators. Operators can be overridden by user; # punctuation can not. # A list of operators which are valid in the function name context, but # have different semantics in others. operator_fname = '[]' | '[]=' | '`' | '-@' | '+@' | '~@' | '!@' ; # A list of operators which can occur within an assignment shortcut (+ → +=). operator_arithmetic = '&' | '|' | '&&' | '||' | '^' | '+' | '-' | '*' | '/' | '**' | '~' | '<<' | '>>' | '%' ; # A list of all user-definable operators not covered by groups above. operator_rest = '=~' | '!~' | '==' | '!=' | '!' | '===' | '<' | '<=' | '>' | '>=' | '<=>' | '=>' ; # Note that `{` and `}` need to be referred to as e_lbrace and e_rbrace, # as they are ambiguous with interpolation `#{}` and should be counted. # These braces are not present in punctuation lists. # A list of punctuation which has different meaning when used at the # beginning of expression. punctuation_begin = '-' | '+' | '::' | '(' | '[' | '*' | '**' | '&' ; # A list of all punctuation except punctuation_begin. punctuation_end = ',' | '=' | '->' | '(' | '[' | ']' | '::' | '?' | ':' | '.' | '..' | '...' ; # A list of keywords which have different meaning at the beginning of expression. keyword_modifier = 'if' | 'unless' | 'while' | 'until' | 'rescue' ; # A list of keywords which accept an argument-like expression, i.e. have the # same post-processing as method calls or commands. Example: `yield 1`, # `yield (1)`, `yield(1)`, are interpreted as if `yield` was a function. 
keyword_with_arg = 'yield' | 'super' | 'not' | 'defined?' ; # A list of keywords which accept a literal function name as an argument. keyword_with_fname = 'def' | 'undef' | 'alias' ; # A list of keywords which accept an expression after them. keyword_with_value = 'else' | 'case' | 'ensure' | 'module' | 'elsif' | 'then' | 'for' | 'in' | 'do' | 'when' | 'begin' | 'class' | 'and' | 'or' ; # A list of keywords which accept a value, and treat the keywords from # `keyword_modifier` list as modifiers. keyword_with_mid = 'rescue' | 'return' | 'break' | 'next' ; # A list of keywords which do not accept an expression after them. keyword_with_end = 'end' | 'self' | 'true' | 'false' | 'retry' | 'redo' | 'nil' | 'BEGIN' | 'END' | '__FILE__' | '__LINE__' | '__ENCODING__'; # All keywords. keyword = keyword_with_value | keyword_with_mid | keyword_with_end | keyword_with_arg | keyword_with_fname | keyword_modifier ; constant = c_upper c_alnum*; bareword = c_alpha c_alnum*; call_or_var = c_lower c_alnum*; class_var = '@@' bareword; instance_var = '@' bareword; global_var = '$' ( bareword | digit+ | [`'+~*$&?!@/\\;,.=:<>"] # ` | '-' c_alnum ) ; # Ruby accepts (and fails on) variables with leading digit # in literal context, but not in unquoted symbol body. class_var_v = '@@' c_alnum+; instance_var_v = '@' c_alnum+; label = bareword [?!]? ':'; # # === NUMERIC PARSING === # int_hex = ( xdigit+ '_' )* xdigit* '_'? ; int_dec = ( digit+ '_' )* digit* '_'? ; int_bin = ( [01]+ '_' )* [01]* '_'? ; flo_int = [1-9] [0-9]* ( '_' digit+ )* | '0'; flo_frac = '.' ( digit+ '_' )* digit+; flo_pow = [eE] [+\-]? ( digit+ '_' )* digit+; int_suffix = '' % { @num_xfrm = @emit_integer } | 'r' % { @num_xfrm = @emit_rational } | 'i' % { @num_xfrm = @emit_imaginary } | 'ri' % { @num_xfrm = @emit_imaginary_rational } | 're' % { @num_xfrm = @emit_integer_re } | 'if' % { @num_xfrm = @emit_integer_if } | 'rescue' % { @num_xfrm = @emit_integer_rescue }; flo_pow_suffix = '' % { @num_xfrm = @emit_float } | 'i' % { @num_xfrm = @emit_imaginary_float } | 'if' % { @num_xfrm = @emit_float_if }; flo_suffix = flo_pow_suffix | 'r' % { @num_xfrm = @emit_rational } | 'ri' % { @num_xfrm = @emit_imaginary_rational } | 'rescue' % { @num_xfrm = @emit_float_rescue }; # # === INTERPOLATION PARSING === # e_lbrace = '{' % { e_lbrace }; e_rbrace = '}' % { if @strings.close_interp_on_current_literal(p) fhold; fnext inside_string; fbreak; end @paren_nest -= 1 }; # # === WHITESPACE HANDLING === # # Various contexts in Ruby allow various kinds of whitespace # to be used. They are grouped to clarify the lexing machines # and ease collection of comments. # A line of code with inline #comment at end is always equivalent # to a line of code ending with just a newline, so an inline # comment is deemed equivalent to non-newline whitespace # (c_space character class). e_nl = c_nl % { p = on_newline(p) }; w_space = c_space+ | '\\' e_nl ; w_comment = '#' %{ @sharp_s = p - 1 } # The (p == pe) condition compensates for added "\0" and # the way Ragel handles EOF. c_line* %{ emit_comment_from_range(p, pe) } ; w_space_comment = w_space | w_comment ; # A newline in non-literal context always interoperates with # here document logic and can always be escaped by a backslash, # still interoperating with here document logic in the same way, # yet being invisible to anything else. 
# # To demonstrate: # # foo = <' %{ tm = p - 2 } | # a=>b a => b '===' %{ tm = p - 3 } # a===b a === b ; ambiguous_symbol_suffix = # actual parsed ambiguous_ident_suffix | '==>' %{ tm = p - 2 } # :a==>b :a= => b ; # Ambiguous with 1.9 hash labels. ambiguous_const_suffix = # actual parsed '::' %{ tm = p - 2 } # A::B A :: B ; # Resolving kDO/kDO_COND/kDO_BLOCK ambiguity requires embedding # @cond/@cmdarg-related code to e_lbrack, e_lparen and e_lbrace. e_lbrack = '[' % { @cond.push(false); @cmdarg.push(false) @paren_nest += 1 }; e_rbrack = ']' % { @paren_nest -= 1 }; # Ruby 1.9 lambdas require parentheses counting in order to # emit correct opening kDO/tLBRACE. e_lparen = '(' % { @cond.push(false); @cmdarg.push(false) @paren_nest += 1 if version?(18) @command_start = true end }; e_rparen = ')' % { @paren_nest -= 1 }; # Ruby is context-sensitive wrt/ local identifiers. action local_ident { emit(:tIDENTIFIER) if !@static_env.nil? && @static_env.declared?(tok) fnext expr_endfn; fbreak; elsif @version >= 32 && tok =~ /\A_[1-9]\z/ fnext expr_endfn; fbreak; else fnext *arg_or_cmdarg(cmd_state); fbreak; end } # Variable lexing code is accessed from both expressions and # string interpolation related code. # expr_variable := |* global_var => { emit_global_var fnext *stack_pop; fbreak; }; class_var_v => { emit_class_var fnext *stack_pop; fbreak; }; instance_var_v => { emit_instance_var fnext *stack_pop; fbreak; }; *|; # Literal function name in definition (e.g. `def class`). # Keywords are returned as their respective tokens; this is used # to support singleton def `def self.foo`. Global variables are # returned as `tGVAR`; this is used in global variable alias # statements `alias $a $b`. Symbols are returned verbatim; this # is used in `alias :a :"b#{foo}"` and `undef :a`. # # Transitions to `expr_endfn` afterwards. # expr_fname := |* keyword => { emit_table(KEYWORDS_BEGIN); fnext expr_endfn; fbreak; }; constant => { emit(:tCONSTANT) fnext expr_endfn; fbreak; }; bareword [?=!]? => { emit(:tIDENTIFIER) fnext expr_endfn; fbreak; }; global_var => { p = @ts - 1 fnext expr_end; fcall expr_variable; }; # If the handling was to be delegated to expr_end, # these cases would transition to something else than # expr_endfn, which is incorrect. operator_fname | operator_arithmetic | operator_rest => { emit_table(PUNCTUATION) fnext expr_endfn; fbreak; }; '::' => { fhold; fhold; fgoto expr_end; }; ':' => { fhold; fgoto expr_beg; }; '%s' (c_ascii - [A-Za-z0-9]) => { if version?(23) type, delimiter = tok[0..-2], tok[-1].chr @strings.push_literal(type, delimiter, @ts) fgoto inside_string; else p = @ts - 1 fgoto expr_end; end }; w_any; c_any => { fhold; fgoto expr_end; }; c_eof => do_eof; *|; # After literal function name in definition. Behaves like `expr_end`, # but allows a tLABEL. # # Transitions to `expr_end` afterwards. # expr_endfn := |* label ( any - ':' ) => { emit(:tLABEL, tok(@ts, @te - 2), @ts, @te - 1) fhold; fnext expr_labelarg; fbreak; }; '...' => { if @version >= 31 && @context.in_argdef emit(:tBDOT3, '...'.freeze) # emit(:tNL, "\n".freeze, @te - 1, @te) fnext expr_end; fbreak; else p -= 3; fgoto expr_end; end }; w_space_comment; c_any => { fhold; fgoto expr_end; }; c_eof => do_eof; *|; # Literal function name in method call (e.g. `a.class`). # # Transitions to `expr_arg` afterwards. 
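  # For example, in `a.class` the word `class` after the dot is matched by the
  # call_or_var rule of this machine and emitted as a plain tIDENTIFIER rather
  # than as a keyword token, which is what allows keywords to be used as
  # method names after `.`.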
# expr_dot := |* constant => { emit(:tCONSTANT) fnext *arg_or_cmdarg(cmd_state); fbreak; }; call_or_var => { emit(:tIDENTIFIER) fnext *arg_or_cmdarg(cmd_state); fbreak; }; bareword ambiguous_fid_suffix => { emit(:tFID, tok(@ts, tm), @ts, tm) fnext *arg_or_cmdarg(cmd_state); p = tm - 1; fbreak; }; # See the comment in `expr_fname`. operator_fname | operator_arithmetic | operator_rest => { emit_table(PUNCTUATION) fnext expr_arg; fbreak; }; w_any; c_any => { fhold; fgoto expr_end; }; c_eof => do_eof; *|; # The previous token emitted was a `tIDENTIFIER` or `tFID`; no space # is consumed; the current expression is a command or method call. # expr_arg := |* # # COMMAND MODE SPECIFIC TOKENS # # cmd (1 + 2) # See below the rationale about expr_endarg. w_space+ e_lparen => { if version?(18) emit(:tLPAREN2, '('.freeze, @te - 1, @te) fnext expr_value; fbreak; else emit(:tLPAREN_ARG, '('.freeze, @te - 1, @te) fnext expr_beg; fbreak; end }; # meth(1 + 2) # Regular method call. e_lparen => { emit(:tLPAREN2, '('.freeze) fnext expr_beg; fbreak; }; # meth [...] # Array argument. Compare with indexing `meth[...]`. w_space+ e_lbrack => { emit(:tLBRACK, '['.freeze, @te - 1, @te) fnext expr_beg; fbreak; }; # cmd {} # Command: method call without parentheses. w_space* e_lbrace => { if @lambda_stack.last == @paren_nest @lambda_stack.pop emit(:tLAMBEG, '{'.freeze, @te - 1, @te) else emit(:tLCURLY, '{'.freeze, @te - 1, @te) end @command_start = true @paren_nest += 1 fnext expr_value; fbreak; }; # # AMBIGUOUS TOKENS RESOLVED VIA EXPR_BEG # # a?? # Ternary operator '?' c_space_nl => { # Unlike expr_beg as invoked in the next rule, do not warn p = @ts - 1 fgoto expr_end; }; # a ?b, a? ? # Character literal or ternary operator w_space* '?' => { fhold; fgoto expr_beg; }; # a %{1}, a %[1] (but not "a %=1=" or "a % foo") # a /foo/ (but not "a / foo" or "a /=foo") # a < { check_ambiguous_slash(tm) p = tm - 1 fgoto expr_beg; }; # x *1 # Ambiguous splat, kwsplat or block-pass. w_space+ %{ tm = p } ( '+' | '-' | '*' | '&' | '**' ) => { diagnostic :warning, :ambiguous_prefix, { :prefix => tok(tm, @te) }, range(tm, @te) p = tm - 1 fgoto expr_beg; }; # x ::Foo # Ambiguous toplevel constant access. w_space+ '::' => { fhold; fhold; fgoto expr_beg; }; # x:b # Symbol. w_space* ':' => { fhold; fgoto expr_beg; }; w_space+ label => { p = @ts - 1; fgoto expr_beg; }; # # AMBIGUOUS TOKENS RESOLVED VIA EXPR_END # # a ? b # Ternary operator. w_space+ %{ tm = p } '?' c_space_nl => { p = tm - 1; fgoto expr_end; }; # x + 1: Binary operator or operator-assignment. w_space* operator_arithmetic ( '=' | c_space_nl )? | # x rescue y: Modifier keyword. w_space* keyword_modifier | # a &. b: Safe navigation operator. w_space* '&.' | # Miscellanea. w_space* punctuation_end => { p = @ts - 1 fgoto expr_end; }; w_space; w_comment => { fgoto expr_end; }; w_newline => { fhold; fgoto expr_end; }; c_any => { fhold; fgoto expr_beg; }; c_eof => do_eof; *|; # The previous token was an identifier which was seen while in the # command mode (that is, the state at the beginning of #advance was # expr_value). This state is very similar to expr_arg, but disambiguates # two very rare and specific condition: # * In 1.8 mode, "foo (lambda do end)". # * In 1.9+ mode, "f x: -> do foo do end end". expr_cmdarg := |* w_space+ e_lparen => { emit(:tLPAREN_ARG, '('.freeze, @te - 1, @te) if version?(18) fnext expr_value; fbreak; else fnext expr_beg; fbreak; end }; w_space* 'do' => { if @cond.active? 
emit(:kDO_COND, 'do'.freeze, @te - 2, @te) else emit(:kDO, 'do'.freeze, @te - 2, @te) end fnext expr_value; fbreak; }; c_any | # Disambiguate with the `do' rule above. w_space* bareword | w_space* label => { p = @ts - 1 fgoto expr_arg; }; c_eof => do_eof; *|; # The rationale for this state is pretty complex. Normally, if an argument # is passed to a command and then there is a block (tLCURLY...tRCURLY), # the block is attached to the innermost argument (`f` in `m f {}`), or it # is a parse error (`m 1 {}`). But there is a special case for passing a single # primary expression grouped with parentheses: if you write `m (1) {}` or # (2.0 only) `m () {}`, then the block is attached to `m`. # # Thus, we recognize the opening `(` of a command (remember, a command is # a method call without parens) as a tLPAREN_ARG; then, in parser, we recognize # `tLPAREN_ARG expr rparen` as a `primary_expr` and before rparen, set the # lexer's state to `expr_endarg`, which makes it emit the possibly following # `{` as `tLBRACE_ARG`. # # The default post-`expr_endarg` state is `expr_end`, so this state also handles # `do` (as `kDO_BLOCK` in `expr_beg`). expr_endarg := |* e_lbrace => { if @lambda_stack.last == @paren_nest @lambda_stack.pop emit(:tLAMBEG, '{'.freeze) else emit(:tLBRACE_ARG, '{'.freeze) end @paren_nest += 1 @command_start = true fnext expr_value; fbreak; }; 'do' => { emit_do(true) fnext expr_value; fbreak; }; w_space_comment; c_any => { fhold; fgoto expr_end; }; c_eof => do_eof; *|; # The rationale for this state is that several keywords accept value # (i.e. should transition to `expr_beg`), do not accept it like a command # (i.e. not an `expr_arg`), and must behave like a statement, that is, # accept a modifier if/while/etc. # expr_mid := |* keyword_modifier => { emit_table(KEYWORDS) fnext expr_beg; fbreak; }; bareword => { p = @ts - 1; fgoto expr_beg; }; w_space_comment; w_newline => { fhold; fgoto expr_end; }; c_any => { fhold; fgoto expr_beg; }; c_eof => do_eof; *|; # Beginning of an expression. # # Don't fallthrough to this state from `c_any`; make sure to handle # `c_space* c_nl` and let `expr_end` handle the newline. # Otherwise code like `f\ndef x` gets glued together and the parser # explodes. # expr_beg := |* # +5, -5, - 5 [+\-] w_any* [0-9] => { emit(:tUNARY_NUM, tok(@ts, @ts + 1), @ts, @ts + 1) fhold; fnext expr_end; fbreak; }; # splat *a '*' => { emit(:tSTAR, '*'.freeze) fbreak; }; # # STRING AND REGEXP LITERALS # # /regexp/oui # /=/ (disambiguation with /=) '/' c_any => { type = delimiter = tok[0].chr @strings.push_literal(type, delimiter, @ts) fhold; fgoto inside_string; }; # % '%' ( c_ascii - [A-Za-z0-9] ) => { type, delimiter = @source_buffer.slice(@ts, 1).chr, tok[-1].chr @strings.push_literal(type, delimiter, @ts) fgoto inside_string; }; # %w(we are the people) '%' [A-Za-z] (c_ascii - [A-Za-z0-9]) => { type, delimiter = tok[0..-2], tok[-1].chr @strings.push_literal(type, delimiter, @ts) fgoto inside_string; }; '%' c_eof => { diagnostic :fatal, :string_eof, nil, range(@ts, @ts + 1) }; # Heredoc start. # < { tok(@ts, heredoc_e) =~ /^<<(-?)(~?)(["'`]?)(.*)\3$/m indent = !$1.empty? || !$2.empty? dedent_body = !$2.empty? type = $3.empty? ? 
'<<"'.freeze : ('<<'.freeze + $3) delimiter = $4 if @version >= 27 if delimiter.count("\n") > 0 || delimiter.count("\r") > 0 diagnostic :error, :unterminated_heredoc_id, nil, range(@ts, @ts + 1) end elsif @version >= 24 if delimiter.count("\n") > 0 if delimiter.end_with?("\n") diagnostic :warning, :heredoc_id_ends_with_nl, nil, range(@ts, @ts + 1) delimiter = delimiter.rstrip else diagnostic :fatal, :heredoc_id_has_newline, nil, range(@ts, @ts + 1) end end end if dedent_body && version?(18, 19, 20, 21, 22) emit(:tLSHFT, '<<'.freeze, @ts, @ts + 2) p = @ts + 1 fnext expr_beg; fbreak; else @strings.push_literal(type, delimiter, @ts, heredoc_e, indent, dedent_body); @strings.herebody_s ||= new_herebody_s p = @strings.herebody_s - 1 fnext inside_string; end }; # Escaped unterminated heredoc start # <<'END | <<"END | <<`END | # <<-'END | <<-"END | <<-`END | # <<~'END | <<~"END | <<~`END # # If the heredoc is terminated the rule above should handle it '<<' [~\-]? ('"' (any - c_nl - '"')* |"'" (any - c_nl - "'")* |"`" (any - c_nl - "`") ) => { diagnostic :error, :unterminated_heredoc_id, nil, range(@ts, @ts + 1) }; # # SYMBOL LITERALS # # :&&, :|| ':' ('&&' | '||') => { fhold; fhold; emit(:tSYMBEG, tok(@ts, @ts + 1), @ts, @ts + 1) fgoto expr_fname; }; # :"bar", :'baz' ':' ['"] # ' => { type, delimiter = tok, tok[-1].chr @strings.push_literal(type, delimiter, @ts); fgoto inside_string; }; # :!@ is :! # :~@ is :~ ':' [!~] '@' => { emit(:tSYMBOL, tok(@ts + 1, @ts + 2)) fnext expr_end; fbreak; }; ':' bareword ambiguous_symbol_suffix => { emit(:tSYMBOL, tok(@ts + 1, tm), @ts, tm) p = tm - 1 fnext expr_end; fbreak; }; ':' ( bareword | global_var | class_var | instance_var | operator_fname | operator_arithmetic | operator_rest ) => { gvar_name = tok(@ts + 1) if @version >= 33 && gvar_name.start_with?('$0') && gvar_name.length > 2 diagnostic :error, :gvar_name, { :name => gvar_name }, range(@ts + 1, @te) end emit(:tSYMBOL, gvar_name, @ts) fnext expr_end; fbreak; }; ':' ( '@' %{ tm = p - 1; diag_msg = :ivar_name } | '@@' %{ tm = p - 2; diag_msg = :cvar_name } ) [0-9]* => { emit_colon_with_digits(p, tm, diag_msg) fnext expr_end; fbreak; }; # # AMBIGUOUS TERNARY OPERATOR # # Character constant, like ?a, ?\n, ?\u1000, and so on # Don't accept \u escape with multiple codepoints, like \u{1 2 3} '?' c_any => { p, next_state = @strings.read_character_constant(@ts) fhold; # Ragel will do `p += 1` to consume input, prevent it # If strings lexer founds a character constant (?a) emit it, # otherwise read ternary operator if @token_queue.empty? fgoto *next_state; else fnext *next_state; fbreak; end }; '?' c_eof => { diagnostic :fatal, :incomplete_escape, nil, range(@ts, @ts + 1) }; # # AMBIGUOUS EMPTY BLOCK ARGUMENTS # # Ruby >= 2.7 emits it as two tPIPE terminals # while Ruby < 2.7 as a single tOROP (like in `a || b`) '||' => { if @version >= 27 emit(:tPIPE, tok(@ts, @ts + 1), @ts, @ts + 1) fhold; fnext expr_beg; fbreak; else p -= 2 fgoto expr_end; end }; # # KEYWORDS AND PUNCTUATION # # a({b=>c}) e_lbrace => { if @lambda_stack.last == @paren_nest @lambda_stack.pop @command_start = true emit(:tLAMBEG, '{'.freeze) else emit(:tLBRACE, '{'.freeze) end @paren_nest += 1 fbreak; }; # a([1, 2]) e_lbrack => { emit(:tLBRACK, '['.freeze) fbreak; }; # a() e_lparen => { emit(:tLPAREN, '('.freeze) fbreak; }; # a(+b) punctuation_begin => { emit_table(PUNCTUATION_BEGIN) fbreak; }; # rescue Exception => e: Block rescue. # Special because it should transition to expr_mid. 'rescue' %{ tm = p } '=>'? 
=> { emit(:kRESCUE, 'rescue'.freeze, @ts, tm) p = tm - 1 fnext expr_mid; fbreak; }; # if a: Statement if. keyword_modifier => { emit_table(KEYWORDS_BEGIN) @command_start = true fnext expr_value; fbreak; }; # # RUBY 1.9 HASH LABELS # label ( any - ':' ) => { fhold; if version?(18) ident = tok(@ts, @te - 2) emit((@source_buffer.slice(@ts, 1) =~ /[A-Z]/) ? :tCONSTANT : :tIDENTIFIER, ident, @ts, @te - 2) fhold; # continue as a symbol if !@static_env.nil? && @static_env.declared?(ident) fnext expr_end; else fnext *arg_or_cmdarg(cmd_state); end else emit(:tLABEL, tok(@ts, @te - 2), @ts, @te - 1) fnext expr_labelarg; end fbreak; }; # # RUBY 2.7 BEGINLESS RANGE '..' => { if @version >= 27 emit(:tBDOT2) else emit(:tDOT2) end fnext expr_beg; fbreak; }; '...' c_nl? => { # Here we scan and conditionally emit "\n": # + if it's there # + and emitted we do nothing # + and not emitted we return `p` to "\n" to process it on the next scan # + if it's not there we do nothing followed_by_nl = @te - 1 == @newline_s nl_emitted = false dots_te = followed_by_nl ? @te - 1 : @te if @version >= 30 if @lambda_stack.any? && @lambda_stack.last + 1 == @paren_nest # To reject `->(...)` like `->...` emit(:tDOT3, '...'.freeze, @ts, dots_te) else emit(:tBDOT3, '...'.freeze, @ts, dots_te) if @version >= 31 && followed_by_nl && @context.in_argdef emit(:tNL, @te - 1, @te) nl_emitted = true end end elsif @version >= 27 emit(:tBDOT3, '...'.freeze, @ts, dots_te) else emit(:tDOT3, '...'.freeze, @ts, dots_te) end if followed_by_nl && !nl_emitted # return "\n" to process it on the next scan fhold; end fnext expr_beg; fbreak; }; # # CONTEXT-DEPENDENT VARIABLE LOOKUP OR COMMAND INVOCATION # # foo= bar: Disambiguate with bareword rule below. bareword ambiguous_ident_suffix | # def foo: Disambiguate with bareword rule below. keyword => { p = @ts - 1 fgoto expr_end; }; # a = 42; a [42]: Indexing. # def a; end; a [42]: Array argument. call_or_var => local_ident; (call_or_var - keyword) % { ident_tok = tok; ident_ts = @ts; ident_te = @te; } w_space+ '(' => { emit(:tIDENTIFIER, ident_tok, ident_ts, ident_te) p = ident_te - 1 if !@static_env.nil? && @static_env.declared?(ident_tok) && @version < 25 fnext expr_endfn; else fnext expr_cmdarg; end fbreak; }; # # WHITESPACE # w_any; e_nl '=begin' ( c_space | c_nl_zlen ) => { p = @ts - 1 @cs_before_block_comment = @cs fgoto line_begin; }; # # DEFAULT TRANSITION # # The following rules match most binary and all unary operators. # Rules for binary operators provide better error reporting. operator_arithmetic '=' | operator_rest | punctuation_end | c_any => { p = @ts - 1; fgoto expr_end; }; c_eof => do_eof; *|; # Special newline handling for "def a b:" # expr_labelarg := |* w_space_comment; w_newline => { if @context.in_kwarg fhold; fgoto expr_end; else fgoto line_begin; end }; c_any => { fhold; fgoto expr_beg; }; c_eof => do_eof; *|; # Like expr_beg, but no 1.9 label or 2.2 quoted label possible. 
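  # An illustrative example (echoing the `a:b: a(:b)` and `elsif b:c` notes in
  # this file): after a value-expecting keyword the lexer lands in this state,
  # so `elsif b:c` is read as `elsif b(:c)` and `b:` is never a hash label.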
# expr_value := |* # a:b: a(:b), a::B, A::B label (any - ':') => { p = @ts - 1 fgoto expr_end; }; # "bar", 'baz' ['"] # ' => { @strings.push_literal(tok, tok, @ts) fgoto inside_string; }; w_space_comment; w_newline => { fgoto line_begin; }; c_any => { fhold; fgoto expr_beg; }; c_eof => do_eof; *|; expr_end := |* # # STABBY LAMBDA # '->' => { emit(:tLAMBDA, '->'.freeze, @ts, @ts + 2) @lambda_stack.push @paren_nest fnext expr_endfn; fbreak; }; e_lbrace | 'do' => { if @lambda_stack.last == @paren_nest @lambda_stack.pop if tok == '{'.freeze emit(:tLAMBEG, '{'.freeze) else # 'do' emit(:kDO_LAMBDA, 'do'.freeze) end else if tok == '{'.freeze emit(:tLCURLY, '{'.freeze) else # 'do' emit_do end end if tok == '{'.freeze @paren_nest += 1 end @command_start = true fnext expr_value; fbreak; }; # # KEYWORDS # keyword_with_fname => { emit_table(KEYWORDS) fnext expr_fname; fbreak; }; 'class' w_any* '<<' => { emit_singleton_class fnext expr_value; fbreak; }; # a if b:c: Syntax error. keyword_modifier => { emit_table(KEYWORDS) fnext expr_beg; fbreak; }; # elsif b:c: elsif b(:c) keyword_with_value => { emit_table(KEYWORDS) @command_start = true fnext expr_value; fbreak; }; keyword_with_mid => { emit_table(KEYWORDS) fnext expr_mid; fbreak; }; keyword_with_arg => { emit_table(KEYWORDS) if version?(18) && tok == 'not'.freeze fnext expr_beg; fbreak; else fnext expr_arg; fbreak; end }; '__ENCODING__' => { if version?(18) emit(:tIDENTIFIER) unless !@static_env.nil? && @static_env.declared?(tok) fnext *arg_or_cmdarg(cmd_state); end else emit(:k__ENCODING__, '__ENCODING__'.freeze) end fbreak; }; keyword_with_end => { emit_table(KEYWORDS) fbreak; }; # # NUMERIC LITERALS # ( '0' [Xx] %{ @num_base = 16; @num_digits_s = p } int_hex | '0' [Dd] %{ @num_base = 10; @num_digits_s = p } int_dec | '0' [Oo] %{ @num_base = 8; @num_digits_s = p } int_dec | '0' [Bb] %{ @num_base = 2; @num_digits_s = p } int_bin | [1-9] digit* '_'? %{ @num_base = 10; @num_digits_s = @ts } int_dec | '0' digit* '_'? %{ @num_base = 8; @num_digits_s = @ts } int_dec ) %{ @num_suffix_s = p } int_suffix => { digits = numeric_literal_int if version?(18, 19, 20) emit(:tINTEGER, digits.to_i(@num_base), @ts, @num_suffix_s) p = @num_suffix_s - 1 else p = @num_xfrm.call(digits.to_i(@num_base), p) end fbreak; }; flo_frac flo_pow? => { diagnostic :error, :no_dot_digit_literal }; flo_int [eE] => { if version?(18, 19, 20) diagnostic :error, :trailing_in_number, { :character => tok(@te - 1, @te) }, range(@te - 1, @te) else emit(:tINTEGER, tok(@ts, @te - 1).to_i, @ts, @te - 1) fhold; fbreak; end }; flo_int flo_frac [eE] => { if version?(18, 19, 20) diagnostic :error, :trailing_in_number, { :character => tok(@te - 1, @te) }, range(@te - 1, @te) else emit(:tFLOAT, tok(@ts, @te - 1).to_f, @ts, @te - 1) fhold; fbreak; end }; flo_int ( flo_frac? 
flo_pow %{ @num_suffix_s = p } flo_pow_suffix | flo_frac %{ @num_suffix_s = p } flo_suffix ) => { digits = tok(@ts, @num_suffix_s) if version?(18, 19, 20) emit(:tFLOAT, Float(digits), @ts, @num_suffix_s) p = @num_suffix_s - 1 else p = @num_xfrm.call(digits, p) end fbreak; }; # # STRING AND XSTRING LITERALS # # `echo foo`, "bar", 'baz' '`' | ['"] # ' => { type, delimiter = tok, tok[-1].chr @strings.push_literal(type, delimiter, @ts, nil, false, false, true); fgoto inside_string; }; # # CONSTANTS AND VARIABLES # constant => { emit(:tCONSTANT) fnext *arg_or_cmdarg(cmd_state); fbreak; }; constant ambiguous_const_suffix => { emit(:tCONSTANT, tok(@ts, tm), @ts, tm) p = tm - 1; fbreak; }; global_var | class_var_v | instance_var_v => { p = @ts - 1; fcall expr_variable; }; # # METHOD CALLS # '.' | '&.' | '::' => { emit_table(PUNCTUATION) fnext expr_dot; fbreak; }; call_or_var => local_ident; bareword ambiguous_fid_suffix => { if tm == @te # Suffix was consumed, e.g. foo! emit(:tFID) else # Suffix was not consumed, e.g. foo!= emit(:tIDENTIFIER, tok(@ts, tm), @ts, tm) p = tm - 1 end fnext expr_arg; fbreak; }; # # OPERATORS # '*' | '=>' => { emit_table(PUNCTUATION) fnext expr_value; fbreak; }; # When '|', '~', '!', '=>' are used as operators # they do not accept any symbols (or quoted labels) after. # Other binary operators accept it. ( operator_arithmetic | operator_rest ) - ( '|' | '~' | '!' | '*' ) => { emit_table(PUNCTUATION); fnext expr_value; fbreak; }; ( e_lparen | '|' | '~' | '!' ) => { emit_table(PUNCTUATION) fnext expr_beg; fbreak; }; e_rbrace | e_rparen | e_rbrack => { emit_rbrace_rparen_rbrack if tok == '}'.freeze || tok == ']'.freeze if @version >= 25 fnext expr_end; else fnext expr_endarg; end else # ) # fnext expr_endfn; ? end fbreak; }; operator_arithmetic '=' => { emit(:tOP_ASGN, tok(@ts, @te - 1)) fnext expr_beg; fbreak; }; '?' => { emit(:tEH, '?'.freeze) fnext expr_value; fbreak; }; e_lbrack => { emit(:tLBRACK2, '['.freeze) fnext expr_beg; fbreak; }; '...' c_nl => { if @paren_nest == 0 diagnostic :warning, :triple_dot_at_eol, nil, range(@ts, @te - 1) end emit(:tDOT3, '...'.freeze, @ts, @te - 1) fhold; fnext expr_beg; fbreak; }; punctuation_end => { emit_table(PUNCTUATION) fnext expr_beg; fbreak; }; # # WHITESPACE # w_space_comment; w_newline => { fgoto leading_dot; }; ';' => { emit(:tSEMI, ';'.freeze) @command_start = true fnext expr_value; fbreak; }; '\\' c_line { diagnostic :error, :bare_backslash, nil, range(@ts, @ts + 1) fhold; }; c_any => { diagnostic :fatal, :unexpected, { :character => tok.inspect[1..-2] } }; c_eof => do_eof; *|; leading_dot := |* # Insane leading dots: # a #comment # # post-2.7 comment # .b: a.b # Here we use '\n' instead of w_newline to not modify @newline_s # and eventually properly emit tNL (c_space* w_space_comment '\n')+ => { if @version < 27 # Ruby before 2.7 doesn't support comments before leading dot. # If a line after "a" starts with a comment then "a" is a self-contained statement. # So in that case we emit a special tNL token and start reading the # next line as a separate statement. # # Note: block comments before leading dot are not supported on any version of Ruby. emit(:tNL, nil, @newline_s, @newline_s + 1) fhold; fnext line_begin; fbreak; end }; c_space* '..' => { emit(:tNL, nil, @newline_s, @newline_s + 1) if @version < 27 fhold; fnext line_begin; fbreak; else emit(:tBDOT2) fnext expr_beg; fbreak; end }; c_space* '...' 
=> { emit(:tNL, nil, @newline_s, @newline_s + 1) if @version < 27 fhold; fnext line_begin; fbreak; else emit(:tBDOT3) fnext expr_beg; fbreak; end }; c_space* %{ tm = p } ('.' | '&.') => { p = tm - 1; fgoto expr_end; }; any => { emit(:tNL, nil, @newline_s, @newline_s + 1) fhold; fnext line_begin; fbreak; }; *|; # # === EMBEDDED DOCUMENT (aka BLOCK COMMENT) PARSING === # line_comment := |* '=end' c_line* c_nl_zlen => { emit_comment(@eq_begin_s, @te) fgoto *@cs_before_block_comment; }; c_line* c_nl; c_line* zlen => { diagnostic :fatal, :embedded_document, nil, range(@eq_begin_s, @eq_begin_s + '=begin'.length) }; *|; line_begin := |* w_any; '=begin' ( c_space | c_nl_zlen ) => { @eq_begin_s = @ts fgoto line_comment; }; '__END__' ( c_eol - zlen ) => { p = pe - 3 }; c_any => { cmd_state = true; fhold; fgoto expr_value; }; c_eof => do_eof; *|; inside_string := |* any => { p, next_state = @strings.advance(p) fhold; # Ragel will do `p += 1` to consume input, prevent it fnext *next_state; fbreak; }; *|; }%% # % end parser-3.3.4.2/lib/parser/lexer/000077500000000000000000000000001465510415600163455ustar00rootroot00000000000000parser-3.3.4.2/lib/parser/lexer/dedenter.rb000066400000000000000000000043621465510415600204710ustar00rootroot00000000000000# frozen_string_literal: true module Parser class Lexer::Dedenter # Tab (\t) counts as 8 spaces TAB_WIDTH = 8 def initialize(dedent_level) @dedent_level = dedent_level @at_line_begin = true @indent_level = 0 end # For a heredoc like # <<-HERE # a # b # HERE # this method gets called with " a\n" and " b\n" # # However, the following heredoc: # # <<-HERE # a\ # b # HERE # calls this method only once with a string " a\\\n b\n" # # This is important because technically it's a single line, # but it has to be concatenated __after__ dedenting. # # It has no effect for non-squiggly heredocs, i.e. it simply removes "\\\n" # Of course, lexer could do it but once again: it's all because of dedenting. # def dedent(string) original_encoding = string.encoding # Prevent the following error when processing binary encoded source. # "\xC0".split # => ArgumentError (invalid byte sequence in UTF-8) lines = string.force_encoding(Encoding::BINARY).split("\\\n") if lines.length == 1 # If the line continuation sequence was found but there is no second # line, it was not really a line continuation and must be ignored. lines = [string.force_encoding(original_encoding)] else lines.map! {|s| s.force_encoding(original_encoding) } end if @at_line_begin lines_to_dedent = lines else _first, *lines_to_dedent = lines end lines_to_dedent.each do |line| left_to_remove = @dedent_level remove = 0 line.each_char do |char| break if left_to_remove <= 0 case char when ?\s remove += 1 left_to_remove -= 1 when ?\t break if TAB_WIDTH * (remove / TAB_WIDTH + 1) > @dedent_level remove += 1 left_to_remove -= TAB_WIDTH else # no more spaces or tabs break end end line.slice!(0, remove) end string.replace(lines.join) @at_line_begin = string.end_with?("\n") end def interrupt @at_line_begin = false end end end parser-3.3.4.2/lib/parser/lexer/explanation.rb000066400000000000000000000026061465510415600212200ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Lexer::Explanation def self.included(klass) klass.class_exec do alias_method :state_before_explanation=, :state= alias_method :advance_before_explanation, :advance remove_method :state=, :advance end end # Like #advance, but also pretty-print the token and its position # in the stream to `stdout`. 
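    #
    # A hedged usage sketch (illustrative; the exact wiring may differ): this
    # module is meant to be included into the lexer class, e.g.
    #
    #   Parser::Lexer.include(Parser::Lexer::Explanation)
    #
    # after which every #advance call also prints the decorated token and the
    # current lexer state to stdout.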
def advance type, (val, range) = advance_before_explanation more = "(in-kwarg)" if @context.in_kwarg puts decorate(range, Color.green("#{type} #{val.inspect}"), "#{state.to_s.ljust(12)} #{@cond} #{@cmdarg} #{more}") [ type, [val, range] ] end def state=(new_state) puts " #{Color.yellow(">>> STATE SET <<<", bold: true)} " + "#{new_state.to_s.ljust(12)} #{@cond} #{@cmdarg}".rjust(66) self.state_before_explanation = new_state end private def decorate(range, token, info) from, to = range.begin.column, range.end.column line = range.source_line + ' ' line[from...to] = Color.underline(line[from...to]) tail_len = to - from - 1 tail = '~' * (tail_len >= 0 ? tail_len : 0) decoration = "#{" " * from}#{Color.red("^#{tail}", bold: true)} #{token} ". ljust(68) + info [ line, decoration ] end end end parser-3.3.4.2/lib/parser/lexer/literal.rb000066400000000000000000000155561465510415600203420ustar00rootroot00000000000000# encoding: binary # frozen_string_literal: true module Parser class Lexer::Literal DELIMITERS = { '(' => ')', '[' => ']', '{' => '}', '<' => '>' } SPACE = ' '.ord TAB = "\t".ord TYPES = { # type start token interpolate? "'" => [ :tSTRING_BEG, false ], "<<'" => [ :tSTRING_BEG, false ], '%q' => [ :tSTRING_BEG, false ], '"' => [ :tSTRING_BEG, true ], '<<"' => [ :tSTRING_BEG, true ], '%' => [ :tSTRING_BEG, true ], '%Q' => [ :tSTRING_BEG, true ], '%w' => [ :tQWORDS_BEG, false ], '%W' => [ :tWORDS_BEG, true ], '%i' => [ :tQSYMBOLS_BEG, false ], '%I' => [ :tSYMBOLS_BEG, true ], ":'" => [ :tSYMBEG, false ], '%s' => [ :tSYMBEG, false ], ':"' => [ :tSYMBEG, true ], '/' => [ :tREGEXP_BEG, true ], '%r' => [ :tREGEXP_BEG, true ], '%x' => [ :tXSTRING_BEG, true ], '`' => [ :tXSTRING_BEG, true ], '<<`' => [ :tXSTRING_BEG, true ], } attr_reader :heredoc_e, :str_s, :dedent_level attr_accessor :saved_herebody_s def initialize(lexer, str_type, delimiter, str_s, heredoc_e = nil, indent = false, dedent_body = false, label_allowed = false) @lexer = lexer @nesting = 1 # DELIMITERS and TYPES are hashes with keys encoded in binary. # Coerce incoming data to the same encoding. str_type = coerce_encoding(str_type) delimiter = coerce_encoding(delimiter) unless TYPES.include?(str_type) lexer.send(:diagnostic, :error, :unexpected_percent_str, { :type => str_type }, @lexer.send(:range, str_s, str_s + 2)) end # String type. For :'foo', it is :' @str_type = str_type # Start of the string type specifier. @str_s = str_s @start_tok, @interpolate = TYPES[str_type] @start_delim = DELIMITERS.include?(delimiter) ? delimiter : nil @end_delim = DELIMITERS.fetch(delimiter, delimiter) @heredoc_e = heredoc_e @indent = indent @label_allowed = label_allowed @dedent_body = dedent_body @dedent_level = nil @interp_braces = 0 @space_emitted = true # Monolithic strings are glued into a single token, e.g. # tSTRING_BEG tSTRING_CONTENT tSTRING_END -> tSTRING. @monolithic = (@start_tok == :tSTRING_BEG && %w(' ").include?(str_type) && !heredoc?) # Capture opening delimiter in percent-literals. @str_type += delimiter if @str_type.start_with?('%'.freeze) clear_buffer emit_start_tok unless @monolithic end def interpolate? @interpolate end def words? type == :tWORDS_BEG || type == :tQWORDS_BEG || type == :tSYMBOLS_BEG || type == :tQSYMBOLS_BEG end def regexp? type == :tREGEXP_BEG end def heredoc? !!@heredoc_e end def plain_heredoc? heredoc? && !@dedent_body end def squiggly_heredoc? heredoc? && @dedent_body end def backslash_delimited? 
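      # True when the literal's closing delimiter is a backslash, as in
      # %q\foo\ (an illustrative example; the percent-literal rules in
      # lexer.rl accept any non-alphanumeric ASCII delimiter).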
@end_delim == '\\'.freeze end def type @start_tok end def munge_escape?(character) character = coerce_encoding(character) if words? && character =~ /[ \t\v\r\f\n]/ true else ['\\'.freeze, @start_delim, @end_delim].include?(character) end end def nest_and_try_closing(delimiter, ts, te, lookahead=nil) delimiter = coerce_encoding(delimiter) if @start_delim && @start_delim == delimiter @nesting += 1 elsif delimiter?(delimiter) @nesting -= 1 end # Finalize if last matching delimiter is closed. if @nesting == 0 if words? extend_space(ts, ts) end if lookahead && @label_allowed && lookahead[0] == ?: && lookahead[1] != ?: && @start_tok == :tSTRING_BEG # This is a quoted label. flush_string emit(:tLABEL_END, @end_delim, ts, te + 1) elsif @monolithic # Emit the string as a single token. emit(:tSTRING, @buffer, @str_s, te) else # If this is a heredoc, @buffer contains the sentinel now. # Just throw it out. Lexer flushes the heredoc after each # non-heredoc-terminating \n anyway, so no data will be lost. flush_string unless heredoc? emit(:tSTRING_END, @end_delim, ts, te) end end end def infer_indent_level(line) return if !@dedent_body indent_level = 0 line.each_char do |char| case char when ?\s indent_level += 1 when ?\t indent_level += (8 - indent_level % 8) else if @dedent_level.nil? || @dedent_level > indent_level @dedent_level = indent_level end break end end end def start_interp_brace @interp_braces += 1 end def end_interp_brace_and_try_closing @interp_braces -= 1 (@interp_braces == 0) end def extend_string(string, ts, te) @buffer_s ||= ts @buffer_e = te @buffer << string end def flush_string if @monolithic emit_start_tok @monolithic = false end unless @buffer.empty? emit(:tSTRING_CONTENT, @buffer, @buffer_s, @buffer_e) clear_buffer extend_content end end def extend_content @space_emitted = false end def extend_space(ts, te) flush_string unless @space_emitted emit(:tSPACE, nil, ts, te) @space_emitted = true end end def supports_line_continuation_via_slash? !words? && @interpolate end protected def delimiter?(delimiter) if heredoc? # This heredoc is valid: # <<~E # E # and this: # <<~E # E # but this one is not: # <<~' E' # E # because there are not enough leading spaces in the closing delimiter. delimiter.end_with?(@end_delim) && delimiter.sub(/#{Regexp.escape(@end_delim)}\z/, '').bytes.all? { |c| c == SPACE || c == TAB } elsif @indent @end_delim == delimiter.lstrip else @end_delim == delimiter end end def coerce_encoding(string) string.b end def clear_buffer @buffer = ''.dup # Prime the buffer with lexer encoding; otherwise, # concatenation will produce varying results. @buffer.force_encoding(@lexer.source_buffer.source.encoding) @buffer_s = nil @buffer_e = nil end def emit_start_tok str_e = @heredoc_e || @str_s + @str_type.length emit(@start_tok, @str_type, @str_s, str_e) end def emit(token, type, s, e) @lexer.send(:emit, token, type, s, e) end end end parser-3.3.4.2/lib/parser/lexer/stack_state.rb000066400000000000000000000012251465510415600211770ustar00rootroot00000000000000# frozen_string_literal: true module Parser class Lexer::StackState def initialize(name) @name = name.freeze clear end def clear @stack = 0 end def push(bit) bit_value = bit ? 1 : 0 @stack = (@stack << 1) | bit_value bit end def pop bit_value = @stack & 1 @stack >>= 1 bit_value == 1 end def lexpop @stack = ((@stack >> 1) | (@stack & 1)) @stack[0] == 1 end def active? @stack[0] == 1 end def empty? 
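      # The whole stack is packed into one Integer used as a bit set (see
      # #push/#pop), so it is empty exactly when that integer is zero,
      # e.g. right after #clear.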
@stack == 0 end def to_s "[#{@stack.to_s(2)} <= #{@name}]" end alias inspect to_s end end parser-3.3.4.2/lib/parser/macruby.y000066400000000000000000002340161465510415600170700ustar00rootroot00000000000000class Parser::MacRuby token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: top_compstmt top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN tLCURLY top_compstmt tRCURLY { result = @builder.preexe(val[0], val[1], val[2], val[3]) } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? 
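      # A bare `else` with no preceding `rescue` clauses is useless here,
      # so emit the :useless_else warning rather than a hard error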
diagnostic :warning, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt { result = [ val[0] ] } | stmts terms stmt { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | lhs tEQL command_call { result = @builder.assign(val[0], val[1], val[2]) } | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_call { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_call { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value tDOT tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tDOT tCONSTANT tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_call { diagnostic :error, :const_reassignment, nil, val[3] } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_call { @builder.op_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL arg_value { result = @builder.multi_assign(val[0], val[1], val[2]) } | mlhs tEQL mrhs { result = @builder.multi_assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | expr expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg expr_value: expr command_call: command | block_command block_command: block_call | block_call tDOT operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], *val[3]) } | block_call tCOLON2 operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], *val[3]) } cmd_brace_block: tLBRACE_ARG { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt 
tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } command: operation command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], *val[1]) } | operation command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], *val[1]) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tDOT operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], *val[3]) } | primary_value tDOT operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], *val[3]) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], *val[3]) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], *val[3]) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], *val[1]) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], *val[1]) } | kRETURN call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). 
concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value tDOT tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tDOT tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value tDOT tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tDOT tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fsym: fname { result = @builder.symbol_internal(val[0]) } | symbol fitem: fsym | dsym undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg { result = @builder.assign(val[0], val[1], val[2]) } | lhs tEQL arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.assign(val[0], val[1], rescue_) } | var_lhs tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | var_lhs 
tOP_ASGN arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.op_assign(val[0], val[1], rescue_) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value tDOT tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tDOT tCONSTANT tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg { diagnostic :error, :dynamic_const, nil, val[2], [ val[3] ] } | tCOLON3 tCONSTANT tOP_ASGN arg { diagnostic :error, :dynamic_const, nil, val[1], [ val[2] ] } | backref tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM tINTEGER tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( @builder.integer(val[1]), val[2], val[3])) } | tUNARY_NUM tFLOAT tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( @builder.float(val[1]), val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg opt_nl tCOLON arg { 
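        # the ternary operator `a ? b : c` (tEH is the `?`, tCOLON the `:`)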
result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | primary arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } paren_args: tLPAREN2 opt_call_args rparen { result = val } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | args tCOMMA assocs tCOMMA args opt_block_arg { val[2][-1] = @builder.objc_varargs(val[2][-1], val[4]) assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[5]) } | block_arg { result = [ val[0] ] } call_args2: arg_value tCOMMA args opt_block_arg { result = [ val[0], *val[2].concat(val[3]) ] } | arg_value tCOMMA block_arg { result = [ val[0], val[2] ] } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil), *val[1] ] } | arg_value tCOMMA assocs opt_block_arg { result = [ val[0], @builder.associate(nil, val[2], nil), *val[3] ] } | arg_value tCOMMA args tCOMMA assocs opt_block_arg { result = [ val[0], *val[2]. push(@builder.associate(nil, val[4], nil)). concat(val[5]) ] } | block_arg { result = [ val[0] ] } command_args: { result = @lexer.cmdarg.dup @lexer.cmdarg.push(true) } open_args { @lexer.cmdarg = val[0] result = val[1] } open_args: call_args { result = [ nil, val[0], nil ] } | tLPAREN_ARG { @lexer.state = :expr_endarg } rparen { result = [ val[0], [], val[2] ] } | tLPAREN_ARG call_args2 { @lexer.state = :expr_endarg } rparen { result = [ val[0], val[1], val[3] ] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | tCOMMA { result = [] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN bodystmt kEND { result = @builder.begin_keyword(val[0], val[1], val[2]) } | tLPAREN_ARG expr { @lexer.state = :expr_endarg } rparen { result = @builder.begin(val[0], val[1], val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | kRETURN { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, 
val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr rparen { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | operation brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | tLAMBDA { result = @context.dup @context.in_lambda = true } lambda { lambda_call = @builder.call_lambda(val[0]) args, (begin_t, body, end_t) = val[2] result = @builder.block(lambda_call, begin_t, args, body, end_t) @context.in_lambda = val[1].in_lambda } | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:while, val[0], val[2], val[3], val[5], val[6]) } | kUNTIL { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:until, val[0], val[2], val[3], val[5], val[6]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kFOR for_var kIN { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.for(val[0], val[1], val[2], val[4], val[5], val[7], val[8]) } | k_class cpath superclass { local_push @context.in_class = true } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | kDEF fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_method(val[0], val[1], val[3], val[4], val[5]) local_pop @context.in_def = val[2].in_def } | kDEF singleton dot_or_colon { @lexer.state = :expr_fname } fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_singleton(val[0], val[1], val[2], val[4], val[6], val[7], val[8]) local_pop @context.in_def = val[5].in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = 
@builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA tSTAR f_norm_arg { result = val[0]. push(@builder.objc_restarg(val[2], val[3])) } | f_marg_list tCOMMA tSTAR f_norm_arg tCOMMA f_marg_list { result = val[0]. push(@builder.objc_restarg(val[2], val[3])). concat(val[5]) } | f_marg_list tCOMMA tSTAR { result = val[0]. push(@builder.objc_restarg(val[2])) } | f_marg_list tCOMMA tSTAR tCOMMA f_marg_list { result = val[0]. push(@builder.objc_restarg(val[2])). concat(val[4]) } | tSTAR f_norm_arg { result = [ @builder.objc_restarg(val[0], val[1]) ] } | tSTAR f_norm_arg tCOMMA f_marg_list { result = [ @builder.objc_restarg(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.objc_restarg(val[0]) ] } | tSTAR tCOMMA f_marg_list { result = [ @builder.objc_restarg(val[0]), *val[2] ] } block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_f_block_arg { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). 
concat(val[3]) } | f_block_arg { result = [ val[0] ] } opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { result = @builder.args(val[0], val[1], val[2]) } | tOROP { result = @builder.args(val[0], [], val[0]) } | tPIPE block_param opt_bv_decl tPIPE { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: # nothing { result = [] } | tSEMI bv_decls { result = val[1] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: { @static_env.extend_dynamic } f_larglist lambda_body { result = [ val[1], val[2] ] @static_env.unextend } f_larglist: tLPAREN2 f_args opt_bv_decl rparen { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } compstmt kEND { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } do_block: kDO_BLOCK { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call tDOT operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call tCOLON2 operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } method_call: operation paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value tDOT operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value tDOT paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } | kDO { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], 
val[4] ] @static_env.unextend @context.in_block = val[1].in_block } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol | dsym strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { result = @builder.string_compose(val[0], val[1], val[2]) } | tSTRING { result = @builder.string(val[0]) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { result = @builder.xstring_compose(val[0], val[1], val[2]) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cond.push(false) @lexer.cmdarg.push(false) } compstmt tRCURLY { @lexer.cond.lexpop @lexer.cmdarg.lexpop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: tSYMBOL { result = @builder.symbol(val[0]) } dsym: tSYMBEG xstring_contents tSTRING_END { result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: tINTEGER { result = @builder.integer(val[0]) } | tFLOAT { result = @builder.float(val[0]) } | tUNARY_NUM tINTEGER =tLOWEST { num = @builder.integer(val[1]) if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], num) else result = @builder.unary_num(val[0], num) end } | tUNARY_NUM tFLOAT =tLOWEST { num = @builder.float(val[1]) if @builder.respond_to? 
:negate # AST builder interface compatibility result = @builder.negate(val[0], num) else result = @builder.unary_num(val[0], num) end } variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: variable { result = @builder.accessible(val[0]) } var_lhs: variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: term { result = nil } | tLT expr_value term { result = [ val[0], val[1] ] } | error term { yyerrok result = nil } f_arglist: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value } | f_args term { result = @builder.args(nil, val[0], nil) } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). 
concat(val[3]) } | f_block_arg { result = [ val[0] ] } | # nothing { result = [] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] result = @builder.arg(val[0]) } | tIDENTIFIER tASSOC tIDENTIFIER { @static_env.declare val[2][0] result = @builder.objc_kwarg(val[0], val[1], val[2]) } | tLABEL tIDENTIFIER { @static_env.declare val[1][0] result = @builder.objc_kwarg(val[0], nil, val[1]) } f_arg_item: f_norm_arg | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_opt: tIDENTIFIER tEQL arg_value { @static_env.declare val[0][0] result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: tIDENTIFIER tEQL primary_value { @static_env.declare val[0][0] result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | # nothing { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: tDOT | tCOLON2 opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 19 # closest released match: v1_9_0_2 end def default_encoding Encoding::BINARY end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop end parser-3.3.4.2/lib/parser/max_numparam_stack.rb000066400000000000000000000014151465510415600214260ustar00rootroot00000000000000# frozen_string_literal: true module Parser # Holds p->max_numparam from parse.y # # @api private class MaxNumparamStack attr_reader :stack ORDINARY_PARAMS = -1 def initialize @stack = [] end def empty? @stack.size == 0 end def has_ordinary_params! set(ORDINARY_PARAMS) end def has_ordinary_params? top == ORDINARY_PARAMS end def has_numparams? 
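      # True when the innermost scope has registered at least one numbered
      # parameter, i.e. its recorded maximum is positive (not 0 and not
      # ORDINARY_PARAMS).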
top && top > 0 end def register(numparam) set( [top, numparam].max ) end def top @stack.last[:value] end def push(static:) @stack.push(value: 0, static: static) end def pop @stack.pop[:value] end private def set(value) @stack.last[:value] = value end end end parser-3.3.4.2/lib/parser/messages.rb000066400000000000000000000162771465510415600173770ustar00rootroot00000000000000# frozen_string_literal: true module Parser ## # Diagnostic messages (errors, warnings and notices) that can be generated. # # @see Diagnostic # # @api public # MESSAGES = { # Lexer errors :unicode_point_too_large => 'invalid Unicode codepoint (too large)', :invalid_escape => 'invalid escape character syntax', :incomplete_escape => 'incomplete character syntax', :invalid_hex_escape => 'invalid hex escape', :invalid_unicode_escape => 'invalid Unicode escape', :unterminated_unicode => 'unterminated Unicode escape', :escape_eof => 'escape sequence meets end of file', :string_eof => 'unterminated string meets end of file', :regexp_options => 'unknown regexp options: %{options}', :cvar_name => "`%{name}' is not allowed as a class variable name", :ivar_name => "`%{name}' is not allowed as an instance variable name", :gvar_name => "`%{name}' is not allowed as a global variable name", :trailing_in_number => "trailing `%{character}' in number", :empty_numeric => 'numeric literal without digits', :invalid_octal => 'invalid octal digit', :no_dot_digit_literal => 'no .<digit> floating literal anymore; put 0 before dot', :bare_backslash => 'bare backslash only allowed before newline', :unexpected => "unexpected `%{character}'", :embedded_document => 'embedded document meets end of file (and they embark on a romantic journey)', :heredoc_id_has_newline => 'here document identifier across newlines, never match', :heredoc_id_ends_with_nl => 'here document identifier ends with a newline', :unterminated_heredoc_id => 'unterminated heredoc id', # Lexer warnings :invalid_escape_use => 'invalid character syntax; use ?%{escape}', :ambiguous_literal => 'ambiguous first argument; put parentheses or a space even after the operator', :ambiguous_regexp => "ambiguity between regexp and two divisions: wrap regexp in parentheses or add a space after `/' operator", :ambiguous_prefix => "`%{prefix}' interpreted as argument prefix", :triple_dot_at_eol => '...
at EOL, should be parenthesized', # Parser errors :nth_ref_alias => 'cannot define an alias for a back-reference variable', :begin_in_method => 'BEGIN in method', :backref_assignment => 'cannot assign to a back-reference variable', :invalid_assignment => 'cannot assign to a keyword', :module_name_const => 'class or module name must be a constant literal', :unexpected_token => 'unexpected token %{token}', :argument_const => 'formal argument cannot be a constant', :argument_ivar => 'formal argument cannot be an instance variable', :argument_gvar => 'formal argument cannot be a global variable', :argument_cvar => 'formal argument cannot be a class variable', :duplicate_argument => 'duplicate argument name', :empty_symbol => 'empty symbol literal', :odd_hash => 'odd number of entries for a hash', :singleton_literal => 'cannot define a singleton method for a literal', :dynamic_const => 'dynamic constant assignment', :const_reassignment => 'constant re-assignment', :module_in_def => 'module definition in method body', :class_in_def => 'class definition in method body', :unexpected_percent_str => '%{type}: unknown type of percent-literal', :block_and_blockarg => 'both block argument and literal block are passed', :masgn_as_condition => 'multiple assignment in conditional context', :block_given_to_yield => 'block given to yield', :invalid_regexp => '%{message}', :invalid_return => 'Invalid return in class/module body', :csend_in_lhs_of_masgn => '&. inside multiple assignment destination', :cant_assign_to_numparam => 'cannot assign to numbered parameter %{name}', :reserved_for_numparam => '%{name} is reserved for numbered parameter', :ordinary_param_defined => 'ordinary parameter is defined', :numparam_used_in_outer_scope => 'numbered parameter is already used in an outer scope', :circular_argument_reference => 'circular argument reference %{var_name}', :pm_interp_in_var_name => 'symbol literal with interpolation is not allowed', :lvar_name => "`%{name}' is not allowed as a local variable name", :undefined_lvar => "no such local variable: `%{name}'", :duplicate_variable_name => 'duplicate variable name %{name}', :duplicate_pattern_key => 'duplicate hash pattern key %{name}', :endless_setter => 'setter method cannot be defined in an endless method definition', :invalid_id_to_get => 'identifier %{identifier} is not valid to get', :forward_arg_after_restarg => '... 
after rest argument', :no_anonymous_blockarg => 'no anonymous block parameter', :no_anonymous_restarg => 'no anonymous rest parameter', :no_anonymous_kwrestarg => 'no anonymous keyword rest parameter', :ambiguous_anonymous_restarg => 'anonymous rest parameter is also used within block', :ambiguous_anonymous_kwrestarg => 'anonymous keyword rest parameter is also used within block', :ambiguous_anonymous_blockarg => 'anonymous block parameter is also used within block', # Parser warnings :useless_else => 'else without rescue is useless', :duplicate_hash_key => 'key is duplicated and overwritten', :ambiguous_it_call => '`it` calls without arguments refers to the first block param', # Parser errors that are not Ruby errors :invalid_encoding => 'literal contains escape sequences incompatible with UTF-8', # Rewriter diagnostics :invalid_action => 'cannot %{action}', :clobbered => 'clobbered by: %{action}', # Rewriter diagnostics :different_replacements => 'different replacements: %{replacement} vs %{other_replacement}', :swallowed_insertions => 'this replacement:', :swallowed_insertions_conflict => 'swallows some inner rewriting actions:', :crossing_deletions => 'the deletion of:', :crossing_deletions_conflict => 'is crossing:', :crossing_insertions => 'the rewriting action on:', :crossing_insertions_conflict => 'is crossing that on:', }.freeze # @api private module Messages # Formats the message, returns a raw template if there's nothing to interpolate # # Code like `format("", {})` gives a warning, and so this method tries interpolating # only if `arguments` hash is not empty. # # @api private def self.compile(reason, arguments) template = MESSAGES[reason] return template if Hash === arguments && arguments.empty? format(template, arguments) end end end parser-3.3.4.2/lib/parser/meta.rb000066400000000000000000000031111465510415600164750ustar00rootroot00000000000000# frozen_string_literal: true module Parser # Parser metadata module Meta # All node types that parser can produce. Not all parser versions # will be able to produce every possible node. NODE_TYPES = %i( true false nil int float str dstr sym dsym xstr regopt regexp array splat pair kwsplat hash irange erange self lvar ivar cvar gvar const defined? lvasgn ivasgn cvasgn gvasgn casgn mlhs masgn op_asgn and_asgn ensure rescue arg_expr or_asgn back_ref nth_ref match_with_lvasgn match_current_line module class sclass def defs undef alias args cbase arg optarg restarg blockarg block_pass kwarg kwoptarg kwrestarg kwnilarg send csend super zsuper yield block and not or if when case while until while_post until_post for break next redo return resbody kwbegin begin retry preexe postexe iflipflop eflipflop shadowarg complex rational __FILE__ __LINE__ __ENCODING__ ident lambda indexasgn index procarg0 restarg_expr blockarg_expr objc_kwarg objc_restarg objc_varargs numargs numblock forward_args forwarded_args forward_arg case_match in_match in_pattern match_var pin match_alt match_as match_rest array_pattern match_with_trailing_comma array_pattern_with_tail hash_pattern const_pattern if_guard unless_guard match_nil_pattern empty_else find_pattern kwargs match_pattern_p match_pattern forwarded_restarg forwarded_kwrestarg ).to_set.freeze end # Meta end # Parser parser-3.3.4.2/lib/parser/rewriter.rb000066400000000000000000000050121465510415600174140ustar00rootroot00000000000000# frozen_string_literal: true module Parser ## # {Parser::Rewriter} is deprecated. Use {Parser::TreeRewriter} instead. 
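# For example, a rewriter that turns bare `fail` calls into `raise` could be
# written against {Parser::TreeRewriter} like this (an illustrative sketch
# only; `ReplaceFail` is not shipped with this gem):
#
#     class ReplaceFail < Parser::TreeRewriter
#       def on_send(node)
#         receiver, method_name, *_args = *node
#         replace(node.loc.selector, 'raise') if receiver.nil? && method_name == :fail
#         super
#       end
#     end
#
#     buffer = Parser::Source::Buffer.new('(example)', source: 'fail "oops"')
#     ast    = Parser::CurrentRuby.parse(buffer.source)
#     ReplaceFail.new.rewrite(buffer, ast) # => 'raise "oops"'
#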
# It has a backwards compatible API and uses {Parser::Source::TreeRewriter} # instead of {Parser::Source::Rewriter}. # Please check the documentation for {Parser::Source::Rewriter} for details. # # @api public # @deprecated Use {Parser::TreeRewriter} # class Rewriter < Parser::AST::Processor ## # Rewrites the AST/source buffer and returns a String containing the new # version. # # @param [Parser::Source::Buffer] source_buffer # @param [Parser::AST::Node] ast # @return [String] # def rewrite(source_buffer, ast) @source_rewriter = Source::Rewriter.new(source_buffer) process(ast) @source_rewriter.process end ## # Returns `true` if the specified node is an assignment node, returns false # otherwise. # # @param [Parser::AST::Node] node # @return [Boolean] # def assignment?(node) [:lvasgn, :ivasgn, :gvasgn, :cvasgn, :casgn].include?(node.type) end ## # Removes the source range. # # @param [Parser::Source::Range] range # def remove(range) @source_rewriter.remove(range) end ## # Wraps the given source range with the given values. # # @param [Parser::Source::Range] range # @param [String] content # def wrap(range, before, after) @source_rewriter.wrap(range, before, after) end ## # Inserts new code before the given source range. # # @param [Parser::Source::Range] range # @param [String] content # def insert_before(range, content) @source_rewriter.insert_before(range, content) end ## # Inserts new code after the given source range. # # @param [Parser::Source::Range] range # @param [String] content # def insert_after(range, content) @source_rewriter.insert_after(range, content) end ## # Replaces the code of the source range `range` with `content`. # # @param [Parser::Source::Range] range # @param [String] content # def replace(range, content) @source_rewriter.replace(range, content) end DEPRECATION_WARNING = [ 'Parser::Rewriter is deprecated.', 'Please update your code to use Parser::TreeRewriter instead' ].join("\n").freeze extend Deprecation def initialize(*) self.class.warn_of_deprecation Source::Rewriter.warned_of_deprecation = true super end end end parser-3.3.4.2/lib/parser/ruby18.y000066400000000000000000002123341465510415600165570ustar00rootroot00000000000000class Parser::Ruby18 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tWORDS_BEG tQWORDS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING tSYMBOL tREGEXP_OPT tNL tEH tCOLON tCOMMA tSPACE tSEMI prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR 
kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: compstmt { result = val[0] } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? diagnostic :warning, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt { result = [ val[0] ] } | error stmt { result = [ val[1] ] } | stmts terms stmt { result = val[0] << val[2] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klBEGIN tLCURLY compstmt tRCURLY { if @context.in_def diagnostic :error, :begin_in_method, nil, val[0] end result = @builder.preexe(val[0], val[1], val[2], val[3]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | lhs tEQL command_call { result = @builder.assign(val[0], val[1], val[2]) } | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_call { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 aref_args tRBRACK tOP_ASGN command_call { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value tDOT tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tDOT tCONSTANT tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_call { @builder.op_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL arg_value { result = @builder.multi_assign(val[0], val[1], val[2]) } | mlhs tEQL mrhs { result = @builder.multi_assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | expr expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT expr { result = @builder.not_op(val[0], nil, val[1], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg expr_value: expr command_call: command | block_command | kRETURN call_args { 
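# `return`, `break` and `next` with arguments in command position all become keyword_cmd nodes (e.g. `return 1, 2`)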
result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } block_command: block_call | block_call tDOT operation2 command_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call tCOLON2 operation2 command_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } cmd_brace_block: tLBRACE_ARG { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_var compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } command: operation command_args =tLOWEST { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | operation command_args cmd_brace_block { lparen_t, args, rparen_t = val[1] method_call = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) begin_t, block_args, body, end_t = val[2] result = @builder.block(method_call, begin_t, block_args, body, end_t) } | primary_value tDOT operation2 command_args =tLOWEST { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tDOT operation2 command_args cmd_brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, block_args, body, end_t = val[4] result = @builder.block(method_call, begin_t, block_args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, block_args, body, end_t = val[4] result = @builder.block(method_call, begin_t, block_args, body, end_t) } | kSUPER command_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kYIELD command_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:yield, val[0], lparen_t, args, rparen_t) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_entry tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } mlhs_entry: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_entry tRPAREN { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head { result = val[0] } | mlhs_head mlhs_item { result = val[0] << val[1] } | mlhs_head tSTAR mlhs_node { result = val[0] << @builder.splat(val[1], val[2]) } | mlhs_head tSTAR { result = val[0] << @builder.splat(val[1]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR { result = [ @builder.splat(val[0]) ] } mlhs_item: mlhs_node | tLPAREN mlhs_entry tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_node: variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 aref_args tRBRACK { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value tDOT tIDENTIFIER { result = 
@builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tDOT tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 aref_args tRBRACK { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value tDOT tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tDOT tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fsym: fname { result = @builder.symbol_internal(val[0]) } | symbol fitem: fsym | dsym undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tGT | tGEQ | tLT | tLEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg { result = @builder.assign(val[0], val[1], val[2]) } | lhs tEQL arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.assign(val[0], val[1], rescue_) } | var_lhs tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 aref_args tRBRACK tOP_ASGN arg { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value tDOT tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tDOT tCONSTANT tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg { diagnostic :error, :dynamic_const, nil, val[2], [ val[3] ] } | tCOLON3 tCONSTANT tOP_ASGN arg { diagnostic :error, :dynamic_const, nil, val[1], [ val[2] ] } | backref tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = 
@builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM tINTEGER tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( @builder.integer(val[1]), val[2], val[3])) } | tUNARY_NUM tFLOAT tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( @builder.float(val[1]), val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[3], val[4]) } | primary arg_value: arg aref_args: none { result = [] } | command opt_nl { result = [ val[0] ] } | args trailer { result = val[0] } | args tCOMMA tSTAR arg opt_nl { result = val[0] << @builder.splat(val[2], val[3]) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } | tSTAR arg opt_nl { result = [ @builder.splat(val[0], val[1]) ] } paren_args: tLPAREN2 none tRPAREN { result = [ val[0], [], val[2] ] } | tLPAREN2 call_args opt_nl tRPAREN { result = [ val[0], val[1], val[3] ] } | tLPAREN2 block_call opt_nl tRPAREN { result = [ val[0], [ val[1] ], val[3] ] } | tLPAREN2 args tCOMMA block_call opt_nl tRPAREN { result = [ val[0], val[1] << val[3], val[5] ] } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | args tCOMMA tSTAR arg_value opt_block_arg { result = val[0].concat( [ @builder.splat(val[2], val[3]), *val[4] ]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil), *val[1] ] 
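# a bare hash argument such as `foo(:a => 1)` is wrapped into a single hash node by @builder.associate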
} | assocs tCOMMA tSTAR arg_value opt_block_arg { result = [ @builder.associate(nil, val[0], nil), @builder.splat(val[2], val[3]), *val[4] ] } | args tCOMMA assocs opt_block_arg { result = val[0].concat( [ @builder.associate(nil, val[2], nil), *val[3] ]) } | args tCOMMA assocs tCOMMA tSTAR arg opt_block_arg { result = val[0].concat( [ @builder.associate(nil, val[2], nil), @builder.splat(val[4], val[5]), *val[6] ]) } | tSTAR arg_value opt_block_arg { result = [ @builder.splat(val[0], val[1]), *val[2] ] } | block_arg { result = [ val[0] ] } call_args2: arg_value tCOMMA args opt_block_arg { result = [ val[0], *val[2].concat(val[3]) ] } | arg_value tCOMMA block_arg { result = [ val[0], val[2] ] } | arg_value tCOMMA tSTAR arg_value opt_block_arg { result = [ val[0], @builder.splat(val[2], val[3]), *val[4] ] } | arg_value tCOMMA args tCOMMA tSTAR arg_value opt_block_arg { result = [ val[0], *val[2]. push(@builder.splat(val[4], val[5])). concat(val[6]) ] } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil), *val[1] ] } | assocs tCOMMA tSTAR arg_value opt_block_arg { result = [ @builder.associate(nil, val[0], nil), @builder.splat(val[2], val[3]), *val[4] ] } | arg_value tCOMMA assocs opt_block_arg { result = [ val[0], @builder.associate(nil, val[2], nil), *val[3] ] } | arg_value tCOMMA args tCOMMA assocs opt_block_arg { result = [ val[0], *val[2]. push(@builder.associate(nil, val[4], nil)). concat(val[5]) ] } | arg_value tCOMMA assocs tCOMMA tSTAR arg_value opt_block_arg { result = [ val[0], @builder.associate(nil, val[2], nil), @builder.splat(val[4], val[5]), *val[6] ] } | arg_value tCOMMA args tCOMMA assocs tCOMMA tSTAR arg_value opt_block_arg { result = [ val[0], *val[2]. push(@builder.associate(nil, val[4], nil)). push(@builder.splat(val[6], val[7])). 
concat(val[8]) ] } | tSTAR arg_value opt_block_arg { result = [ @builder.splat(val[0], val[1]), *val[2] ] } | block_arg { result = [ val[0] ] } command_args: { result = @lexer.cmdarg.dup @lexer.cmdarg.push(true) } open_args { @lexer.cmdarg = val[0] result = val[1] } open_args: call_args { result = [ nil, val[0], nil ] } | tLPAREN_ARG { @lexer.state = :expr_endarg } tRPAREN { result = [ val[0], [], val[2] ] } | tLPAREN_ARG call_args2 { @lexer.state = :expr_endarg } tRPAREN { result = [ val[0], val[1], val[3] ] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | args tCOMMA arg_value { result = val[0] << val[2] } mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN bodystmt kEND { result = @builder.begin_keyword(val[0], val[1], val[2]) } | tLPAREN_ARG expr { @lexer.state = :expr_endarg } opt_nl tRPAREN { result = @builder.begin(val[0], val[1], val[4]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | primary_value tLBRACK2 aref_args tRBRACK { result = @builder.index(val[0], val[1], val[2], val[3]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | kRETURN { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args tRPAREN { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 tRPAREN { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr tRPAREN { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | operation brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:while, val[0], val[2], val[3], val[5], val[6]) } | kUNTIL { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:until, val[0], val[2], val[3], val[5], val[6]) } | kCASE expr_value opt_terms case_body kEND { when_bodies = val[3][0..-2] else_t, else_body = val[3][-1] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { when_bodies = val[2][0..-2] else_t, else_body = val[2][-1] result = @builder.case(val[0], nil, 
when_bodies, else_t, else_body, val[3]) } | kCASE opt_terms kELSE compstmt kEND { result = @builder.case(val[0], nil, [], val[2], val[3], val[4]) } | kFOR for_var kIN { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.for(val[0], val[1], val[2], val[4], val[5], val[7], val[8]) } | k_class cpath superclass { local_push @context.in_class = true } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | kDEF fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_method(val[0], val[1], val[3], val[4], val[5]) local_pop @context.in_def = val[2].in_def } | kDEF singleton dot_or_colon { @lexer.state = :expr_fname } fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_singleton(val[0], val[1], val[2], val[4], val[6], val[7], val[8]) local_pop @context.in_def = val[5].in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } then: term | tCOLON | kTHEN | term kTHEN { result = val[1] } do: term | tCOLON | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs block_par: mlhs_item { result = [ @builder.arg_expr(val[0]) ] } | block_par tCOMMA mlhs_item { result = val[0] << @builder.arg_expr(val[2]) } block_var: block_par | block_par tCOMMA | block_par tCOMMA tAMPER lhs { result = val[0]. push(@builder.blockarg_expr(val[2], val[3])) } | block_par tCOMMA tSTAR lhs tCOMMA tAMPER lhs { result = val[0]. push(@builder.restarg_expr(val[2], val[3])). push(@builder.blockarg_expr(val[5], val[6])) } | block_par tCOMMA tSTAR tCOMMA tAMPER lhs { result = val[0]. push(@builder.restarg_expr(val[2])). push(@builder.blockarg_expr(val[4], val[5])) } | block_par tCOMMA tSTAR lhs { result = val[0]. push(@builder.restarg_expr(val[2], val[3])) } | block_par tCOMMA tSTAR { result = val[0]. 
push(@builder.restarg_expr(val[2])) } | tSTAR lhs tCOMMA tAMPER lhs { result = [ @builder.restarg_expr(val[0], val[1]), @builder.blockarg_expr(val[3], val[4]) ] } | tSTAR tCOMMA tAMPER lhs { result = [ @builder.restarg_expr(val[0]), @builder.blockarg_expr(val[2], val[3]) ] } | tSTAR lhs { result = [ @builder.restarg_expr(val[0], val[1]) ] } | tSTAR { result = [ @builder.restarg_expr(val[0]) ] } | tAMPER lhs { result = [ @builder.blockarg_expr(val[0], val[1]) ] } ; opt_block_var: # nothing { result = @builder.args(nil, [], nil) } | tPIPE tPIPE { result = @builder.args(val[0], [], val[1]) } | tOROP { result = @builder.args(val[0], [], val[0]) } | tPIPE block_var tPIPE { result = @builder.args(val[0], val[1], val[2], false) } do_block: kDO_BLOCK { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_var compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call tDOT operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call tCOLON2 operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } method_call: operation paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value tDOT operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } brace_block: tLCURLY { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_var compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } | kDO { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_var compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } case_body: kWHEN when_args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } when_args: args | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } cases: opt_else { result = [ val[0] ] } | case_body opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | # nothing { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol | dsym strings: string { result = @builder.string_compose(nil, val[0], nil) } string: 
string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { result = @builder.string_compose(val[0], val[1], val[2]) } | tSTRING { result = @builder.string(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { result = @builder.xstring_compose(val[0], val[1], val[2]) } regexp: tREGEXP_BEG xstring_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cond.push(false) @lexer.cmdarg.push(false) } compstmt tRCURLY { @lexer.cond.lexpop @lexer.cmdarg.lexpop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: tSYMBOL { result = @builder.symbol(val[0]) } dsym: tSYMBEG xstring_contents tSTRING_END { result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: tINTEGER { result = @builder.integer(val[0]) } | tFLOAT { result = @builder.float(val[0]) } | tUNARY_NUM tINTEGER =tLOWEST { num = @builder.integer(val[1]) if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], num) else result = @builder.unary_num(val[0], num) end } | tUNARY_NUM tFLOAT =tLOWEST { num = @builder.float(val[1]) if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], num) else result = @builder.unary_num(val[0], num) end } variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } var_ref: variable { result = @builder.accessible(val[0]) } var_lhs: variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: term { result = nil } | tLT expr_value term { result = [ val[0], val[1] ] } | error term { yyerrok result = nil } f_arglist: tLPAREN2 f_args opt_nl tRPAREN { result = @builder.args(val[0], val[1], val[3]) @lexer.state = :expr_beg } | f_args term { result = @builder.args(nil, val[0], nil) } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. 
concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_optarg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_rest_arg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_block_arg { result = [ val[0] ] } | # nothing { result = [] } f_norm_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } | tIDENTIFIER { @static_env.declare val[0][0] result = @builder.arg(val[0]) } f_arg: f_norm_arg { result = [ val[0] ] } | f_arg tCOMMA f_norm_arg { result = val[0] << val[2] } f_opt: tIDENTIFIER tEQL arg_value { @static_env.declare val[0][0] result = @builder.optarg(val[0], val[1], val[2]) } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | # nothing { result = [] } singleton: var_ref | tLPAREN2 expr opt_nl tRPAREN { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer { result = val[0] } | args trailer { result = @builder.pair_list_18(val[0]) } assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: tDOT | tCOLON2 opt_terms: | terms opt_nl: | tNL trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 18 end def default_encoding Encoding::BINARY end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop end parser-3.3.4.2/lib/parser/ruby19.y000066400000000000000000002320111465510415600165520ustar00rootroot00000000000000class Parser::Ruby19 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tPLUS tMINUS tLT tGT tPIPE 
tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: top_compstmt top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN tLCURLY top_compstmt tRCURLY { result = @builder.preexe(val[0], val[1], val[2], val[3]) } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? diagnostic :warning, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt { result = [ val[0] ] } | stmts terms stmt { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_call { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_call { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value tDOT tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tDOT tCONSTANT tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_call { diagnostic :error, :const_reassignment, nil, val[3] } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( 
@builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_call { @builder.op_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL arg_value { result = @builder.multi_assign(val[0], val[1], val[2]) } | mlhs tEQL mrhs { result = @builder.multi_assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | expr command_asgn: lhs tEQL command_call { result = @builder.assign(val[0], val[1], val[2]) } | lhs tEQL command_asgn { result = @builder.assign(val[0], val[1], val[2]) } expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg expr_value: expr command_call: command | block_command block_command: block_call | block_call tDOT operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | block_call tCOLON2 operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } cmd_brace_block: tLBRACE_ARG { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } command: operation command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | operation command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tDOT operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tDOT operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | kRETURN call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. 
push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value tDOT tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tDOT tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value tDOT tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tDOT tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fsym: fname { result = @builder.symbol_internal(val[0]) } | symbol fitem: fsym | dsym undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg { result = 
@builder.assign(val[0], val[1], val[2]) } | lhs tEQL arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.assign(val[0], val[1], rescue_) } | var_lhs tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.op_assign(val[0], val[1], rescue_) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value tDOT tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tDOT tCONSTANT tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg { diagnostic :error, :dynamic_const, nil, val[2], [ val[3] ] } | tCOLON3 tCONSTANT tOP_ASGN arg { diagnostic :error, :dynamic_const, nil, val[1], [ val[2] ] } | backref tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM tINTEGER tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( @builder.integer(val[1]), val[2], val[3])) } | tUNARY_NUM tFLOAT tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( @builder.float(val[1]), val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { 
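# binary operators such as `>>` are ordinary method calls; @builder.binary_op emits the corresponding send node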
result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | primary arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } paren_args: tLPAREN2 opt_call_args rparen { result = val } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { result = @lexer.cmdarg.dup @lexer.cmdarg.push(true) } call_args { @lexer.cmdarg = val[0] result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN bodystmt kEND { result = @builder.begin_keyword(val[0], val[1], val[2]) } | tLPAREN_ARG { result = @lexer.cmdarg.dup @lexer.cmdarg.clear } expr { @lexer.state = :expr_endarg } opt_nl tRPAREN { @lexer.cmdarg = val[1] result = @builder.begin(val[0], val[2], val[5]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | kRETURN { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr rparen { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | operation brace_block { 
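# A bare method name followed by a brace block: the block nonterminals yield
# a [begin_t, args, body, end_t] tuple, and @builder.block wraps the plain
# method call into a block node carrying those delimiter tokens.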
method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | tLAMBDA { result = @context.dup @context.in_lambda = true } lambda { lambda_call = @builder.call_lambda(val[0]) args, (begin_t, body, end_t) = val[2] result = @builder.block(lambda_call, begin_t, args, body, end_t) @context.in_lambda = val[1].in_lambda } | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:while, val[0], val[2], val[3], val[5], val[6]) } | kUNTIL { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:until, val[0], val[2], val[3], val[5], val[6]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kFOR for_var kIN { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.for(val[0], val[1], val[2], val[4], val[5], val[7], val[8]) } | k_class cpath superclass { local_push @context.in_class = true } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | kDEF fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_method(val[0], val[1], val[3], val[4], val[5]) local_pop @context.in_def = val[2].in_def } | kDEF singleton dot_or_colon { @lexer.state = :expr_fname } fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_singleton(val[0], val[1], val[2], val[4], val[6], val[7], val[8]) local_pop @context.in_def = val[5].in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { 
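# if_tail recurses through elsif chains: each kELSIF builds a nested
# condition that becomes the else branch of the enclosing if, so
# `if a ... elsif b ... end` ends up as an if node whose else branch is
# another if node.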
else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { @static_env.declare val[0][0] result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA tSTAR f_norm_arg { @static_env.declare val[3][0] result = val[0]. push(@builder.restarg(val[2], val[3])) } | f_marg_list tCOMMA tSTAR f_norm_arg tCOMMA f_marg_list { @static_env.declare val[3][0] result = val[0]. push(@builder.restarg(val[2], val[3])). concat(val[5]) } | f_marg_list tCOMMA tSTAR { result = val[0]. push(@builder.restarg(val[2])) } | f_marg_list tCOMMA tSTAR tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2])). concat(val[4]) } | tSTAR f_norm_arg { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | tSTAR f_norm_arg tCOMMA f_marg_list { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.restarg(val[0]) ] } | tSTAR tCOMMA f_marg_list { result = [ @builder.restarg(val[0]), *val[2] ] } block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_f_block_arg { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). 
concat(val[3]) } | f_block_arg { result = [ val[0] ] } opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { result = @builder.args(val[0], val[1], val[2]) } | tOROP { result = @builder.args(val[0], [], val[0]) } | tPIPE block_param opt_bv_decl tPIPE { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: # nothing { result = [] } | tSEMI bv_decls { result = val[1] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: { @static_env.extend_dynamic } f_larglist lambda_body { result = [ val[1], val[2] ] @static_env.unextend } f_larglist: tLPAREN2 f_args opt_bv_decl rparen { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } compstmt kEND { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } do_block: kDO_BLOCK { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call tDOT operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call tCOLON2 operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } method_call: operation paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value tDOT operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value tDOT paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } | kDO { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], 
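# Both brace_block variants (tLCURLY...tRCURLY and kDO...kEND) extend the
# static environment dynamically before the parameters and body are parsed
# and unextend it afterwards, so block-local variables and shadow args
# (declared after `;` in the parameter list) do not leak into the enclosing
# scope. The captured tuple is again [begin_t, args, body, end_t].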
val[4] ] @static_env.unextend @context.in_block = val[1].in_block } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol | dsym strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { result = @builder.string_compose(val[0], val[1], val[2]) } | tSTRING { result = @builder.string(val[0]) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { result = @builder.xstring_compose(val[0], val[1], val[2]) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cond.push(false) @lexer.cmdarg.push(false) } compstmt tRCURLY { @lexer.cond.lexpop @lexer.cmdarg.lexpop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: tSYMBOL { result = @builder.symbol(val[0]) } dsym: tSYMBEG xstring_contents tSTRING_END { result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: tINTEGER { result = @builder.integer(val[0]) } | tFLOAT { result = @builder.float(val[0]) } | tUNARY_NUM tINTEGER =tLOWEST { num = @builder.integer(val[1]) if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], num) else result = @builder.unary_num(val[0], num) end } | tUNARY_NUM tFLOAT =tLOWEST { num = @builder.float(val[1]) if @builder.respond_to? 
:negate # AST builder interface compatibility result = @builder.negate(val[0], num) else result = @builder.unary_num(val[0], num) end } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: term { result = nil } | tLT expr_value term { result = [ val[0], val[1] ] } | error term { yyerrok result = nil } f_arglist: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value } | f_args term { result = @builder.args(nil, val[0], nil) } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). 
concat(val[3]) } | f_block_arg { result = [ val[0] ] } | # nothing { result = [] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER f_arg_item: f_norm_arg { @static_env.declare val[0][0] result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_opt: tIDENTIFIER tEQL arg_value { @static_env.declare val[0][0] result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: tIDENTIFIER tEQL primary_value { @static_env.declare val[0][0] result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | # nothing { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: tDOT | tCOLON2 opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 19 end def default_encoding Encoding::BINARY end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop end parser-3.3.4.2/lib/parser/ruby20.y000066400000000000000000002455271465510415600165620ustar00rootroot00000000000000class Parser::Ruby20 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG 
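# Ruby 2.0 grammar. Relative to the 1.9 grammar above, the token set adds
# tDSTAR (the `**` double splat used for keyword rest arguments and kwsplat
# hash entries) and tSYMBOLS_BEG/tQSYMBOLS_BEG (the %I[]/%i[] symbol array
# literals); the corresponding rules appear further down.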
tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: top_compstmt top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN tLCURLY top_compstmt tRCURLY { result = @builder.preexe(val[0], val[1], val[2], val[3]) } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? diagnostic :warning, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN tLCURLY top_compstmt tRCURLY { if @context.in_def diagnostic :error, :begin_in_method, nil, val[0] end result = @builder.preexe(val[0], val[1], val[2], val[3]) } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_call { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_call { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value tDOT tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tDOT tCONSTANT tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) 
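# Unlike the 1.9 grammar, where `A::B op= x` raises a :dynamic_const error,
# the 2.0 rules below route scoped-constant op-assignment through
# @builder.const_op_assignable, matching MRI 2.0, which accepts `A::B ||= x`.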
} | primary_value tCOLON2 tCONSTANT tOP_ASGN command_call { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_call { @builder.op_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL arg_value { result = @builder.multi_assign(val[0], val[1], val[2]) } | mlhs tEQL mrhs { result = @builder.multi_assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | expr command_asgn: lhs tEQL command_call { result = @builder.assign(val[0], val[1], val[2]) } | lhs tEQL command_asgn { result = @builder.assign(val[0], val[1], val[2]) } expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg expr_value: expr command_call: command | block_command block_command: block_call | block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } cmd_brace_block: tLBRACE_ARG { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tDOT operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tDOT operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | kRETURN call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = 
val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value tDOT tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tDOT tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value tDOT tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tDOT tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fsym: fname { result = @builder.symbol_internal(val[0]) } | symbol fitem: fsym | dsym undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | 
kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg { result = @builder.assign(val[0], val[1], val[2]) } | lhs tEQL arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.assign(val[0], val[1], rescue_) } | var_lhs tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.op_assign(val[0], val[1], rescue_) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value tDOT tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tDOT tCONSTANT tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM tINTEGER tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( @builder.integer(val[1]), val[2], val[3])) } | tUNARY_NUM tFLOAT tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( @builder.float(val[1]), val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], 
val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | primary arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } paren_args: tLPAREN2 opt_call_args rparen { result = val } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { result = @lexer.cmdarg.dup @lexer.cmdarg.push(true) } call_args { @lexer.cmdarg = val[0] result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { result = @lexer.cmdarg.dup @lexer.cmdarg.clear } bodystmt kEND { @lexer.cmdarg = val[1] result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG { result = @lexer.cmdarg.dup @lexer.cmdarg.clear } expr { @lexer.state = :expr_endarg } opt_nl tRPAREN { @lexer.cmdarg = val[1] result = @builder.begin(val[0], val[2], val[5]) } | tLPAREN_ARG { @lexer.state = :expr_endarg } opt_nl tRPAREN { result = @builder.begin(val[0], nil, val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | kRETURN { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = 
@builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr rparen { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | tLAMBDA { result = @context.dup @context.in_lambda = true } lambda { lambda_call = @builder.call_lambda(val[0]) args, (begin_t, body, end_t) = val[2] result = @builder.block(lambda_call, begin_t, args, body, end_t) @context.in_lambda = val[1].in_lambda } | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:while, val[0], val[2], val[3], val[5], val[6]) } | kUNTIL { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:until, val[0], val[2], val[3], val[5], val[6]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kFOR for_var kIN { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.for(val[0], val[1], val[2], val[4], val[5], val[7], val[8]) } | k_class cpath superclass { local_push @context.in_class = true } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | kDEF fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_method(val[0], val[1], val[3], val[4], val[5]) local_pop @context.in_def = val[2].in_def } | kDEF singleton dot_or_colon { @lexer.state = :expr_fname } fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_singleton(val[0], val[1], val[2], val[4], val[6], val[7], val[8]) local_pop 
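# Method definitions bracket their body with local_push/local_pop (defined in
# the ---- inner section): a fresh static environment for the body's locals
# plus saved cmdarg/cond lexer stacks. @context.in_def is flipped for the
# duration so that constructs such as BEGIN {} or class/module definitions
# inside a def can be diagnosed (begin_in_method, class_in_def,
# module_in_def).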
@context.in_def = val[5].in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { @static_env.declare val[0][0] result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA tSTAR f_norm_arg { @static_env.declare val[3][0] result = val[0]. push(@builder.restarg(val[2], val[3])) } | f_marg_list tCOMMA tSTAR f_norm_arg tCOMMA f_marg_list { @static_env.declare val[3][0] result = val[0]. push(@builder.restarg(val[2], val[3])). concat(val[5]) } | f_marg_list tCOMMA tSTAR { result = val[0]. push(@builder.restarg(val[2])) } | f_marg_list tCOMMA tSTAR tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2])). concat(val[4]) } | tSTAR f_norm_arg { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | tSTAR f_norm_arg tCOMMA f_marg_list { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.restarg(val[0]) ] } | tSTAR tCOMMA f_marg_list { result = [ @builder.restarg(val[0]), *val[2] ] } block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). 
concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { result = @builder.args(val[0], val[1], val[2]) } | tOROP { result = @builder.args(val[0], [], val[0]) } | tPIPE block_param opt_bv_decl tPIPE { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: { @static_env.extend_dynamic } f_larglist lambda_body { result = [ val[1], val[2] ] @static_env.unextend } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } compstmt kEND { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } do_block: kDO_BLOCK { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value tDOT operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value tDOT paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = 
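# Bare `super` with no argument list builds a distinct zsuper node (it
# re-uses the enclosing method's arguments at runtime), whereas `super(...)`
# and `super args` go through keyword_cmd(:super, ...) with the explicit
# argument list.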
@builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } | kDO { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol | dsym strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { result = @builder.string_compose(val[0], val[1], val[2]) } | tSTRING { result = @builder.string(val[0]) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { result = @builder.xstring_compose(val[0], val[1], val[2]) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word tSPACE { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cond.push(false) @lexer.cmdarg.push(false) } compstmt tSTRING_DEND { @lexer.cond.lexpop @lexer.cmdarg.lexpop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = 
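# string_dvar handles the short interpolation forms #$gvar, #@ivar and
# #@@cvar (tSTRING_DVAR followed by a single variable token). Full #{...}
# interpolation goes through the tSTRING_DBEG ... tSTRING_DEND branch of
# string_content above, which pushes false onto the cond/cmdarg stacks while
# the embedded statements are parsed and lexpops them afterwards.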
@builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: tSYMBOL { result = @builder.symbol(val[0]) } dsym: tSYMBEG xstring_contents tSTRING_END { result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: tINTEGER { result = @builder.integer(val[0]) } | tFLOAT { result = @builder.float(val[0]) } | tUNARY_NUM tINTEGER =tLOWEST { num = @builder.integer(val[1]) if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], num) else result = @builder.unary_num(val[0], num) end } | tUNARY_NUM tFLOAT =tLOWEST { num = @builder.float(val[1]) if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], num) else result = @builder.unary_num(val[0], num) end } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: term { result = nil } | tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | error term { yyerrok result = nil } f_arglist: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value } | f_args term { result = @builder.args(nil, val[0], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). 
concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER f_arg_item: f_norm_arg { @static_env.declare val[0][0] result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_kw: tLABEL arg_value { check_kwarg_name(val[0]) @static_env.declare val[0][0] result = @builder.kwoptarg(val[0], val[1]) } f_block_kw: tLABEL primary_value { check_kwarg_name(val[0]) @static_env.declare val[0][0] result = @builder.kwoptarg(val[0], val[1]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.kwrestarg(val[0]) ] } f_opt: tIDENTIFIER tEQL arg_value { @static_env.declare val[0][0] result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: tIDENTIFIER tEQL primary_value { @static_env.declare val[0][0] result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: tDOT | tCOLON2 opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 20 end def default_encoding Encoding::UTF_8 end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop end parser-3.3.4.2/lib/parser/ruby21.y000066400000000000000000002451361465510415600165570ustar00rootroot00000000000000class 
Parser::Ruby21 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tRATIONAL tIMAGINARY prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: top_compstmt top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN tLCURLY top_compstmt tRCURLY { result = @builder.preexe(val[0], val[1], val[2], val[3]) } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? 
diagnostic :warning, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN tLCURLY top_compstmt tRCURLY { diagnostic :error, :begin_in_method, nil, val[0] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_call { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_call { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value tDOT tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tDOT tCONSTANT tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_call { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_call { @builder.op_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL mrhs_arg { result = @builder.multi_assign(val[0], val[1], val[2]) } | expr command_asgn: lhs tEQL command_call { result = @builder.assign(val[0], val[1], val[2]) } | lhs tEQL command_asgn { result = @builder.assign(val[0], val[1], val[2]) } expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg expr_value: expr command_call: command | block_command block_command: block_call | block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } 
cmd_brace_block: tLBRACE_ARG { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tDOT operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tDOT operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | kRETURN call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). 
concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value tDOT tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tDOT tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value tDOT tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tDOT tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fsym: fname { result = @builder.symbol_internal(val[0]) } | symbol fitem: fsym | dsym undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg { result = @builder.assign(val[0], val[1], val[2]) } | lhs tEQL arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = 
@builder.assign(val[0], val[1], rescue_) } | var_lhs tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.op_assign(val[0], val[1], rescue_) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value tDOT tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tDOT tCONSTANT tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM simple_numeric tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( val[1], val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) 
} | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | primary arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } paren_args: tLPAREN2 opt_call_args rparen { result = val } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { result = @lexer.cmdarg.dup @lexer.cmdarg.push(true) } call_args { @lexer.cmdarg = val[0] result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs_arg: mrhs { result = @builder.array(nil, val[0], nil) } | arg_value mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { result = @lexer.cmdarg.dup @lexer.cmdarg.clear } bodystmt kEND { @lexer.cmdarg = val[1] result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG { result = @lexer.cmdarg.dup @lexer.cmdarg.clear } expr { @lexer.state = :expr_endarg } opt_nl tRPAREN { @lexer.cmdarg = val[1] result = @builder.begin(val[0], val[2], val[5]) } | tLPAREN_ARG { @lexer.state = :expr_endarg } opt_nl tRPAREN { result = @builder.begin(val[0], nil, val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | kRETURN { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr rparen { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = 
@builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | tLAMBDA { result = @context.dup @context.in_lambda = true } lambda { lambda_call = @builder.call_lambda(val[0]) args, (begin_t, body, end_t) = val[2] result = @builder.block(lambda_call, begin_t, args, body, end_t) @context.in_lambda = val[1].in_lambda } | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:while, val[0], val[2], val[3], val[5], val[6]) } | kUNTIL { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:until, val[0], val[2], val[3], val[5], val[6]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kFOR for_var kIN { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.for(val[0], val[1], val[2], val[4], val[5], val[7], val[8]) } | k_class cpath superclass { local_push @context.in_class = true } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | kDEF fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_method(val[0], val[1], val[3], val[4], val[5]) local_pop @context.in_def = val[2].in_def } | kDEF singleton dot_or_colon { @lexer.state = :expr_fname } fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_singleton(val[0], val[1], val[2], val[4], val[6], val[7], val[8]) local_pop @context.in_def = val[5].in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND 
if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA tSTAR f_norm_arg { result = val[0]. push(@builder.restarg(val[2], val[3])) } | f_marg_list tCOMMA tSTAR f_norm_arg tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2], val[3])). concat(val[5]) } | f_marg_list tCOMMA tSTAR { result = val[0]. push(@builder.restarg(val[2])) } | f_marg_list tCOMMA tSTAR tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2])). concat(val[4]) } | tSTAR f_norm_arg { result = [ @builder.restarg(val[0], val[1]) ] } | tSTAR f_norm_arg tCOMMA f_marg_list { result = [ @builder.restarg(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.restarg(val[0]) ] } | tSTAR tCOMMA f_marg_list { result = [ @builder.restarg(val[0]), *val[2] ] } block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). 
concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { result = @builder.args(val[0], val[1], val[2]) } | tOROP { result = @builder.args(val[0], [], val[0]) } | tPIPE block_param opt_bv_decl tPIPE { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: { @static_env.extend_dynamic } f_larglist { result = @lexer.cmdarg.dup @lexer.cmdarg.clear } lambda_body { @lexer.cmdarg = val[2] @lexer.cmdarg.lexpop result = [ val[1], val[3] ] @static_env.unextend } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } compstmt kEND { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } do_block: kDO_BLOCK { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value tDOT operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value tDOT paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args 
rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } | kDO { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol | dsym strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { result = @builder.string_compose(val[0], val[1], val[2]) } | tSTRING { result = @builder.string(val[0]) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { result = @builder.xstring_compose(val[0], val[1], val[2]) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word tSPACE { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cond.push(false) @lexer.cmdarg.push(false) } compstmt tSTRING_DEND { @lexer.cond.lexpop @lexer.cmdarg.lexpop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = 
@builder.cvar(val[0]) } | backref symbol: tSYMBOL { result = @builder.symbol(val[0]) } dsym: tSYMBEG xstring_contents tSTRING_END { result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: simple_numeric { result = val[0] } | tUNARY_NUM simple_numeric =tLOWEST { if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], val[1]) else result = @builder.unary_num(val[0], val[1]) end } simple_numeric: tINTEGER { result = @builder.integer(val[0]) } | tFLOAT { result = @builder.float(val[0]) } | tRATIONAL { result = @builder.rational(val[0]) } | tIMAGINARY { result = @builder.complex(val[0]) } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: term { result = nil } | tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | error term { yyerrok result = nil } f_arglist: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value } | { result = @context.in_kwarg @context.in_kwarg = true } f_args term { @context.in_kwarg = val[0] result = @builder.args(nil, val[1], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. 
concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] result = val[0] } f_arg_item: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_label: tLABEL { check_kwarg_name(val[0]) @static_env.declare val[0][0] result = val[0] } f_kw: f_label arg_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kw: f_label primary_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.kwrestarg(val[0]) ] } f_opt: f_norm_arg tEQL arg_value { result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: f_norm_arg tEQL primary_value { result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: tDOT | tCOLON2 opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 21 end def default_encoding Encoding::UTF_8 end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop end parser-3.3.4.2/lib/parser/ruby22.y000066400000000000000000002456321465510415600165610ustar00rootroot00000000000000class Parser::Ruby22 token kCLASS 
kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tRATIONAL tIMAGINARY tLABEL_END prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: top_compstmt top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN tLCURLY top_compstmt tRCURLY { result = @builder.preexe(val[0], val[1], val[2], val[3]) } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? 
diagnostic :warning, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN tLCURLY top_compstmt tRCURLY { diagnostic :error, :begin_in_method, nil, val[0] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_call { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_call { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value tDOT tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tDOT tCONSTANT tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_call { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_call { @builder.op_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL mrhs_arg { result = @builder.multi_assign(val[0], val[1], val[2]) } | expr command_asgn: lhs tEQL command_call { result = @builder.assign(val[0], val[1], val[2]) } | lhs tEQL command_asgn { result = @builder.assign(val[0], val[1], val[2]) } expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg expr_value: expr command_call: command | block_command block_command: block_call | block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } 
cmd_brace_block: tLBRACE_ARG { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tDOT operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tDOT operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | kRETURN call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). 
concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value tDOT tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tDOT tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value tDOT tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tDOT tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fsym: fname { result = @builder.symbol_internal(val[0]) } | symbol fitem: fsym | dsym undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg { result = @builder.assign(val[0], val[1], val[2]) } | lhs tEQL arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = 
@builder.assign(val[0], val[1], rescue_) } | var_lhs tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.op_assign(val[0], val[1], rescue_) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value tDOT tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tDOT tCONSTANT tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM simple_numeric tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( val[1], val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) 
} | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | primary arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } paren_args: tLPAREN2 opt_call_args rparen { result = val } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { result = @lexer.cmdarg.dup @lexer.cmdarg.push(true) } call_args { @lexer.cmdarg = val[0] result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs_arg: mrhs { result = @builder.array(nil, val[0], nil) } | arg_value mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { result = @lexer.cmdarg.dup @lexer.cmdarg.clear } bodystmt kEND { @lexer.cmdarg = val[1] result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG { result = @lexer.cmdarg.dup @lexer.cmdarg.clear } expr { @lexer.state = :expr_endarg } opt_nl tRPAREN { @lexer.cmdarg = val[1] result = @builder.begin(val[0], val[2], val[5]) } | tLPAREN_ARG { @lexer.state = :expr_endarg } opt_nl tRPAREN { result = @builder.begin(val[0], nil, val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | kRETURN { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr rparen { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = 
@builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | tLAMBDA { result = @context.dup @context.in_lambda = true } lambda { lambda_call = @builder.call_lambda(val[0]) args, (begin_t, body, end_t) = val[2] result = @builder.block(lambda_call, begin_t, args, body, end_t) @context.in_lambda = val[1].in_lambda } | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:while, val[0], val[2], val[3], val[5], val[6]) } | kUNTIL { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:until, val[0], val[2], val[3], val[5], val[6]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kFOR for_var kIN { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.for(val[0], val[1], val[2], val[4], val[5], val[7], val[8]) } | k_class cpath superclass { local_push @context.in_class = true } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | kDEF fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_method(val[0], val[1], val[3], val[4], val[5]) local_pop @context.in_def = val[2].in_def } | kDEF singleton dot_or_colon { @lexer.state = :expr_fname } fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_singleton(val[0], val[1], val[2], val[4], val[6], val[7], val[8]) local_pop @context.in_def = val[5].in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND 
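# Note on the conditional rules above: the kUNLESS reduction calls
# @builder.condition with its branches swapped -- the statements after
# `else` become the if-true branch and the main body becomes the if-false
# branch. Illustrative sketch (a/b/c are arbitrary example expressions):
#
#   unless a then b else c end   # => same node shape as `if a then c else b end`
#
# Likewise, k_class/k_module pair the keyword token with @context.dup so the
# class and module actions can restore in_class/in_def after local_pop.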
if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA tSTAR f_norm_arg { result = val[0]. push(@builder.restarg(val[2], val[3])) } | f_marg_list tCOMMA tSTAR f_norm_arg tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2], val[3])). concat(val[5]) } | f_marg_list tCOMMA tSTAR { result = val[0]. push(@builder.restarg(val[2])) } | f_marg_list tCOMMA tSTAR tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2])). concat(val[4]) } | tSTAR f_norm_arg { result = [ @builder.restarg(val[0], val[1]) ] } | tSTAR f_norm_arg tCOMMA f_marg_list { result = [ @builder.restarg(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.restarg(val[0]) ] } | tSTAR tCOMMA f_marg_list { result = [ @builder.restarg(val[0]), *val[2] ] } block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). 
concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { result = @builder.args(val[0], val[1], val[2]) } | tOROP { result = @builder.args(val[0], [], val[0]) } | tPIPE block_param opt_bv_decl tPIPE { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: { @static_env.extend_dynamic } f_larglist { result = @lexer.cmdarg.dup @lexer.cmdarg.clear } lambda_body { @lexer.cmdarg = val[2] @lexer.cmdarg.lexpop result = [ val[1], val[3] ] @static_env.unextend } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } compstmt kEND { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } do_block: kDO_BLOCK { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value tDOT operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value tDOT paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args 
rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } | kDO { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol | dsym strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { result = @builder.string_compose(val[0], val[1], val[2]) } | tSTRING { result = @builder.string(val[0]) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { result = @builder.xstring_compose(val[0], val[1], val[2]) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word tSPACE { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cond.push(false) @lexer.cmdarg.push(false) } compstmt tSTRING_DEND { @lexer.cond.lexpop @lexer.cmdarg.lexpop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = 
@builder.cvar(val[0]) } | backref symbol: tSYMBOL { result = @builder.symbol(val[0]) } dsym: tSYMBEG xstring_contents tSTRING_END { result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: simple_numeric { result = val[0] } | tUNARY_NUM simple_numeric =tLOWEST { if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], val[1]) else result = @builder.unary_num(val[0], val[1]) end } simple_numeric: tINTEGER { result = @builder.integer(val[0]) } | tFLOAT { result = @builder.float(val[0]) } | tRATIONAL { result = @builder.rational(val[0]) } | tIMAGINARY { result = @builder.complex(val[0]) } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: term { result = nil } | tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | error term { yyerrok result = nil } f_arglist: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value } | { result = @context.in_kwarg @context.in_kwarg = true } f_args term { @context.in_kwarg = val[0] result = @builder.args(nil, val[1], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. 
concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] result = val[0] } f_arg_asgn: f_norm_arg { result = val[0] } f_arg_item: f_arg_asgn { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_label: tLABEL { check_kwarg_name(val[0]) @static_env.declare val[0][0] result = val[0] } f_kw: f_label arg_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kw: f_label primary_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.kwrestarg(val[0]) ] } f_opt: f_arg_asgn tEQL arg_value { result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: f_arg_asgn tEQL primary_value { result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tSTRING_BEG string_contents tLABEL_END arg_value { result = @builder.pair_quoted(val[0], val[1], val[2], val[3]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: tDOT | tCOLON2 opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 22 end def default_encoding Encoding::UTF_8 end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) end def local_pop @static_env.unextend @lexer.cmdarg.pop 
@lexer.cond.pop end parser-3.3.4.2/lib/parser/ruby23.y000066400000000000000000002464531465510415600165640ustar00rootroot00000000000000class Parser::Ruby23 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tRATIONAL tIMAGINARY tLABEL_END tANDDOT prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: top_compstmt top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN tLCURLY top_compstmt tRCURLY { result = @builder.preexe(val[0], val[1], val[2], val[3]) } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? 
diagnostic :warning, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN tLCURLY top_compstmt tRCURLY { diagnostic :error, :begin_in_method, nil, val[0] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_call { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_call { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_call { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_call { @builder.op_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL mrhs_arg { result = @builder.multi_assign(val[0], val[1], val[2]) } | expr command_asgn: lhs tEQL command_call { result = @builder.assign(val[0], val[1], val[2]) } | lhs tEQL command_asgn { result = @builder.assign(val[0], val[1], val[2]) } expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg expr_value: expr command_call: command | block_command block_command: block_call | block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } 
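# The 2.3 grammar threads method-call sites through call_op (defined with
# dot_or_colon near the end of the rules), which yields [:dot, location] for
# `.` and [:anddot, location] for the safe navigation operator `&.` added in
# Ruby 2.3. Illustrative sketch (foo/bar are arbitrary example names):
#
#   foo&.bar   # reduced via `primary_value call_op operation2 opt_paren_args`;
#              # the default builder emits a csend node instead of send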
cmd_brace_block: tLBRACE_ARG { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value call_op operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value call_op operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | kRETURN call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). 
concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fsym: fname { result = @builder.symbol_internal(val[0]) } | symbol fitem: fsym | dsym undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg { result = @builder.assign(val[0], val[1], val[2]) } | lhs tEQL arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body 
]) result = @builder.assign(val[0], val[1], rescue_) } | var_lhs tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.op_assign(val[0], val[1], rescue_) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM simple_numeric tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( val[1], val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, 
val[0], val[1], val[2]) } | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | primary arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } paren_args: tLPAREN2 opt_call_args rparen { result = val } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { result = @lexer.cmdarg.dup @lexer.cmdarg.push(true) } call_args { @lexer.cmdarg = val[0] result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs_arg: mrhs { result = @builder.array(nil, val[0], nil) } | arg_value mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { result = @lexer.cmdarg.dup @lexer.cmdarg.clear } bodystmt kEND { @lexer.cmdarg = val[1] result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG { result = @lexer.cmdarg.dup @lexer.cmdarg.clear } expr { @lexer.state = :expr_endarg } opt_nl tRPAREN { @lexer.cmdarg = val[1] result = @builder.begin(val[0], val[2], val[5]) } | tLPAREN_ARG { @lexer.state = :expr_endarg } opt_nl tRPAREN { result = @builder.begin(val[0], nil, val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | kRETURN { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr rparen { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen 
{ result = @builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | tLAMBDA { result = @context.dup @context.in_lambda = true } lambda { lambda_call = @builder.call_lambda(val[0]) args, (begin_t, body, end_t) = val[2] result = @builder.block(lambda_call, begin_t, args, body, end_t) @context.in_lambda = val[1].in_lambda } | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:while, val[0], val[2], val[3], val[5], val[6]) } | kUNTIL { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:until, val[0], val[2], val[3], val[5], val[6]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kFOR for_var kIN { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.for(val[0], val[1], val[2], val[4], val[5], val[7], val[8]) } | k_class cpath superclass { local_push @context.in_class = true } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | kDEF fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_method(val[0], val[1], val[3], val[4], val[5]) local_pop @context.in_def = val[2].in_def } | kDEF singleton dot_or_colon { @lexer.state = :expr_fname } fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_singleton(val[0], val[1], val[2], val[4], val[6], val[7], val[8]) local_pop @context.in_def = val[5].in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } then: term | kTHEN | term kTHEN { result = val[1] } do: term 
| kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA tSTAR f_norm_arg { result = val[0]. push(@builder.restarg(val[2], val[3])) } | f_marg_list tCOMMA tSTAR f_norm_arg tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2], val[3])). concat(val[5]) } | f_marg_list tCOMMA tSTAR { result = val[0]. push(@builder.restarg(val[2])) } | f_marg_list tCOMMA tSTAR tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2])). concat(val[4]) } | tSTAR f_norm_arg { result = [ @builder.restarg(val[0], val[1]) ] } | tSTAR f_norm_arg tCOMMA f_marg_list { result = [ @builder.restarg(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.restarg(val[0]) ] } | tSTAR tCOMMA f_marg_list { result = [ @builder.restarg(val[0]), *val[2] ] } block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). 
concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { result = @builder.args(val[0], val[1], val[2]) } | tOROP { result = @builder.args(val[0], [], val[0]) } | tPIPE block_param opt_bv_decl tPIPE { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: { @static_env.extend_dynamic } f_larglist { result = @lexer.cmdarg.dup @lexer.cmdarg.clear } lambda_body { @lexer.cmdarg = val[2] @lexer.cmdarg.lexpop result = [ val[1], val[3] ] @static_env.unextend } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } compstmt kEND { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } do_block: kDO_BLOCK { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value call_op operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value call_op paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args 
rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } | kDO { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol | dsym strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { string = @builder.string_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tSTRING { string = @builder.string(val[0]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { string = @builder.xstring_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word tSPACE { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cond.push(false) @lexer.cmdarg.push(false) } compstmt tSTRING_DEND { @lexer.cond.lexpop 
@lexer.cmdarg.lexpop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: tSYMBOL { result = @builder.symbol(val[0]) } dsym: tSYMBEG xstring_contents tSTRING_END { result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: simple_numeric { result = val[0] } | tUNARY_NUM simple_numeric =tLOWEST { if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], val[1]) else result = @builder.unary_num(val[0], val[1]) end } simple_numeric: tINTEGER { result = @builder.integer(val[0]) } | tFLOAT { result = @builder.float(val[0]) } | tRATIONAL { result = @builder.rational(val[0]) } | tIMAGINARY { result = @builder.complex(val[0]) } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | # nothing { result = nil } f_arglist: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value } | { result = @context.in_kwarg @context.in_kwarg = true } f_args term { @context.in_kwarg = val[0] result = @builder.args(nil, val[1], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. 
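# [editor's note -- illustrative sketch, not part of the grammar] simple_numeric above
# maps each numeric token onto its own builder call, and the respond_to?(:negate) branch
# only exists for alternative AST builders -- the default builder takes the unary_num
# path and folds the sign into the literal. Assuming the released parser gem API.
require 'parser/current'

%w[42 4.2 3r 2i -7].each { |src| p Parser::CurrentRuby.parse(src).type }
# expected, roughly: :int, :float, :rational, :complex, :int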
concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] result = val[0] } f_arg_asgn: f_norm_arg { result = val[0] } f_arg_item: f_arg_asgn { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_label: tLABEL { check_kwarg_name(val[0]) @static_env.declare val[0][0] result = val[0] } f_kw: f_label arg_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kw: f_label primary_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.kwrestarg(val[0]) ] } f_opt: f_arg_asgn tEQL arg_value { result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: f_arg_asgn tEQL primary_value { result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tSTRING_BEG string_contents tLABEL_END arg_value { result = @builder.pair_quoted(val[0], val[1], val[2], val[3]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: call_op | tCOLON2 call_op: tDOT { result = [:dot, val[0][1]] } | tANDDOT { result = [:anddot, val[0][1]] } opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative 
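# [editor's note -- illustrative sketch, not part of the grammar] call_op above keeps
# '.' and '&.' apart; the builder turns safe-navigation calls into :csend nodes instead
# of :send. Assuming the released parser gem API.
require 'parser/current'

p Parser::CurrentRuby.parse('foo.bar').type    # => :send
p Parser::CurrentRuby.parse('foo&.bar').type   # => :csend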
'../parser' ---- inner def version 23 end def default_encoding Encoding::UTF_8 end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop end parser-3.3.4.2/lib/parser/ruby24.y000066400000000000000000002520021465510415600165500ustar00rootroot00000000000000class Parser::Ruby24 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tRATIONAL tIMAGINARY tLABEL_END tANDDOT prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: top_compstmt top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN tLCURLY top_compstmt tRCURLY { result = @builder.preexe(val[0], val[1], val[2], val[3]) } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? 
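# [editor's note -- illustrative sketch, not part of the grammar] The '---- header' and
# '---- inner' sections of each grammar are copied by racc into the generated parser
# (e.g. lib/parser/ruby24.rb), which is where #version, #default_encoding and the
# local_push/local_pop helpers that reset @static_env and the cmdarg/cond stacks end up.
# Assuming the released gem ships the generated classes:
require 'parser/ruby24'

p Parser::Ruby24.new.version           # => 24
p Parser::Ruby24.parse('2 + 2').type   # => :send  (class-level parse comes from Parser::Base)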
diagnostic :warning, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN tLCURLY top_compstmt tRCURLY { diagnostic :error, :begin_in_method, nil, val[0] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL mrhs_arg { result = @builder.multi_assign(val[0], val[1], val[2]) } | expr command_asgn: lhs tEQL command_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_rhs { @builder.op_assign(val[0], val[1], val[2]) } command_rhs: command_call =tOP_ASGN | command_call kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | command_asgn expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg expr_value: expr command_call: command | block_command block_command: block_call | 
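# [editor's note -- illustrative sketch, not part of the grammar] The op_assign branches
# of command_asgn above fan out into different node types: '||=' and '&&=' get dedicated
# nodes, everything else becomes :op_asgn. Assuming the released parser gem API.
require 'parser/current'

p Parser::CurrentRuby.parse('a += 1').type        # => :op_asgn
p Parser::CurrentRuby.parse('a ||= 1').type       # => :or_asgn
p Parser::CurrentRuby.parse('h[:k] &&= 1').type   # => :and_asgn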
block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } cmd_brace_block: tLBRACE_ARG { result = @context.dup @context.in_block = true } brace_body tRCURLY { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value call_op operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value call_op operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | kRETURN call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). 
concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fsym: fname { result = @builder.symbol_internal(val[0]) } | symbol fitem: fsym | dsym undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg_rhs { 
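# [editor's note -- illustrative sketch, not part of the grammar] The mlhs_* rules above
# assemble the left-hand side of a multiple assignment; tSTAR wraps its target in a
# splat node. Assuming the released parser gem API.
require 'parser/current'

ast = Parser::CurrentRuby.parse('a, *b, c = list')
p ast.type                              # => :masgn
p ast.children[0].children.map(&:type)  # => [:lvasgn, :splat, :lvasgn]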
result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM simple_numeric tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( val[1], val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | primary arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ 
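# [editor's note -- illustrative sketch, not part of the grammar] binary_op above builds
# plain :send nodes, so precedence is visible only in the nesting; match_op special-cases
# a static regexp receiver with named captures (the captures are also declared as locals).
# Assuming the released parser gem API; shapes are approximate.
require 'parser/current'

p Parser::CurrentRuby.parse('1 + 2 * 3')
# roughly s(:send, s(:int, 1), :+, s(:send, s(:int, 2), :*, s(:int, 3)))
p Parser::CurrentRuby.parse('/(?<x>\d+)/ =~ line').type
# roughly :match_with_lvasgn (a plain `a =~ b` stays an ordinary :send)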
@builder.associate(nil, val[0], nil) ] } arg_rhs: arg =tOP_ASGN | arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } paren_args: tLPAREN2 opt_call_args rparen { result = val } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { # When branch gets invoked by RACC's lookahead # and command args start with '[' or '(' # we need to put `true` to the cmdarg stack # **before** `false` pushed by lexer # m [], n # ^ # Right here we have cmdarg [...0] because # lexer pushed it on '[' # We need to modify cmdarg stack to [...10] # # For all other cases (like `m n` or `m n, []`) we simply put 1 to the stack # and later lexer pushes corresponding bits on top of it. last_token = @last_token[0] lookahead = last_token == :tLBRACK || last_token == :tLPAREN_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.push(true) @lexer.cmdarg.push(top) else @lexer.cmdarg.push(true) end } call_args { # call_args can be followed by tLBRACE_ARG (that does cmdarg.push(0) in the lexer) # but the push must be done after cmdarg.pop() in the parser. # So this code does cmdarg.pop() to pop 0 pushed by tLBRACE_ARG, # cmdarg.pop() to pop 1 pushed by command_args, # and cmdarg.push(0) to restore back the flag set by tLBRACE_ARG. 
last_token = @last_token[0] lookahead = last_token == :tLBRACE_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.pop @lexer.cmdarg.push(top) else @lexer.cmdarg.pop end result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs_arg: mrhs { result = @builder.array(nil, val[0], nil) } | arg_value mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { @lexer.cmdarg.push(false) } bodystmt kEND { @lexer.cmdarg.pop result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG stmt { @lexer.state = :expr_endarg } rparen { result = @builder.begin(val[0], val[1], val[3]) } | tLPAREN_ARG { @lexer.state = :expr_endarg } opt_nl tRPAREN { result = @builder.begin(val[0], nil, val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | kRETURN { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr rparen { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | tLAMBDA { result = @context.dup @context.in_lambda = true } lambda { lambda_call = @builder.call_lambda(val[0]) args, (begin_t, body, end_t) = val[2] result = @builder.block(lambda_call, begin_t, args, body, end_t) @context.in_lambda = val[1].in_lambda } | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:while, val[0], val[2], val[3], val[5], val[6]) } | kUNTIL { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = 
@builder.loop(:until, val[0], val[2], val[3], val[5], val[6]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kFOR for_var kIN { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.for(val[0], val[1], val[2], val[4], val[5], val[7], val[8]) } | k_class cpath superclass { local_push @context.in_class = true } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | kDEF fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_method(val[0], val[1], val[3], val[4], val[5]) local_pop @context.in_def = val[2].in_def } | kDEF singleton dot_or_colon { @lexer.state = :expr_fname } fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_singleton(val[0], val[1], val[2], val[4], val[6], val[7], val[8]) local_pop @context.in_def = val[5].in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA tSTAR f_norm_arg { result = val[0]. push(@builder.restarg(val[2], val[3])) } | f_marg_list tCOMMA tSTAR f_norm_arg tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2], val[3])). concat(val[5]) } | f_marg_list tCOMMA tSTAR { result = val[0]. push(@builder.restarg(val[2])) } | f_marg_list tCOMMA tSTAR tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2])). 
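# [editor's note -- illustrative sketch, not part of the grammar] The kDEF rules above
# wrap the body in local_push/local_pop, so every method body starts with a fresh static
# environment and fresh cmdarg/cond stacks, while @context.in_def is what later lets the
# grammar reject constructs such as `class` inside `def`. Assuming the released gem API.
require 'parser/current'

ast = Parser::CurrentRuby.parse('def m(a, b = 1) a end')
name, args, _body = ast.children
p ast.type                              # => :def
p [name, args.children.map(&:type)]     # => [:m, [:arg, :optarg]]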
concat(val[4]) } | tSTAR f_norm_arg { result = [ @builder.restarg(val[0], val[1]) ] } | tSTAR f_norm_arg tCOMMA f_marg_list { result = [ @builder.restarg(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.restarg(val[0]) ] } | tSTAR tCOMMA f_marg_list { result = [ @builder.restarg(val[0]), *val[2] ] } block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). 
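# [editor's note -- illustrative sketch, not part of the grammar] The `f_arg
# opt_block_args_tail` branch above is the procarg0 special case: a block written
# `{ |a| }` receives the first yielded value without array destructuring (unlike
# `{ |a, b| }` or `{ |a,| }`), and the builder records that with a :procarg0 wrapper.
# The exact shape depends on Parser::Builders::Default.emit_procarg0 (an assumption here).
require 'parser/current'

one = Parser::CurrentRuby.parse('m { |a| a }')
two = Parser::CurrentRuby.parse('m { |a, b| a }')
p one.children[1].children.map(&:type)   # roughly [:procarg0]
p two.children[1].children.map(&:type)   # => [:arg, :arg]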
concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { result = @builder.args(val[0], val[1], val[2]) } | tOROP { result = @builder.args(val[0], [], val[0]) } | tPIPE block_param opt_bv_decl tPIPE { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: { @static_env.extend_dynamic } f_larglist { @lexer.cmdarg.push(false) } lambda_body { @lexer.cmdarg.pop result = [ val[1], val[3] ] @static_env.unextend } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } compstmt kEND { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } do_block: kDO_BLOCK { result = @context.dup @context.in_block = true } do_body kEND { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value call_op operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value call_op paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { result = @context.dup 
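# [editor's note -- illustrative sketch, not part of the grammar] The lambda/f_larglist
# rules above push a blank cmdarg frame while the `->` parameter list and body are
# parsed; the builder then wraps the call in a :block node whose callee is a bare lambda
# node (shape depends on Parser::Builders::Default.emit_lambda -- an assumption here).
require 'parser/current'

ast = Parser::CurrentRuby.parse('->(x) { x * 2 }')
p ast.type              # => :block
p ast.children[0].type  # roughly :lambda (older emit settings produce s(:send, nil, :lambda))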
@context.in_block = true } brace_body tRCURLY { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } | kDO { result = @context.dup @context.in_block = true } do_body kEND { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } brace_body: { @static_env.extend_dynamic } opt_block_param compstmt { result = [ val[1], val[2] ] @static_env.unextend } do_body: { @static_env.extend_dynamic } { @lexer.cmdarg.push(false) } opt_block_param compstmt { result = [ val[2], val[3] ] @static_env.unextend @lexer.cmdarg.pop } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol | dsym strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { string = @builder.string_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tSTRING { string = @builder.string(val[0]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { string = @builder.xstring_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word tSPACE { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cmdarg.push(false) @lexer.cond.push(false) } compstmt 
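# [editor's note -- illustrative sketch, not part of the grammar] opt_rescue above
# collects one rescue_body per kRESCUE clause, and begin_body (in bodystmt) stitches
# them into a :rescue node with optional else/ensure. Assuming the released gem API.
require 'parser/current'

ast = Parser::CurrentRuby.parse('begin; work; rescue KeyError => e; recover; end')
rescue_node = ast.children[0]                    # inside the :kwbegin
p rescue_node.type                               # => :rescue
p rescue_node.children.map { |c| c && c.type }   # roughly [:send, :resbody, nil]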
tSTRING_DEND { @lexer.cmdarg.pop @lexer.cond.pop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: tSYMBOL { @lexer.state = :expr_end result = @builder.symbol(val[0]) } dsym: tSYMBEG xstring_contents tSTRING_END { @lexer.state = :expr_end result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: simple_numeric { result = val[0] } | tUNARY_NUM simple_numeric =tLOWEST { if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], val[1]) else result = @builder.unary_num(val[0], val[1]) end } simple_numeric: tINTEGER { @lexer.state = :expr_end result = @builder.integer(val[0]) } | tFLOAT { @lexer.state = :expr_end result = @builder.float(val[0]) } | tRATIONAL { @lexer.state = :expr_end result = @builder.rational(val[0]) } | tIMAGINARY { @lexer.state = :expr_end result = @builder.complex(val[0]) } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | # nothing { result = nil } f_arglist: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value } | { result = @context.in_kwarg @context.in_kwarg = true } f_args term { @context.in_kwarg = val[0] result = @builder.args(nil, val[1], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). 
concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] result = val[0] } f_arg_asgn: f_norm_arg { result = val[0] } f_arg_item: f_arg_asgn { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_label: tLABEL { check_kwarg_name(val[0]) @static_env.declare val[0][0] result = val[0] } f_kw: f_label arg_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kw: f_label primary_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.kwrestarg(val[0]) ] } f_opt: f_arg_asgn tEQL arg_value { result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: f_arg_asgn tEQL primary_value { result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tSTRING_BEG string_contents tLABEL_END arg_value { result = @builder.pair_quoted(val[0], val[1], val[2], val[3]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: call_op | tCOLON2 call_op: tDOT { result = [:dot, val[0][1]] } | tANDDOT { result = [:anddot, val[0][1]] } opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } 
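# [editor's note -- illustrative sketch, not part of the grammar] The f_args/args_tail
# rules above enumerate every way formal parameters may be combined; each kind gets its
# own child node under :args. Assuming the released parser gem API.
require 'parser/current'

ast = Parser::CurrentRuby.parse('def m(a, b = 1, *r, k:, kk: 2, **kw, &blk); end')
p ast.children[1].children.map(&:type)
# => [:arg, :optarg, :restarg, :kwarg, :kwoptarg, :kwrestarg, :blockarg]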
rbracket: opt_nl tRBRACK { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 24 end def default_encoding Encoding::UTF_8 end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop end parser-3.3.4.2/lib/parser/ruby25.y000066400000000000000000002511321465510415600165540ustar00rootroot00000000000000class Parser::Ruby25 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tRATIONAL tIMAGINARY tLABEL_END tANDDOT prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: top_compstmt top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN begin_block { result = @builder.preexe(val[0], *val[1]) } begin_block: tLCURLY top_compstmt tRCURLY { result = val } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? 
diagnostic :warning, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN begin_block { diagnostic :error, :begin_in_method, nil, val[0] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL mrhs_arg { result = @builder.multi_assign(val[0], val[1], val[2]) } | expr command_asgn: lhs tEQL command_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_rhs { @builder.op_assign(val[0], val[1], val[2]) } command_rhs: command_call =tOP_ASGN | command_call kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | command_asgn expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg expr_value: expr expr_value_do: { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop result = [ 
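# [editor's note -- illustrative sketch, not part of the grammar] bodystmt above reports
# an `else` without any `rescue` through the diagnostics engine instead of failing the
# parse (in this 2.5 grammar it is a warning). Diagnostics can be observed by installing
# a consumer on a parser instance. Assuming the released parser gem API.
require 'parser/ruby25'

buffer = Parser::Source::Buffer.new('(example)')
buffer.source = 'begin; work; else; cleanup; end'
parser = Parser::Ruby25.new
parser.diagnostics.consumer = ->(diag) { puts diag.render }
parser.parse(buffer)   # prints a "useless else" style warning and still returns an AST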
val[1], val[2] ] } command_call: command | block_command block_command: block_call | block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } cmd_brace_block: tLBRACE_ARG { result = @context.dup @context.in_block = true } brace_body tRCURLY { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value call_op operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value call_op operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | k_return call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). 
concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fsym: fname { result = @builder.symbol_internal(val[0]) } | symbol fitem: fsym | dsym undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg_rhs { 
result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM simple_numeric tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( val[1], val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr =tCMP | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | primary relop: tGT | tLT | tGEQ | tLEQ rel_expr: arg relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } arg_rhs: arg =tOP_ASGN | 
arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } paren_args: tLPAREN2 opt_call_args rparen { result = val } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { # When branch gets invoked by RACC's lookahead # and command args start with '[' or '(' # we need to put `true` to the cmdarg stack # **before** `false` pushed by lexer # m [], n # ^ # Right here we have cmdarg [...0] because # lexer pushed it on '[' # We need to modify cmdarg stack to [...10] # # For all other cases (like `m n` or `m n, []`) we simply put 1 to the stack # and later lexer pushes corresponding bits on top of it. last_token = @last_token[0] lookahead = last_token == :tLBRACK || last_token == :tLPAREN_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.push(true) @lexer.cmdarg.push(top) else @lexer.cmdarg.push(true) end } call_args { # call_args can be followed by tLBRACE_ARG (that does cmdarg.push(0) in the lexer) # but the push must be done after cmdarg.pop() in the parser. # So this code does cmdarg.pop() to pop 0 pushed by tLBRACE_ARG, # cmdarg.pop() to pop 1 pushed by command_args, # and cmdarg.push(0) to restore back the flag set by tLBRACE_ARG. 
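                      # --- editor's illustration, not part of the upstream grammar: a worked
                      # trace of the cmdarg fixup described above, with `true`/`false` standing
                      # for the 1/0 bits named in that comment.
                      #   on entry (tLBRACE_ARG lookahead):  cmdarg = [.., true, false]  # true from command_args, false from tLBRACE_ARG
                      #   after pop/pop/push(top) below:     cmdarg = [.., false]        # command_args bit dropped, tLBRACE_ARG bit kept
                      #   without that lookahead, the single cmdarg.pop in the else branch suffices.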
last_token = @last_token[0] lookahead = last_token == :tLBRACE_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.pop @lexer.cmdarg.push(top) else @lexer.cmdarg.pop end result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs_arg: mrhs { result = @builder.array(nil, val[0], nil) } | arg_value mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { @lexer.cmdarg.push(false) } bodystmt kEND { @lexer.cmdarg.pop result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG stmt { @lexer.state = :expr_endarg } rparen { result = @builder.begin(val[0], val[1], val[3]) } | tLPAREN_ARG { @lexer.state = :expr_endarg } opt_nl tRPAREN { result = @builder.begin(val[0], nil, val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | k_return { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr rparen { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | tLAMBDA { result = @context.dup @context.in_lambda = true } lambda { lambda_call = @builder.call_lambda(val[0]) args, (begin_t, body, end_t) = val[2] result = @builder.block(lambda_call, begin_t, args, body, end_t) @context.in_lambda = val[1].in_lambda } | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE expr_value_do compstmt kEND { result = @builder.loop(:while, val[0], *val[1], val[2], val[3]) } | kUNTIL expr_value_do compstmt kEND { result = @builder.loop(:until, val[0], *val[1], val[2], val[3]) } | kCASE expr_value opt_terms case_body kEND 
{ *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kFOR for_var kIN expr_value_do compstmt kEND { result = @builder.for(val[0], val[1], val[2], *val[3], val[4], val[5]) } | k_class cpath superclass { local_push @context.in_class = true } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | kDEF fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_method(val[0], val[1], val[3], val[4], val[5]) local_pop @context.in_def = val[2].in_def } | kDEF singleton dot_or_colon { @lexer.state = :expr_fname } fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_singleton(val[0], val[1], val[2], val[4], val[6], val[7], val[8]) local_pop @context.in_def = val[5].in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } k_return: kRETURN { if @context.in_class && !@context.in_def && !(context.in_block || context.in_lambda) diagnostic :error, :invalid_return, nil, val[0] end } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA tSTAR f_norm_arg { result = val[0]. push(@builder.restarg(val[2], val[3])) } | f_marg_list tCOMMA tSTAR f_norm_arg tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2], val[3])). concat(val[5]) } | f_marg_list tCOMMA tSTAR { result = val[0]. push(@builder.restarg(val[2])) } | f_marg_list tCOMMA tSTAR tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2])). 
concat(val[4]) } | tSTAR f_norm_arg { result = [ @builder.restarg(val[0], val[1]) ] } | tSTAR f_norm_arg tCOMMA f_marg_list { result = [ @builder.restarg(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.restarg(val[0]) ] } | tSTAR tCOMMA f_marg_list { result = [ @builder.restarg(val[0]), *val[2] ] } block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). 
concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { result = @builder.args(val[0], val[1], val[2]) } | tOROP { result = @builder.args(val[0], [], val[0]) } | tPIPE block_param opt_bv_decl tPIPE { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: { @static_env.extend_dynamic } f_larglist { @lexer.cmdarg.push(false) } lambda_body { @lexer.cmdarg.pop result = [ val[1], val[3] ] @static_env.unextend } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } compstmt kEND { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } do_block: kDO_BLOCK { result = @context.dup @context.in_block = true } do_body kEND { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value call_op operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value call_op paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { result = @context.dup 
@context.in_block = true } brace_body tRCURLY { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } | kDO { result = @context.dup @context.in_block = true } do_body kEND { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } brace_body: { @static_env.extend_dynamic } opt_block_param compstmt { result = [ val[1], val[2] ] @static_env.unextend } do_body: { @static_env.extend_dynamic } { @lexer.cmdarg.push(false) } opt_block_param bodystmt { result = [ val[2], val[3] ] @static_env.unextend @lexer.cmdarg.pop } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol | dsym strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { string = @builder.string_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tSTRING { string = @builder.string(val[0]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { string = @builder.xstring_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word tSPACE { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cmdarg.push(false) @lexer.cond.push(false) } compstmt 
tSTRING_DEND { @lexer.cmdarg.pop @lexer.cond.pop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: tSYMBOL { @lexer.state = :expr_end result = @builder.symbol(val[0]) } dsym: tSYMBEG string_contents tSTRING_END { @lexer.state = :expr_end result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: simple_numeric { result = val[0] } | tUNARY_NUM simple_numeric =tLOWEST { if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], val[1]) else result = @builder.unary_num(val[0], val[1]) end } simple_numeric: tINTEGER { @lexer.state = :expr_end result = @builder.integer(val[0]) } | tFLOAT { @lexer.state = :expr_end result = @builder.float(val[0]) } | tRATIONAL { @lexer.state = :expr_end result = @builder.rational(val[0]) } | tIMAGINARY { @lexer.state = :expr_end result = @builder.complex(val[0]) } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | # nothing { result = nil } f_arglist: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value } | { result = @context.in_kwarg @context.in_kwarg = true } f_args term { @context.in_kwarg = val[0] result = @builder.args(nil, val[1], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). 
concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] result = val[0] } f_arg_asgn: f_norm_arg { result = val[0] } f_arg_item: f_arg_asgn { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_label: tLABEL { check_kwarg_name(val[0]) @static_env.declare val[0][0] result = val[0] } f_kw: f_label arg_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kw: f_label primary_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.kwrestarg(val[0]) ] } f_opt: f_arg_asgn tEQL arg_value { result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: f_arg_asgn tEQL primary_value { result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tSTRING_BEG string_contents tLABEL_END arg_value { result = @builder.pair_quoted(val[0], val[1], val[2], val[3]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: call_op | tCOLON2 call_op: tDOT { result = [:dot, val[0][1]] } | tANDDOT { result = [:anddot, val[0][1]] } opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } 
rbracket: opt_nl tRBRACK { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 25 end def default_encoding Encoding::UTF_8 end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop end parser-3.3.4.2/lib/parser/ruby26.y000066400000000000000000002517541465510415600165670ustar00rootroot00000000000000class Parser::Ruby26 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tRATIONAL tIMAGINARY tLABEL_END tANDDOT prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: top_compstmt top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN begin_block { result = @builder.preexe(val[0], *val[1]) } begin_block: tLCURLY top_compstmt tRCURLY { result = val } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? 
diagnostic :error, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN begin_block { diagnostic :error, :begin_in_method, nil, val[0] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL mrhs_arg { result = @builder.multi_assign(val[0], val[1], val[2]) } | expr command_asgn: lhs tEQL command_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_rhs { @builder.op_assign(val[0], val[1], val[2]) } command_rhs: command_call =tOP_ASGN | command_call kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | command_asgn expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg expr_value: expr expr_value_do: { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop result = [ 
val[1], val[2] ] } command_call: command | block_command block_command: block_call | block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } cmd_brace_block: tLBRACE_ARG { result = @context.dup @context.in_block = true } brace_body tRCURLY { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value call_op operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value call_op operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | k_return call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). 
concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fsym: fname { result = @builder.symbol_internal(val[0]) } | symbol fitem: fsym | dsym undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg_rhs { 
result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | arg tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM simple_numeric tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( val[1], val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr =tCMP | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | primary relop: tGT | tLT | tGEQ | tLEQ rel_expr: arg relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = 
val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } arg_rhs: arg =tOP_ASGN | arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } paren_args: tLPAREN2 opt_call_args rparen { result = val } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { # When branch gets invoked by RACC's lookahead # and command args start with '[' or '(' # we need to put `true` to the cmdarg stack # **before** `false` pushed by lexer # m [], n # ^ # Right here we have cmdarg [...0] because # lexer pushed it on '[' # We need to modify cmdarg stack to [...10] # # For all other cases (like `m n` or `m n, []`) we simply put 1 to the stack # and later lexer pushes corresponding bits on top of it. last_token = @last_token[0] lookahead = last_token == :tLBRACK || last_token == :tLPAREN_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.push(true) @lexer.cmdarg.push(top) else @lexer.cmdarg.push(true) end } call_args { # call_args can be followed by tLBRACE_ARG (that does cmdarg.push(0) in the lexer) # but the push must be done after cmdarg.pop() in the parser. # So this code does cmdarg.pop() to pop 0 pushed by tLBRACE_ARG, # cmdarg.pop() to pop 1 pushed by command_args, # and cmdarg.push(0) to restore back the flag set by tLBRACE_ARG. 
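                      # --- editor's illustration, not part of the upstream grammar: a worked
                      # trace of the cmdarg fixup described above, with `true`/`false` standing
                      # for the 1/0 bits named in that comment.
                      #   on entry (tLBRACE_ARG lookahead):  cmdarg = [.., true, false]  # true from command_args, false from tLBRACE_ARG
                      #   after pop/pop/push(top) below:     cmdarg = [.., false]        # command_args bit dropped, tLBRACE_ARG bit kept
                      #   without that lookahead, the single cmdarg.pop in the else branch suffices.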
last_token = @last_token[0] lookahead = last_token == :tLBRACE_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.pop @lexer.cmdarg.push(top) else @lexer.cmdarg.pop end result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs_arg: mrhs { result = @builder.array(nil, val[0], nil) } | arg_value mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { @lexer.cmdarg.push(false) } bodystmt kEND { @lexer.cmdarg.pop result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG stmt { @lexer.state = :expr_endarg } rparen { result = @builder.begin(val[0], val[1], val[3]) } | tLPAREN_ARG { @lexer.state = :expr_endarg } opt_nl tRPAREN { result = @builder.begin(val[0], nil, val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | k_return { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr rparen { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | tLAMBDA { result = @context.dup @context.in_lambda = true } lambda { lambda_call = @builder.call_lambda(val[0]) args, (begin_t, body, end_t) = val[2] result = @builder.block(lambda_call, begin_t, args, body, end_t) @context.in_lambda = val[1].in_lambda } | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE expr_value_do compstmt kEND { result = @builder.loop(:while, val[0], *val[1], val[2], val[3]) } | kUNTIL expr_value_do compstmt kEND { result = @builder.loop(:until, val[0], *val[1], val[2], val[3]) } | kCASE expr_value opt_terms case_body kEND 
{ *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kFOR for_var kIN expr_value_do compstmt kEND { result = @builder.for(val[0], val[1], val[2], *val[3], val[4], val[5]) } | k_class cpath superclass { local_push @context.in_class = true } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | k_def fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_method(val[0], val[1], val[3], val[4], val[5]) local_pop @context.in_def = val[2].in_def } | k_def singleton dot_or_colon { @lexer.state = :expr_fname } fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_singleton(val[0], val[1], val[2], val[4], val[6], val[7], val[8]) local_pop @context.in_def = val[5].in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } k_def: kDEF { result = val[0] } k_return: kRETURN { if @context.in_class && !@context.in_def && !(context.in_block || context.in_lambda) diagnostic :error, :invalid_return, nil, val[0] end } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA tSTAR f_norm_arg { result = val[0]. push(@builder.restarg(val[2], val[3])) } | f_marg_list tCOMMA tSTAR f_norm_arg tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2], val[3])). concat(val[5]) } | f_marg_list tCOMMA tSTAR { result = val[0]. push(@builder.restarg(val[2])) } | f_marg_list tCOMMA tSTAR tCOMMA f_marg_list { result = val[0]. push(@builder.restarg(val[2])). 
concat(val[4]) } | tSTAR f_norm_arg { result = [ @builder.restarg(val[0], val[1]) ] } | tSTAR f_norm_arg tCOMMA f_marg_list { result = [ @builder.restarg(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.restarg(val[0]) ] } | tSTAR tCOMMA f_marg_list { result = [ @builder.restarg(val[0]), *val[2] ] } block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). 
concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { result = @builder.args(val[0], val[1], val[2]) } | tOROP { result = @builder.args(val[0], [], val[0]) } | tPIPE block_param opt_bv_decl tPIPE { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: { @static_env.extend_dynamic } f_larglist { @lexer.cmdarg.push(false) } lambda_body { @lexer.cmdarg.pop result = [ val[1], val[3] ] @static_env.unextend } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } bodystmt kEND { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } do_block: kDO_BLOCK { result = @context.dup @context.in_block = true } do_body kEND { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value call_op operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value call_op paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { result = @context.dup 
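# The duplicated Context is kept in `result` so that the closing action of
# brace_block can restore `in_block` to the value it had before this block.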
@context.in_block = true } brace_body tRCURLY { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } | kDO { result = @context.dup @context.in_block = true } do_body kEND { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } brace_body: { @static_env.extend_dynamic } opt_block_param compstmt { result = [ val[1], val[2] ] @static_env.unextend } do_body: { @static_env.extend_dynamic } { @lexer.cmdarg.push(false) } opt_block_param bodystmt { result = [ val[2], val[3] ] @static_env.unextend @lexer.cmdarg.pop } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol | dsym strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { string = @builder.string_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tSTRING { string = @builder.string(val[0]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { string = @builder.xstring_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word tSPACE { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cmdarg.push(false) @lexer.cond.push(false) } compstmt 
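# Illustrative note (example input assumed): for an interpolated string such as
#   "a#{b}c"
# the lexer emits tSTRING_BEG, tSTRING_CONTENT, tSTRING_DBEG, the tokens for
# `b`, tSTRING_DEND, tSTRING_CONTENT, tSTRING_END; cmdarg/cond are cleared
# around the embedded compstmt so surrounding `do`/command-argument state does
# not leak into the interpolation.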
tSTRING_DEND { @lexer.cmdarg.pop @lexer.cond.pop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: tSYMBOL { @lexer.state = :expr_end result = @builder.symbol(val[0]) } dsym: tSYMBEG string_contents tSTRING_END { @lexer.state = :expr_end result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: simple_numeric { result = val[0] } | tUNARY_NUM simple_numeric =tLOWEST { if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], val[1]) else result = @builder.unary_num(val[0], val[1]) end } simple_numeric: tINTEGER { @lexer.state = :expr_end result = @builder.integer(val[0]) } | tFLOAT { @lexer.state = :expr_end result = @builder.float(val[0]) } | tRATIONAL { @lexer.state = :expr_end result = @builder.rational(val[0]) } | tIMAGINARY { @lexer.state = :expr_end result = @builder.complex(val[0]) } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | # nothing { result = nil } f_arglist: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value } | { result = @context.in_kwarg @context.in_kwarg = true } f_args term { @context.in_kwarg = val[0] result = @builder.args(nil, val[1], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). 
concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] result = val[0] } f_arg_asgn: f_norm_arg { result = val[0] } f_arg_item: f_arg_asgn { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_label: tLABEL { check_kwarg_name(val[0]) @static_env.declare val[0][0] result = val[0] } f_kw: f_label arg_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kw: f_label primary_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.kwrestarg(val[0]) ] } f_opt: f_arg_asgn tEQL arg_value { result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: f_arg_asgn tEQL primary_value { result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tSTRING_BEG string_contents tLABEL_END arg_value { result = @builder.pair_quoted(val[0], val[1], val[2], val[3]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: call_op | tCOLON2 call_op: tDOT { result = [:dot, val[0][1]] } | tANDDOT { result = [:anddot, val[0][1]] } opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } 
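# Hedged note: call_op above tags the receiver dot, so `a&.b` reaches
# @builder.call_method with an [:anddot, location] operator (emitted as a
# `csend` node), while `a.b` carries [:dot, location] and becomes `send`.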
rbracket: opt_nl tRBRACK { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 26 end def default_encoding Encoding::UTF_8 end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop end parser-3.3.4.2/lib/parser/ruby27.y000066400000000000000000003224441465510415600165630ustar00rootroot00000000000000class Parser::Ruby27 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tRATIONAL tIMAGINARY tLABEL_END tANDDOT tBDOT2 tBDOT3 prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 tBDOT2 tBDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kIN nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: { @current_arg_stack.push(nil) @max_numparam_stack.push(static: false) } top_compstmt { result = val[1] @current_arg_stack.pop @max_numparam_stack.pop } top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN begin_block { result = @builder.preexe(val[0], *val[1]) } begin_block: tLCURLY top_compstmt tRCURLY { result = val } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? 
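# A bare `else` with no `rescue` clause, e.g. `begin; work; else; done; end`,
# has nothing to guard, so it is rejected with the :useless_else diagnostic.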
diagnostic :error, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN begin_block { diagnostic :error, :begin_in_method, nil, val[0] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL mrhs_arg kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) begin_body = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.multi_assign(val[0], val[1], begin_body) } | mlhs tEQL mrhs_arg { result = @builder.multi_assign(val[0], val[1], val[2]) } | expr command_asgn: lhs tEQL command_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_rhs { @builder.op_assign(val[0], val[1], val[2]) } command_rhs: command_call =tOP_ASGN | command_call kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | command_asgn expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = 
@builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg kIN { @lexer.state = :expr_beg @lexer.command_start = false @pattern_variables.push result = @context.in_kwarg @context.in_kwarg = true } p_expr { @pattern_variables.pop @context.in_kwarg = val[2] if @builder.class.emit_match_pattern result = @builder.match_pattern(val[0], val[1], val[3]) else result = @builder.in_match(val[0], val[1], val[3]) end } | arg =tLBRACE_ARG expr_value: expr expr_value_do: { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop result = [ val[1], val[2] ] } command_call: command | block_command block_command: block_call | block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } cmd_brace_block: tLBRACE_ARG { result = @context.dup @context.in_block = true } brace_body tRCURLY { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value call_op operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value call_op operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | k_return call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). 
concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { if (val[1][0] == :anddot) diagnostic :error, :csend_in_lhs_of_masgn, nil, val[1] end result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { if (val[1][0] == :anddot) diagnostic :error, :csend_in_lhs_of_masgn, nil, val[1] end result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fitem: fname { result = @builder.symbol_internal(val[0]) } | symbol undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg_rhs { result = @builder.assign(val[0], val[1], val[2]) } 
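# Illustrative sketch (example inputs assumed, not from the source): the
# alternatives below cover operator assignment and ranges; `x += 1` goes
# through @builder.op_assign, and 2.7's beginless ranges (`..10`) reach the
# tBDOT2/tBDOT3 alternatives further down, building a range node with a nil
# left-hand side.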
| var_lhs tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | arg tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | tBDOT2 arg { result = @builder.range_inclusive(nil, val[0], val[1]) } | tBDOT3 arg { result = @builder.range_exclusive(nil, val[0], val[1]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM simple_numeric tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( val[1], val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr =tCMP | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | 
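# Hedged note: rel_expr below is left-recursive, so a chained comparison such
# as `1 < a < 3` (an assumed example) parses as nested @builder.binary_op
# calls, i.e. (1 < a) < 3.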
primary relop: tGT | tLT | tGEQ | tLEQ rel_expr: arg relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } arg_rhs: arg =tOP_ASGN | arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } paren_args: tLPAREN2 opt_call_args rparen { result = val } | tLPAREN2 args tCOMMA args_forward rparen { unless @static_env.declared_forward_args? diagnostic :error, :unexpected_token, { :token => 'tBDOT3' } , val[3] end result = [val[0], [*val[1], @builder.forwarded_args(val[3])], val[4]] } | tLPAREN2 args_forward rparen { unless @static_env.declared_forward_args? diagnostic :error, :unexpected_token, { :token => 'tBDOT3' } , val[1] end result = [val[0], [@builder.forwarded_args(val[1])], val[2]] } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { # When branch gets invoked by RACC's lookahead # and command args start with '[' or '(' # we need to put `true` to the cmdarg stack # **before** `false` pushed by lexer # m [], n # ^ # Right here we have cmdarg [...0] because # lexer pushed it on '[' # We need to modify cmdarg stack to [...10] # # For all other cases (like `m n` or `m n, []`) we simply put 1 to the stack # and later lexer pushes corresponding bits on top of it. last_token = @last_token[0] lookahead = last_token == :tLBRACK || last_token == :tLPAREN_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.push(true) @lexer.cmdarg.push(top) else @lexer.cmdarg.push(true) end } call_args { # call_args can be followed by tLBRACE_ARG (that does cmdarg.push(0) in the lexer) # but the push must be done after cmdarg.pop() in the parser. # So this code does cmdarg.pop() to pop 0 pushed by tLBRACE_ARG, # cmdarg.pop() to pop 1 pushed by command_args, # and cmdarg.push(0) to restore back the flag set by tLBRACE_ARG. 
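# Informal recap of the comment above: at this point the stack still holds the
# 1 pushed by command_args and, when a `{` was lexed as tLBRACE_ARG, a 0 on
# top of it; the code below drops both and re-pushes that 0 so only the
# tLBRACE_ARG flag remains.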
last_token = @last_token[0] lookahead = last_token == :tLBRACE_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.pop @lexer.cmdarg.push(top) else @lexer.cmdarg.pop end result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs_arg: mrhs { result = @builder.array(nil, val[0], nil) } | arg_value mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { @lexer.cmdarg.push(false) } bodystmt kEND { @lexer.cmdarg.pop result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG stmt { @lexer.state = :expr_endarg } rparen { result = @builder.begin(val[0], val[1], val[3]) } | tLPAREN_ARG { @lexer.state = :expr_endarg } opt_nl tRPAREN { result = @builder.begin(val[0], nil, val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | k_return { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr rparen { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | tLAMBDA { result = @context.dup @context.in_lambda = true } lambda { lambda_call = @builder.call_lambda(val[0]) args, (begin_t, body, end_t) = val[2] result = @builder.block(lambda_call, begin_t, args, body, end_t) @context.in_lambda = val[1].in_lambda } | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE expr_value_do compstmt kEND { result = @builder.loop(:while, val[0], *val[1], val[2], val[3]) } | kUNTIL expr_value_do compstmt kEND { result = @builder.loop(:until, val[0], *val[1], val[2], val[3]) } | kCASE expr_value opt_terms case_body kEND 
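# Editorial note: in this grammar a `case` may be followed either by classic
# `when` clauses (case_body -> @builder.case) or, new in 2.7, by `in` pattern
# clauses (p_case_body -> @builder.case_match), e.g.
#   case point; in [x, y] then x + y; end
# (example input assumed for illustration).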
{ *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kCASE expr_value opt_terms p_case_body kEND { *in_bodies, (else_t, else_body) = *val[3] result = @builder.case_match(val[0], val[1], in_bodies, else_t, else_body, val[4]) } | kFOR for_var kIN expr_value_do compstmt kEND { result = @builder.for(val[0], val[1], val[2], *val[3], val[4], val[5]) } | k_class cpath superclass { local_push @context.in_class = true } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | k_def fname { local_push result = context.dup @context.in_def = true @current_arg_stack.push(nil) } f_arglist bodystmt kEND { result = @builder.def_method(val[0], val[1], val[3], val[4], val[5]) local_pop @context.in_def = val[2].in_def @current_arg_stack.pop } | k_def singleton dot_or_colon { @lexer.state = :expr_fname } fname { local_push result = context.dup @context.in_def = true @current_arg_stack.push(nil) } f_arglist bodystmt kEND { result = @builder.def_singleton(val[0], val[1], val[2], val[4], val[6], val[7], val[8]) local_pop @context.in_def = val[5].in_def @current_arg_stack.pop } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } k_def: kDEF { result = val[0] } k_return: kRETURN { if @context.in_class && !@context.in_def && !(context.in_block || context.in_lambda) diagnostic :error, :invalid_return, nil, val[0] end } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA f_rest_marg { result = val[0]. push(val[2]) } | f_marg_list tCOMMA f_rest_marg tCOMMA f_marg_list { result = val[0]. push(val[2]). 
concat(val[4]) } | f_rest_marg { result = [ val[0] ] } | f_rest_marg tCOMMA f_marg_list { result = [ val[0], *val[2] ] } f_rest_marg: tSTAR f_norm_arg { result = @builder.restarg(val[0], val[1]) } | tSTAR { result = @builder.restarg(val[0]) } block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_no_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(nil) result = @builder.args(val[0], val[1], val[2]) } | tPIPE block_param opt_bv_decl tPIPE { @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(nil) result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } f_larglist { @lexer.cmdarg.push(false) } lambda_body { args = @max_numparam_stack.has_numparams? ? @builder.numargs(@max_numparam_stack.top) : val[1] result = [ args, val[3] ] @max_numparam_stack.pop @static_env.unextend @lexer.cmdarg.pop } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { @max_numparam_stack.has_ordinary_params! result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { if val[0].any? @max_numparam_stack.has_ordinary_params! 
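# Hedged note on numbered parameters: declaring any ordinary parameter calls
# has_ordinary_params!, which rules out _1.._9 in the same lambda or block;
# for a bare `-> { _1 * 2 }` (assumed example) the args node is instead
# synthesized from @max_numparam_stack via @builder.numargs in the lambda,
# brace_body and do_body actions.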
end result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } bodystmt kEND { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } do_block: kDO_BLOCK { result = @context.dup @context.in_block = true } do_body kEND { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value call_op operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value call_op paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { result = @context.dup @context.in_block = true } brace_body tRCURLY { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } | kDO { result = @context.dup @context.in_block = true } do_body kEND { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } brace_body: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } opt_block_param compstmt { args = @max_numparam_stack.has_numparams? ? @builder.numargs(@max_numparam_stack.top) : val[1] result = [ args, val[2] ] @max_numparam_stack.pop @static_env.unextend } do_body: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } { @lexer.cmdarg.push(false) } opt_block_param bodystmt { args = @max_numparam_stack.has_numparams? ? 
@builder.numargs(@max_numparam_stack.top) : val[2] result = [ args, val[3] ] @max_numparam_stack.pop @static_env.unextend @lexer.cmdarg.pop } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body p_case_body: kIN { @lexer.state = :expr_beg @lexer.command_start = false @pattern_variables.push @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = true } p_top_expr then { @pattern_hash_keys.pop @pattern_variables.pop @context.in_kwarg = val[1] } compstmt p_cases { result = [ @builder.in_pattern(val[0], *val[2], val[3], val[5]), *val[6] ] } p_cases: opt_else { result = [ val[0] ] } | p_case_body p_top_expr: p_top_expr_body { result = [ val[0], nil ] } | p_top_expr_body kIF_MOD expr_value { result = [ val[0], @builder.if_guard(val[1], val[2]) ] } | p_top_expr_body kUNLESS_MOD expr_value { result = [ val[0], @builder.unless_guard(val[1], val[2]) ] } p_top_expr_body: p_expr | p_expr tCOMMA { # array patterns that end with comma # like 1, 2, # must be emitted as `array_pattern_with_tail` item = @builder.match_with_trailing_comma(val[0], val[1]) result = @builder.array_pattern(nil, [ item ], nil) } | p_expr tCOMMA p_args { result = @builder.array_pattern(nil, [val[0]].concat(val[2]), nil) } | p_args_tail { result = @builder.array_pattern(nil, val[0], nil) } | p_kwargs { result = @builder.hash_pattern(nil, val[0], nil) } p_expr: p_as p_as: p_expr tASSOC p_variable { result = @builder.match_as(val[0], val[1], val[2]) } | p_alt p_alt: p_alt tPIPE p_expr_basic { result = @builder.match_alt(val[0], val[1], val[2]) } | p_expr_basic p_lparen: tLPAREN2 { result = val[0] @pattern_hash_keys.push } p_lbracket: tLBRACK2 { result = val[0] @pattern_hash_keys.push } p_expr_basic: p_value | p_const p_lparen p_args rparen { @pattern_hash_keys.pop pattern = @builder.array_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lparen p_kwargs rparen { @pattern_hash_keys.pop pattern = @builder.hash_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const tLPAREN2 rparen { pattern = @builder.array_pattern(val[1], nil, val[2]) result = @builder.const_pattern(val[0], val[1], pattern, val[2]) } | p_const p_lbracket p_args rbracket { @pattern_hash_keys.pop pattern = @builder.array_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lbracket p_kwargs rbracket { @pattern_hash_keys.pop pattern = @builder.hash_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const tLBRACK2 rbracket { pattern = @builder.array_pattern(val[1], nil, val[2]) result = @builder.const_pattern(val[0], val[1], pattern, val[2]) } | tLBRACK { @pattern_hash_keys.push } p_args rbracket { @pattern_hash_keys.pop result = @builder.array_pattern(val[0], val[2], val[3]) } | tLBRACK rbracket { result = @builder.array_pattern(val[0], [], val[1]) } | tLBRACE { @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = false } p_kwargs rbrace { @pattern_hash_keys.pop @context.in_kwarg = val[1] result = @builder.hash_pattern(val[0], val[2], val[3]) } | tLBRACE rbrace { result = @builder.hash_pattern(val[0], [], val[1]) } | tLPAREN { @pattern_hash_keys.push } p_expr rparen { @pattern_hash_keys.pop result = @builder.begin(val[0], val[2], val[3]) } p_args: p_expr { result = [ val[0] ] } | p_args_head { result = val[0] } | p_args_head 
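# Illustrative note (assumed example): a pattern like `in [first, *rest, last]`
# takes the p_args_head tSTAR tIDENTIFIER tCOMMA p_args_post alternative below,
# so `*rest` becomes @builder.match_rest inside the enclosing array_pattern.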
p_arg { result = [ *val[0], val[1] ] } | p_args_head tSTAR tIDENTIFIER { match_rest = @builder.match_rest(val[1], val[2]) result = [ *val[0], match_rest ] } | p_args_head tSTAR tIDENTIFIER tCOMMA p_args_post { match_rest = @builder.match_rest(val[1], val[2]) result = [ *val[0], match_rest, *val[4] ] } | p_args_head tSTAR { result = [ *val[0], @builder.match_rest(val[1]) ] } | p_args_head tSTAR tCOMMA p_args_post { result = [ *val[0], @builder.match_rest(val[1]), *val[3] ] } | p_args_tail p_args_head: p_arg tCOMMA { # array patterns that end with comma # like [1, 2,] # must be emitted as `array_pattern_with_tail` item = @builder.match_with_trailing_comma(val[0], val[1]) result = [ item ] } | p_args_head p_arg tCOMMA { # array patterns that end with comma # like [1, 2,] # must be emitted as `array_pattern_with_tail` last_item = @builder.match_with_trailing_comma(val[1], val[2]) result = [ *val[0], last_item ] } p_args_tail: tSTAR tIDENTIFIER { match_rest = @builder.match_rest(val[0], val[1]) result = [ match_rest ] } | tSTAR tIDENTIFIER tCOMMA p_args_post { match_rest = @builder.match_rest(val[0], val[1]) result = [ match_rest, *val[3] ] } | tSTAR { match_rest = @builder.match_rest(val[0]) result = [ match_rest ] } | tSTAR tCOMMA p_args_post { match_rest = @builder.match_rest(val[0]) result = [ match_rest, *val[2] ] } p_args_post: p_arg { result = [ val[0] ] } | p_args_post tCOMMA p_arg { result = [ *val[0], val[2] ] } p_arg: p_expr p_kwargs: p_kwarg tCOMMA p_kwrest { result = [ *val[0], *val[2] ] } | p_kwarg { result = val[0] } | p_kwarg tCOMMA { result = val[0] } | p_kwrest { result = val[0] } | p_kwarg tCOMMA p_kwnorest { result = [ *val[0], *val[2] ] } | p_kwnorest { result = [ *val[0], *val[2] ] } p_kwarg: p_kw { result = [ val[0] ] } | p_kwarg tCOMMA p_kw { result = [ *val[0], val[2] ] } p_kw: p_kw_label p_expr { result = @builder.match_pair(*val[0], val[1]) } | p_kw_label { result = @builder.match_label(*val[0]) } p_kw_label: tLABEL { result = [:label, val[0]] } | tSTRING_BEG string_contents tLABEL_END { result = [:quoted, [val[0], val[1], val[2]]] } p_kwrest: kwrest_mark tIDENTIFIER { result = [ @builder.match_rest(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.match_rest(val[0], nil) ] } p_kwnorest: kwrest_mark kNIL { result = [ @builder.match_nil_pattern(val[0], val[1]) ] } p_value: p_primitive | p_primitive tDOT2 p_primitive { result = @builder.range_inclusive(val[0], val[1], val[2]) } | p_primitive tDOT3 p_primitive { result = @builder.range_exclusive(val[0], val[1], val[2]) } | p_primitive tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | p_primitive tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | p_variable | p_var_ref | p_const | tBDOT2 p_primitive { result = @builder.range_inclusive(nil, val[0], val[1]) } | tBDOT3 p_primitive { result = @builder.range_exclusive(nil, val[0], val[1]) } p_primitive: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | keyword_variable { result = @builder.accessible(val[0]) } | tLAMBDA { result = @context.dup @context.in_lambda = true } lambda { lambda_call = @builder.call_lambda(val[0]) args, (begin_t, body, end_t) = val[2] result = @builder.block(lambda_call, begin_t, args, body, end_t) @context.in_lambda = val[1].in_lambda } p_variable: tIDENTIFIER { result = @builder.assignable(@builder.match_var(val[0])) } p_var_ref: tCARET tIDENTIFIER { name = val[1][0] unless static_env.declared?(name) diagnostic :error, :undefined_lvar, { :name => name }, val[1] end lvar = 
@builder.accessible(@builder.ident(val[1])) result = @builder.pin(val[0], lvar) } p_const: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | p_const tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCONSTANT { result = @builder.const(val[0]) } opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { string = @builder.string_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tSTRING { string = @builder.string(val[0]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { string = @builder.xstring_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word tSPACE { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cmdarg.push(false) @lexer.cond.push(false) } compstmt tSTRING_DEND { @lexer.cmdarg.pop @lexer.cond.pop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: ssym | dsym ssym: tSYMBOL { @lexer.state = :expr_end result = @builder.symbol(val[0]) } dsym: tSYMBEG string_contents tSTRING_END { @lexer.state = :expr_end result = 
@builder.symbol_compose(val[0], val[1], val[2]) } numeric: simple_numeric { result = val[0] } | tUNARY_NUM simple_numeric =tLOWEST { if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], val[1]) else result = @builder.unary_num(val[0], val[1]) end } simple_numeric: tINTEGER { @lexer.state = :expr_end result = @builder.integer(val[0]) } | tFLOAT { @lexer.state = :expr_end result = @builder.float(val[0]) } | tRATIONAL { @lexer.state = :expr_end result = @builder.rational(val[0]) } | tIMAGINARY { @lexer.state = :expr_end result = @builder.complex(val[0]) } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | # nothing { result = nil } f_arglist: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value } | tLPAREN2 f_arg tCOMMA args_forward rparen { args = [ *val[1], @builder.forward_arg(val[3]) ] result = @builder.args(val[0], args, val[4]) @static_env.declare_forward_args } | tLPAREN2 args_forward rparen { result = @builder.forward_only_args(val[0], val[1], val[2]) @static_env.declare_forward_args @lexer.state = :expr_value } | { result = @context.in_kwarg @context.in_kwarg = true } f_args term { @context.in_kwarg = val[0] result = @builder.args(nil, val[1], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_no_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). 
concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } args_forward: tBDOT3 { result = val[0] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] @max_numparam_stack.has_ordinary_params! result = val[0] } f_arg_asgn: f_norm_arg { @current_arg_stack.set(val[0][0]) result = val[0] } f_arg_item: f_arg_asgn { @current_arg_stack.set(0) result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_label: tLABEL { check_kwarg_name(val[0]) @static_env.declare val[0][0] @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(val[0][0]) result = val[0] } f_kw: f_label arg_value { @current_arg_stack.set(nil) result = @builder.kwoptarg(val[0], val[1]) } | f_label { @current_arg_stack.set(nil) result = @builder.kwarg(val[0]) } f_block_kw: f_label primary_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_no_kwarg: kwrest_mark kNIL { result = [ @builder.kwnilarg(val[0], val[1]) ] } f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.kwrestarg(val[0]) ] } f_opt: f_arg_asgn tEQL arg_value { @current_arg_stack.set(0) result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: f_arg_asgn tEQL primary_value { @current_arg_stack.set(0) result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tSTRING_BEG string_contents tLABEL_END arg_value { result = @builder.pair_quoted(val[0], val[1], val[2], 
val[3]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: call_op | tCOLON2 call_op: tDOT { result = [:dot, val[0][1]] } | tANDDOT { result = [:anddot, val[0][1]] } opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } rbrace: opt_nl tRCURLY { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 27 end def default_encoding Encoding::UTF_8 end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) @max_numparam_stack.push(static: true) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop @max_numparam_stack.pop end def try_declare_numparam(node) name = node.children[0] if name =~ /\A_[1-9]\z/ && !static_env.declared?(name) && context.in_dynamic_block? # definitely an implicit param location = node.loc.expression if max_numparam_stack.has_ordinary_params? diagnostic :error, :ordinary_param_defined, nil, [nil, location] end raw_max_numparam_stack = max_numparam_stack.stack.dup # ignore current block scope raw_max_numparam_stack.pop raw_max_numparam_stack.reverse_each do |outer_scope| if outer_scope[:static] # found an outer scope that can't have numparams # like def/class/etc break else outer_scope_has_numparams = outer_scope[:value] > 0 if outer_scope_has_numparams diagnostic :error, :numparam_used_in_outer_scope, nil, [nil, location] else # for now it's ok, but an outer scope can also be a block # with numparams, so we need to continue end end end static_env.declare(name) max_numparam_stack.register(name[1].to_i) true else false end end parser-3.3.4.2/lib/parser/ruby30.y000066400000000000000000003303201465510415600165450ustar00rootroot00000000000000class Parser::Ruby30 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tRATIONAL tIMAGINARY tLABEL_END tANDDOT tBDOT2 tBDOT3 prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 tBDOT2 tBDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc 
kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kIN nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: { @current_arg_stack.push(nil) @max_numparam_stack.push(static: true) } top_compstmt { result = val[1] @current_arg_stack.pop @max_numparam_stack.pop } top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN begin_block { result = @builder.preexe(val[0], *val[1]) } begin_block: tLCURLY top_compstmt tRCURLY { result = val } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? diagnostic :error, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN begin_block { diagnostic :error, :begin_in_method, nil, val[0] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL mrhs_arg kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) begin_body = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.multi_assign(val[0], val[1], begin_body) } | mlhs tEQL mrhs_arg { result = @builder.multi_assign(val[0], val[1], val[2]) } | expr command_asgn: lhs tEQL command_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT 
tOP_ASGN command_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_rhs { @builder.op_assign(val[0], val[1], val[2]) } command_rhs: command_call =tOP_ASGN | command_call kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | command_asgn expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg tASSOC { @lexer.state = :expr_beg @lexer.command_start = false @pattern_variables.push result = @context.in_kwarg @context.in_kwarg = true } p_expr { @pattern_variables.pop @context.in_kwarg = val[2] result = @builder.match_pattern(val[0], val[1], val[3]) } | arg kIN { @lexer.state = :expr_beg @lexer.command_start = false @pattern_variables.push result = @context.in_kwarg @context.in_kwarg = true } p_expr { @pattern_variables.pop @context.in_kwarg = val[2] result = @builder.match_pattern_p(val[0], val[1], val[3]) } | arg =tLBRACE_ARG expr_value: expr expr_value_do: { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop result = [ val[1], val[2] ] } def_name: fname { local_push @current_arg_stack.push(nil) result = [ val[0], @context.dup ] @context.in_def = true } defn_head: k_def def_name { result = [ val[0], val[1] ] } defs_head: k_def singleton dot_or_colon { @lexer.state = :expr_fname } def_name { result = [ val[0], val[1], val[2], val[4] ] } command_call: command | block_command block_command: block_call | block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } cmd_brace_block: tLBRACE_ARG { result = @context.dup @context.in_block = true } brace_body tRCURLY { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value call_op operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value call_op operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | k_return 
call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { if (val[1][0] == :anddot) diagnostic :error, :csend_in_lhs_of_masgn, nil, val[1] end result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { if (val[1][0] == :anddot) diagnostic :error, :csend_in_lhs_of_masgn, nil, val[1] end result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = 
@builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fitem: fname { result = @builder.symbol_internal(val[0]) } | symbol undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | arg tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | tBDOT2 arg { result = @builder.range_inclusive(nil, val[0], val[1]) } | tBDOT3 arg { result = @builder.range_exclusive(nil, val[0], val[1]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM simple_numeric tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( val[1], val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr =tCMP 
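                    # Illustration (added note; the example source is hypothetical, not
                    # from this grammar): the `rel_expr =tCMP` alternative above, together
                    # with the left-recursive rel_expr rule defined a little further down,
                    # is what lets chained comparisons parse, e.g.
                    #
                    #   1 < 2 < 3
                    #
                    # appears to reduce as `(1 < 2) < 3`, a :< send whose receiver is
                    # another :< send. The `=tCMP` / `=tGT` markers only assign those
                    # alternatives the precedence of the named token.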
| arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | defn_head f_opt_paren_args tEQL arg { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], val[3]) local_pop @context.in_def = ctx.in_def @current_arg_stack.pop } | defn_head f_opt_paren_args tEQL arg kRESCUE_MOD arg { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) rescue_body = @builder.rescue_body(val[4], nil, nil, nil, nil, val[5]) method_body = @builder.begin_body(val[3], [ rescue_body ]) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], method_body) local_pop @context.in_def = ctx.in_def @current_arg_stack.pop } | defs_head f_opt_paren_args tEQL arg { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @context.in_def = ctx.in_def @current_arg_stack.pop } | defs_head f_opt_paren_args tEQL arg kRESCUE_MOD arg { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) rescue_body = @builder.rescue_body(val[4], nil, nil, nil, nil, val[5]) method_body = @builder.begin_body(val[3], [ rescue_body ]) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], method_body) local_pop @context.in_def = ctx.in_def @current_arg_stack.pop } | primary relop: tGT | tLT | tGEQ | tLEQ rel_expr: arg relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } arg_rhs: arg =tOP_ASGN | arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } paren_args: tLPAREN2 opt_call_args rparen { result = val } | tLPAREN2 args tCOMMA args_forward rparen { unless @static_env.declared_forward_args? diagnostic :error, :unexpected_token, { :token => 'tBDOT3' } , val[3] end result = [val[0], [*val[1], @builder.forwarded_args(val[3])], val[4]] } | tLPAREN2 args_forward rparen { unless @static_env.declared_forward_args? 
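                        # Illustration (method names below are hypothetical): a bare `...`
                        # in an argument list is only accepted when the enclosing method
                        # itself declared `...`, which is what declared_forward_args?
                        # checks here, e.g.
                        #
                        #   def relay(...)
                        #     target(...)    # ok: forwards all arguments of `relay`
                        #   end
                        #
                        #   target(...)      # outside such a def, the diagnostic below fires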
diagnostic :error, :unexpected_token, { :token => 'tBDOT3' } , val[1] end result = [val[0], [@builder.forwarded_args(val[1])], val[2]] } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { # When branch gets invoked by RACC's lookahead # and command args start with '[' or '(' # we need to put `true` to the cmdarg stack # **before** `false` pushed by lexer # m [], n # ^ # Right here we have cmdarg [...0] because # lexer pushed it on '[' # We need to modify cmdarg stack to [...10] # # For all other cases (like `m n` or `m n, []`) we simply put 1 to the stack # and later lexer pushes corresponding bits on top of it. last_token = @last_token[0] lookahead = last_token == :tLBRACK || last_token == :tLPAREN_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.push(true) @lexer.cmdarg.push(top) else @lexer.cmdarg.push(true) end } call_args { # call_args can be followed by tLBRACE_ARG (that does cmdarg.push(0) in the lexer) # but the push must be done after cmdarg.pop() in the parser. # So this code does cmdarg.pop() to pop 0 pushed by tLBRACE_ARG, # cmdarg.pop() to pop 1 pushed by command_args, # and cmdarg.push(0) to restore back the flag set by tLBRACE_ARG. 
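                        # Illustrative sketch of the pops/pushes described above (the
                        # example source is hypothetical): for something like
                        # `m list.map { |x| x }` the lexer has pushed `false` for
                        # tLBRACE_ARG on top of the `true` pushed by command_args, so the
                        # stack looks roughly like [.., true, false]; the code below pops
                        # that `false`, pops our `true`, and pushes the `false` back,
                        # leaving [.., false] for the block that follows.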
last_token = @last_token[0] lookahead = last_token == :tLBRACE_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.pop @lexer.cmdarg.push(top) else @lexer.cmdarg.pop end result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs_arg: mrhs { result = @builder.array(nil, val[0], nil) } | arg_value mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { @lexer.cmdarg.push(false) } bodystmt kEND { @lexer.cmdarg.pop result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG stmt { @lexer.state = :expr_endarg } rparen { result = @builder.begin(val[0], val[1], val[3]) } | tLPAREN_ARG { @lexer.state = :expr_endarg } opt_nl tRPAREN { result = @builder.begin(val[0], nil, val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | k_return { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr rparen { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | lambda | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE expr_value_do compstmt kEND { result = @builder.loop(:while, val[0], *val[1], val[2], val[3]) } | kUNTIL expr_value_do compstmt kEND { result = @builder.loop(:until, val[0], *val[1], val[2], val[3]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, 
when_bodies, else_t, else_body, val[3]) } | kCASE expr_value opt_terms p_case_body kEND { *in_bodies, (else_t, else_body) = *val[3] result = @builder.case_match(val[0], val[1], in_bodies, else_t, else_body, val[4]) } | kFOR for_var kIN expr_value_do compstmt kEND { result = @builder.for(val[0], val[1], val[2], *val[3], val[4], val[5]) } | k_class cpath superclass { @context.in_class = true local_push } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | defn_head f_arglist bodystmt kEND { def_t, (name_t, ctx) = val[0] result = @builder.def_method(def_t, name_t, val[1], val[2], val[3]) local_pop @context.in_def = ctx.in_def @current_arg_stack.pop } | defs_head f_arglist bodystmt kEND { def_t, recv, dot_t, (name_t, ctx) = val[0] result = @builder.def_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @context.in_def = ctx.in_def @current_arg_stack.pop } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } k_def: kDEF { result = val[0] } k_return: kRETURN { if @context.in_class && !@context.in_def && !(context.in_block || context.in_lambda) diagnostic :error, :invalid_return, nil, val[0] end } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA f_rest_marg { result = val[0]. push(val[2]) } | f_marg_list tCOMMA f_rest_marg tCOMMA f_marg_list { result = val[0]. push(val[2]). 
concat(val[4]) } | f_rest_marg { result = [ val[0] ] } | f_rest_marg tCOMMA f_marg_list { result = [ val[0], *val[2] ] } f_rest_marg: tSTAR f_norm_arg { result = @builder.restarg(val[0], val[1]) } | tSTAR { result = @builder.restarg(val[0]) } f_any_kwrest: f_kwrest | f_no_kwarg block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_any_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } excessed_comma: tCOMMA block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg excessed_comma | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(nil) result = @builder.args(val[0], val[1], val[2]) } | tPIPE block_param opt_bv_decl tPIPE { @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(nil) result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: tLAMBDA { @static_env.extend_dynamic @max_numparam_stack.push(static: false) result = @context.dup @context.in_lambda = true } f_larglist { @lexer.cmdarg.push(false) } lambda_body { lambda_call = @builder.call_lambda(val[0]) args = @max_numparam_stack.has_numparams? ? 
@builder.numargs(@max_numparam_stack.top) : val[2] begin_t, body, end_t = val[4] @max_numparam_stack.pop @static_env.unextend @lexer.cmdarg.pop @context.in_lambda = val[1].in_lambda result = @builder.block(lambda_call, begin_t, args, body, end_t) } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { @max_numparam_stack.has_ordinary_params! result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { if val[0].any? @max_numparam_stack.has_ordinary_params! end result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } bodystmt kEND { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } do_block: kDO_BLOCK { result = @context.dup @context.in_block = true } do_body kEND { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value call_op operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value call_op paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { result = @context.dup @context.in_block = true } brace_body tRCURLY { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } | kDO { result = @context.dup @context.in_block = true } do_body kEND { result = [ val[0], *val[2], val[3] ] @context.in_block = val[1].in_block } brace_body: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } opt_block_param compstmt { args = @max_numparam_stack.has_numparams? ? 
@builder.numargs(@max_numparam_stack.top) : val[1] result = [ args, val[2] ] @max_numparam_stack.pop @static_env.unextend } do_body: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } { @lexer.cmdarg.push(false) } opt_block_param bodystmt { args = @max_numparam_stack.has_numparams? ? @builder.numargs(@max_numparam_stack.top) : val[2] result = [ args, val[3] ] @max_numparam_stack.pop @static_env.unextend @lexer.cmdarg.pop } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body p_case_body: kIN { @lexer.state = :expr_beg @lexer.command_start = false @pattern_variables.push @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = true } p_top_expr then { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[1] } compstmt p_cases { result = [ @builder.in_pattern(val[0], *val[2], val[3], val[5]), *val[6] ] } p_cases: opt_else { result = [ val[0] ] } | p_case_body p_top_expr: p_top_expr_body { result = [ val[0], nil ] } | p_top_expr_body kIF_MOD expr_value { result = [ val[0], @builder.if_guard(val[1], val[2]) ] } | p_top_expr_body kUNLESS_MOD expr_value { result = [ val[0], @builder.unless_guard(val[1], val[2]) ] } p_top_expr_body: p_expr | p_expr tCOMMA { # array patterns that end with comma # like 1, 2, # must be emitted as `array_pattern_with_tail` item = @builder.match_with_trailing_comma(val[0], val[1]) result = @builder.array_pattern(nil, [ item ], nil) } | p_expr tCOMMA p_args { result = @builder.array_pattern(nil, [val[0]].concat(val[2]), nil) } | p_find { result = @builder.find_pattern(nil, val[0], nil) } | p_args_tail { result = @builder.array_pattern(nil, val[0], nil) } | p_kwargs { result = @builder.hash_pattern(nil, val[0], nil) } p_expr: p_as p_as: p_expr tASSOC p_variable { result = @builder.match_as(val[0], val[1], val[2]) } | p_alt p_alt: p_alt tPIPE p_expr_basic { result = @builder.match_alt(val[0], val[1], val[2]) } | p_expr_basic p_lparen: tLPAREN2 { result = val[0] @pattern_hash_keys.push } p_lbracket: tLBRACK2 { result = val[0] @pattern_hash_keys.push } p_expr_basic: p_value | p_const p_lparen p_args rparen { @pattern_hash_keys.pop pattern = @builder.array_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lparen p_find rparen { @pattern_hash_keys.pop pattern = @builder.find_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lparen p_kwargs rparen { @pattern_hash_keys.pop pattern = @builder.hash_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const tLPAREN2 rparen { pattern = @builder.array_pattern(val[1], nil, val[2]) result = @builder.const_pattern(val[0], val[1], pattern, val[2]) } | p_const p_lbracket p_args rbracket { @pattern_hash_keys.pop pattern = @builder.array_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lbracket p_find rbracket { @pattern_hash_keys.pop pattern = @builder.find_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lbracket p_kwargs rbracket { @pattern_hash_keys.pop pattern = @builder.hash_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const tLBRACK2 rbracket { pattern = @builder.array_pattern(val[1], nil, val[2]) result = @builder.const_pattern(val[0], val[1], pattern, val[2]) 
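                        # Illustration (example is hypothetical): this alternative and the
                        # `p_const tLPAREN2 rparen` one above cover constant patterns with
                        # an empty subpattern list, e.g.
                        #
                        #   case value
                        #   in Point[] then ...   # Point === value and value.deconstruct is []
                        #   in Point() then ...
                        #   end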
} | tLBRACK p_args rbracket { result = @builder.array_pattern(val[0], val[1], val[2]) } | tLBRACK p_find rbracket { result = @builder.find_pattern(val[0], val[1], val[2]) } | tLBRACK rbracket { result = @builder.array_pattern(val[0], [], val[1]) } | tLBRACE { @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = false } p_kwargs rbrace { @pattern_hash_keys.pop @context.in_kwarg = val[1] result = @builder.hash_pattern(val[0], val[2], val[3]) } | tLBRACE rbrace { result = @builder.hash_pattern(val[0], [], val[1]) } | tLPAREN { @pattern_hash_keys.push } p_expr rparen { @pattern_hash_keys.pop result = @builder.begin(val[0], val[2], val[3]) } p_args: p_expr { result = [ val[0] ] } | p_args_head { result = val[0] } | p_args_head p_arg { result = [ *val[0], val[1] ] } | p_args_head tSTAR tIDENTIFIER { match_rest = @builder.match_rest(val[1], val[2]) result = [ *val[0], match_rest ] } | p_args_head tSTAR tIDENTIFIER tCOMMA p_args_post { match_rest = @builder.match_rest(val[1], val[2]) result = [ *val[0], match_rest, *val[4] ] } | p_args_head tSTAR { result = [ *val[0], @builder.match_rest(val[1]) ] } | p_args_head tSTAR tCOMMA p_args_post { result = [ *val[0], @builder.match_rest(val[1]), *val[3] ] } | p_args_tail p_args_head: p_arg tCOMMA { # array patterns that end with comma # like [1, 2,] # must be emitted as `array_pattern_with_tail` item = @builder.match_with_trailing_comma(val[0], val[1]) result = [ item ] } | p_args_head p_arg tCOMMA { # array patterns that end with comma # like [1, 2,] # must be emitted as `array_pattern_with_tail` last_item = @builder.match_with_trailing_comma(val[1], val[2]) result = [ *val[0], last_item ] } p_args_tail: p_rest { result = [ val[0] ] } | p_rest tCOMMA p_args_post { result = [ val[0], *val[2] ] } p_find: p_rest tCOMMA p_args_post tCOMMA p_rest { result = [ val[0], *val[2], val[4] ] } p_rest: tSTAR tIDENTIFIER { result = @builder.match_rest(val[0], val[1]) } | tSTAR { result = @builder.match_rest(val[0]) } p_args_post: p_arg { result = [ val[0] ] } | p_args_post tCOMMA p_arg { result = [ *val[0], val[2] ] } p_arg: p_expr p_kwargs: p_kwarg tCOMMA p_any_kwrest { result = [ *val[0], *val[2] ] } | p_kwarg { result = val[0] } | p_kwarg tCOMMA { result = val[0] } | p_any_kwrest { result = val[0] } p_kwarg: p_kw { result = [ val[0] ] } | p_kwarg tCOMMA p_kw { result = [ *val[0], val[2] ] } p_kw: p_kw_label p_expr { result = @builder.match_pair(*val[0], val[1]) } | p_kw_label { result = @builder.match_label(*val[0]) } p_kw_label: tLABEL { result = [:label, val[0]] } | tSTRING_BEG string_contents tLABEL_END { result = [:quoted, [val[0], val[1], val[2]]] } p_kwrest: kwrest_mark tIDENTIFIER { result = [ @builder.match_rest(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.match_rest(val[0], nil) ] } p_kwnorest: kwrest_mark kNIL { result = [ @builder.match_nil_pattern(val[0], val[1]) ] } p_any_kwrest: p_kwrest | p_kwnorest p_value: p_primitive | p_primitive tDOT2 p_primitive { result = @builder.range_inclusive(val[0], val[1], val[2]) } | p_primitive tDOT3 p_primitive { result = @builder.range_exclusive(val[0], val[1], val[2]) } | p_primitive tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | p_primitive tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | p_variable | p_var_ref | p_const | tBDOT2 p_primitive { result = @builder.range_inclusive(nil, val[0], val[1]) } | tBDOT3 p_primitive { result = @builder.range_exclusive(nil, val[0], val[1]) } p_primitive: literal | strings | xstring | regexp | words | qwords | 
symbols | qsymbols | keyword_variable { result = @builder.accessible(val[0]) } | lambda p_variable: tIDENTIFIER { result = @builder.assignable(@builder.match_var(val[0])) } p_var_ref: tCARET tIDENTIFIER { name = val[1][0] unless static_env.declared?(name) diagnostic :error, :undefined_lvar, { :name => name }, val[1] end lvar = @builder.accessible(@builder.ident(val[1])) result = @builder.pin(val[0], lvar) } p_const: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | p_const tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCONSTANT { result = @builder.const(val[0]) } opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { string = @builder.string_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tSTRING { string = @builder.string(val[0]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { string = @builder.xstring_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word tSPACE { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cmdarg.push(false) @lexer.cond.push(false) } compstmt tSTRING_DEND { @lexer.cmdarg.pop @lexer.cond.pop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { 
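                        # Illustration (string contents hypothetical): string_dvar covers
                        # the brace-less interpolation forms, where `#` is followed
                        # directly by a global, instance, or class variable, or by a
                        # back-reference, e.g.
                        #
                        #   "pid=#$$ name=#@name count=#@@count match=#$1"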
result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: ssym | dsym ssym: tSYMBOL { @lexer.state = :expr_end result = @builder.symbol(val[0]) } dsym: tSYMBEG string_contents tSTRING_END { @lexer.state = :expr_end result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: simple_numeric { result = val[0] } | tUNARY_NUM simple_numeric =tLOWEST { if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], val[1]) else result = @builder.unary_num(val[0], val[1]) end } simple_numeric: tINTEGER { @lexer.state = :expr_end result = @builder.integer(val[0]) } | tFLOAT { @lexer.state = :expr_end result = @builder.float(val[0]) } | tRATIONAL { @lexer.state = :expr_end result = @builder.rational(val[0]) } | tIMAGINARY { @lexer.state = :expr_end result = @builder.complex(val[0]) } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | # nothing { result = nil } f_opt_paren_args: f_paren_args | none { result = @builder.args(nil, [], nil) } f_paren_args: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value } | tLPAREN2 f_arg tCOMMA args_forward rparen { args = [ *val[1], @builder.forward_arg(val[3]) ] result = @builder.args(val[0], args, val[4]) @static_env.declare_forward_args } | tLPAREN2 args_forward rparen { result = @builder.forward_only_args(val[0], val[1], val[2]) @static_env.declare_forward_args @lexer.state = :expr_value } f_arglist: f_paren_args | { result = @context.in_kwarg @context.in_kwarg = true } f_args term { @context.in_kwarg = val[0] result = @builder.args(nil, val[1], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_any_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). 
concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } args_forward: tBDOT3 { result = val[0] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] @max_numparam_stack.has_ordinary_params! result = val[0] } f_arg_asgn: f_norm_arg { @current_arg_stack.set(val[0][0]) result = val[0] } f_arg_item: f_arg_asgn { @current_arg_stack.set(0) result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_label: tLABEL { check_kwarg_name(val[0]) @static_env.declare val[0][0] @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(val[0][0]) result = val[0] } f_kw: f_label arg_value { @current_arg_stack.set(nil) result = @builder.kwoptarg(val[0], val[1]) } | f_label { @current_arg_stack.set(nil) result = @builder.kwarg(val[0]) } f_block_kw: f_label primary_value { result = @builder.kwoptarg(val[0], val[1]) } | f_label { result = @builder.kwarg(val[0]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_no_kwarg: kwrest_mark kNIL { result = [ @builder.kwnilarg(val[0], val[1]) ] } f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.kwrestarg(val[0]) ] } f_opt: f_arg_asgn tEQL arg_value { @current_arg_stack.set(0) result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: f_arg_asgn tEQL primary_value { @current_arg_stack.set(0) result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | 
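# Illustrative note (hypothetical signatures, not part of the grammar):
# the f_args / f_optarg / f_rest_arg / f_kwarg / f_kwrest / f_no_kwarg /
# f_block_arg family above accepts the full Ruby 3.0 parameter list and
# simply concatenates the builder nodes per group, preserving declaration
# order:
#
#   def m(a, b = 1, *rest, c:, d: 2, **kw, &blk); end
#   def strict(**nil); end   # f_no_kwarg -> kwnilarg, forbids keyword args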
assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tSTRING_BEG string_contents tLABEL_END arg_value { result = @builder.pair_quoted(val[0], val[1], val[2], val[3]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: call_op | tCOLON2 call_op: tDOT { result = [:dot, val[0][1]] } | tANDDOT { result = [:anddot, val[0][1]] } opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } rbrace: opt_nl tRCURLY { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 30 end def default_encoding Encoding::UTF_8 end def endless_method_name(name_t) if !%w[=== == != <= >=].include?(name_t[0]) && name_t[0].end_with?('=') diagnostic :error, :endless_setter, nil, name_t end end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) @max_numparam_stack.push(static: true) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop @max_numparam_stack.pop end def try_declare_numparam(node) name = node.children[0] if name =~ /\A_[1-9]\z/ && !static_env.declared?(name) && @context.in_dynamic_block? # definitely an implicit param location = node.loc.expression if max_numparam_stack.has_ordinary_params? diagnostic :error, :ordinary_param_defined, nil, [nil, location] end raw_max_numparam_stack = max_numparam_stack.stack.dup # ignore current block scope raw_max_numparam_stack.pop raw_max_numparam_stack.reverse_each do |outer_scope| if outer_scope[:static] # found an outer scope that can't have numparams # like def/class/etc break else outer_scope_has_numparams = outer_scope[:value] > 0 if outer_scope_has_numparams diagnostic :error, :numparam_used_in_outer_scope, nil, [nil, location] else # for now it's ok, but an outer scope can also be a block # like proc { _1; proc { proc { proc { _2 }} }} # with numparams, so we need to continue end end end static_env.declare(name) max_numparam_stack.register(name[1].to_i) true else false end end parser-3.3.4.2/lib/parser/ruby31.y000066400000000000000000003422651465510415600165610ustar00rootroot00000000000000class Parser::Ruby31 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG 
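# Illustrative note on try_declare_numparam (defined in the inner section
# of each of these grammars; the examples are hypothetical): numbered
# parameters are declared lazily the first time `_1` .. `_9` is read as an
# identifier inside a dynamic (block/lambda) scope:
#
#   [1, 2].map { _1 * 2 }       # ok, registers numparam 1 in this block
#   [1, 2].map { |x| _1 }       # error: ordinary_param_defined
#   proc { _1; proc { _2 } }    # error: numparam_used_in_outer_scope
#
# local_push marks def/class/module scopes as static, so the reverse walk
# over max_numparam_stack stops there and numparams used inside a method
# body never conflict with blocks outside of it.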
tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tRATIONAL tIMAGINARY tLABEL_END tANDDOT tBDOT2 tBDOT3 prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 tBDOT2 tBDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kIN nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: { @current_arg_stack.push(nil) @max_numparam_stack.push(static: true) } top_compstmt { result = val[1] @current_arg_stack.pop @max_numparam_stack.pop } top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN begin_block { result = @builder.preexe(val[0], *val[1]) } begin_block: tLCURLY top_compstmt tRCURLY { result = val } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? diagnostic :error, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN begin_block { diagnostic :error, :begin_in_method, nil, val[0] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL mrhs_arg kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) begin_body = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.multi_assign(val[0], val[1], begin_body) } | mlhs tEQL mrhs_arg { result = @builder.multi_assign(val[0], val[1], val[2]) } | expr command_asgn: lhs tEQL command_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs 
tOP_ASGN command_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | defn_head f_opt_paren_args tEQL command { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defn_head f_opt_paren_args tEQL command kRESCUE_MOD arg { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) rescue_body = @builder.rescue_body(val[4], nil, nil, nil, nil, val[5]) method_body = @builder.begin_body(val[3], [ rescue_body ]) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], method_body) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_opt_paren_args tEQL command { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_opt_paren_args tEQL command kRESCUE_MOD arg { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) rescue_body = @builder.rescue_body(val[4], nil, nil, nil, nil, val[5]) method_body = @builder.begin_body(val[3], [ rescue_body ]) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], method_body) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | backref tOP_ASGN command_rhs { @builder.op_assign(val[0], val[1], val[2]) } command_rhs: command_call =tOP_ASGN | command_call kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | command_asgn expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg tASSOC { @lexer.state = :expr_beg @lexer.command_start = false @pattern_variables.push @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = true } p_top_expr_body { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[2] result = @builder.match_pattern(val[0], val[1], val[3]) } | arg kIN { @lexer.state = :expr_beg @lexer.command_start = false @pattern_variables.push @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = true } p_top_expr_body { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[2] result = @builder.match_pattern_p(val[0], val[1], val[3]) } | arg =tLBRACE_ARG expr_value: expr expr_value_do: { @lexer.cond.push(true) 
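# Illustrative note (hypothetical sources, not part of this action): the
# `arg tASSOC ... p_top_expr_body` and `arg kIN ...` branches above parse
# the one-line pattern matches, and the defn_head/defs_head + tEQL
# branches parse endless method definitions, including the
# rescue-modifier form:
#
#   {name: "x"} => {name:}      # match_pattern, nil on success, raises on failure
#   1 in Integer                # match_pattern_p, evaluates to true/false
#   def parse(s) = Integer(s) rescue 0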
} expr_value do { @lexer.cond.pop result = [ val[1], val[2] ] } def_name: fname { local_push @current_arg_stack.push(nil) result = [ val[0], @context.dup ] @context.in_def = true } defn_head: k_def def_name { result = [ val[0], val[1] ] } defs_head: k_def singleton dot_or_colon { @lexer.state = :expr_fname @context.in_argdef = true } def_name { result = [ val[0], val[1], val[2], val[4] ] } command_call: command | block_command block_command: block_call | block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } cmd_brace_block: tLBRACE_ARG { result = @context.dup @context.in_block = true } brace_body tRCURLY { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value call_op operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value call_op operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | k_return call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). 
concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { if (val[1][0] == :anddot) diagnostic :error, :csend_in_lhs_of_masgn, nil, val[1] end result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { if (val[1][0] == :anddot) diagnostic :error, :csend_in_lhs_of_masgn, nil, val[1] end result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fitem: fname { result = @builder.symbol_internal(val[0]) } | symbol undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg_rhs { result = @builder.assign(val[0], val[1], val[2]) } 
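# Illustrative note (hypothetical sources, not part of the grammar): the
# mlhs_* rules earlier in this file build multiple-assignment targets with
# at most one splat and nesting via tLPAREN mlhs_inner rparen:
#
#   a, (b, *c), d = list
#   first, *, last = ary       # bare tSTAR, anonymous splat
#   obj&.attr, x = 1, 2        # error: csend_in_lhs_of_masgn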
| var_lhs tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | arg tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | tBDOT2 arg { result = @builder.range_inclusive(nil, val[0], val[1]) } | tBDOT3 arg { result = @builder.range_exclusive(nil, val[0], val[1]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM simple_numeric tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( val[1], val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr =tCMP | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl { @context.in_defined = true } arg { @context.in_defined = false result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[3] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = 
@builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | defn_head f_opt_paren_args tEQL arg { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defn_head f_opt_paren_args tEQL arg kRESCUE_MOD arg { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) rescue_body = @builder.rescue_body(val[4], nil, nil, nil, nil, val[5]) method_body = @builder.begin_body(val[3], [ rescue_body ]) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], method_body) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_opt_paren_args tEQL arg { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_opt_paren_args tEQL arg kRESCUE_MOD arg { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) rescue_body = @builder.rescue_body(val[4], nil, nil, nil, nil, val[5]) method_body = @builder.begin_body(val[3], [ rescue_body ]) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], method_body) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | primary relop: tGT | tLT | tGEQ | tLEQ rel_expr: arg relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } arg_rhs: arg =tOP_ASGN | arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } paren_args: tLPAREN2 opt_call_args rparen { result = val } | tLPAREN2 args tCOMMA args_forward rparen { unless @static_env.declared_forward_args? diagnostic :error, :unexpected_token, { :token => 'tBDOT3' } , val[3] end result = [val[0], [*val[1], @builder.forwarded_args(val[3])], val[4]] } | tLPAREN2 args_forward rparen { unless @static_env.declared_forward_args? 
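# Illustrative note (hypothetical sources, not part of this action):
# args_forward/forwarded_args implement `...` argument forwarding. The
# static environment remembers that the surrounding def declared `...`,
# and this check rejects a forwarding call without such a declaration:
#
#   def relay(...); target(...); end        # ok
#   def partial(a, ...); target(...); end   # ok, leading positional kept
#   target(...)                             # error: unexpected token tBDOT3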
diagnostic :error, :unexpected_token, { :token => 'tBDOT3' } , val[1] end result = [val[0], [@builder.forwarded_args(val[1])], val[2]] } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { # When branch gets invoked by RACC's lookahead # and command args start with '[' or '(' # we need to put `true` to the cmdarg stack # **before** `false` pushed by lexer # m [], n # ^ # Right here we have cmdarg [...0] because # lexer pushed it on '[' # We need to modify cmdarg stack to [...10] # # For all other cases (like `m n` or `m n, []`) we simply put 1 to the stack # and later lexer pushes corresponding bits on top of it. last_token = @last_token[0] lookahead = last_token == :tLBRACK || last_token == :tLPAREN_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.push(true) @lexer.cmdarg.push(top) else @lexer.cmdarg.push(true) end } call_args { # call_args can be followed by tLBRACE_ARG (that does cmdarg.push(0) in the lexer) # but the push must be done after cmdarg.pop() in the parser. # So this code does cmdarg.pop() to pop 0 pushed by tLBRACE_ARG, # cmdarg.pop() to pop 1 pushed by command_args, # and cmdarg.push(0) to restore back the flag set by tLBRACE_ARG. last_token = @last_token[0] lookahead = last_token == :tLBRACE_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.pop @lexer.cmdarg.push(top) else @lexer.cmdarg.pop end result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } | tAMPER { if !@static_env.declared_anonymous_blockarg? 
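# Illustrative note (hypothetical sources, not part of this action): the
# bare tAMPER branch is Ruby 3.1's anonymous block forwarding; it is only
# legal when the enclosing method declared a bare `&` parameter, which is
# what declared_anonymous_blockarg? verifies:
#
#   def wrap(&)
#     inner(&)                 # ok: block_pass with no name
#   end
#
#   def plain; inner(&); end   # error: no_anonymous_blockarg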
diagnostic :error, :no_anonymous_blockarg, nil, val[0] end result = @builder.block_pass(val[0], nil) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs_arg: mrhs { result = @builder.array(nil, val[0], nil) } | arg_value mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { @lexer.cmdarg.push(false) } bodystmt kEND { @lexer.cmdarg.pop result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG stmt { @lexer.state = :expr_endarg } rparen { result = @builder.begin(val[0], val[1], val[3]) } | tLPAREN_ARG { @lexer.state = :expr_endarg } opt_nl tRPAREN { result = @builder.begin(val[0], nil, val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | k_return { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 { @context.in_defined = true } expr rparen { @context.in_defined = false result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[4] ], val[5]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | lambda | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE expr_value_do compstmt kEND { result = @builder.loop(:while, val[0], *val[1], val[2], val[3]) } | kUNTIL expr_value_do compstmt kEND { result = @builder.loop(:until, val[0], *val[1], val[2], val[3]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kCASE expr_value opt_terms p_case_body kEND { *in_bodies, (else_t, 
else_body) = *val[3] result = @builder.case_match(val[0], val[1], in_bodies, else_t, else_body, val[4]) } | kFOR for_var kIN expr_value_do compstmt kEND { result = @builder.for(val[0], val[1], val[2], *val[3], val[4], val[5]) } | k_class cpath superclass { @context.in_class = true local_push } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | defn_head f_arglist bodystmt kEND { def_t, (name_t, ctx) = val[0] result = @builder.def_method(def_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_arglist bodystmt kEND { def_t, recv, dot_t, (name_t, ctx) = val[0] result = @builder.def_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } k_def: kDEF { result = val[0] @context.in_argdef = true } k_return: kRETURN { if @context.in_class && !@context.in_def && !(context.in_block || context.in_lambda) diagnostic :error, :invalid_return, nil, val[0] end } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA f_rest_marg { result = val[0]. push(val[2]) } | f_marg_list tCOMMA f_rest_marg tCOMMA f_marg_list { result = val[0]. push(val[2]). 
concat(val[4]) } | f_rest_marg { result = [ val[0] ] } | f_rest_marg tCOMMA f_marg_list { result = [ val[0], *val[2] ] } f_rest_marg: tSTAR f_norm_arg { result = @builder.restarg(val[0], val[1]) } | tSTAR { result = @builder.restarg(val[0]) } f_any_kwrest: f_kwrest | f_no_kwarg f_eq: { @context.in_argdef = false } tEQL { result = val[1] } block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_any_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } excessed_comma: tCOMMA block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg excessed_comma | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(nil) @context.in_argdef = false result = @builder.args(val[0], val[1], val[2]) } | tPIPE block_param opt_bv_decl tPIPE { @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(nil) @context.in_argdef = false result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: tLAMBDA { @static_env.extend_dynamic @max_numparam_stack.push(static: false) result = @context.dup @context.in_lambda = true } f_larglist { @lexer.cmdarg.push(false) } lambda_body { lambda_call = @builder.call_lambda(val[0]) args = @max_numparam_stack.has_numparams? ? 
@builder.numargs(@max_numparam_stack.top) : val[2] begin_t, body, end_t = val[4] @max_numparam_stack.pop @static_env.unextend @lexer.cmdarg.pop @context.in_lambda = val[1].in_lambda result = @builder.block(lambda_call, begin_t, args, body, end_t) } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { @context.in_argdef = false @max_numparam_stack.has_ordinary_params! result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { @context.in_argdef = false if val[0].any? @max_numparam_stack.has_ordinary_params! end result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { @context.in_lambda = val[1].in_lambda result = [ val[0], val[2], val[3] ] } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } bodystmt kEND { @context.in_lambda = val[1].in_lambda result = [ val[0], val[2], val[3] ] } do_block: kDO_BLOCK { result = @context.dup @context.in_block = true } do_body kEND { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value call_op operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value call_op paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { result = @context.dup @context.in_block = true } brace_body tRCURLY { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } | kDO { result = @context.dup @context.in_block = true } do_body kEND { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } brace_body: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } opt_block_param compstmt { args = @max_numparam_stack.has_numparams? ? 
@builder.numargs(@max_numparam_stack.top) : val[1] result = [ args, val[2] ] @max_numparam_stack.pop @static_env.unextend } do_body: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } { @lexer.cmdarg.push(false) } opt_block_param bodystmt { args = @max_numparam_stack.has_numparams? ? @builder.numargs(@max_numparam_stack.top) : val[2] result = [ args, val[3] ] @max_numparam_stack.pop @static_env.unextend @lexer.cmdarg.pop } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body p_case_body: kIN { @lexer.state = :expr_beg @lexer.command_start = false @pattern_variables.push @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = true } p_top_expr then { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[1] } compstmt p_cases { result = [ @builder.in_pattern(val[0], *val[2], val[3], val[5]), *val[6] ] } p_cases: opt_else { result = [ val[0] ] } | p_case_body p_top_expr: p_top_expr_body { result = [ val[0], nil ] } | p_top_expr_body kIF_MOD expr_value { result = [ val[0], @builder.if_guard(val[1], val[2]) ] } | p_top_expr_body kUNLESS_MOD expr_value { result = [ val[0], @builder.unless_guard(val[1], val[2]) ] } p_top_expr_body: p_expr | p_expr tCOMMA { # array patterns that end with comma # like 1, 2, # must be emitted as `array_pattern_with_tail` item = @builder.match_with_trailing_comma(val[0], val[1]) result = @builder.array_pattern(nil, [ item ], nil) } | p_expr tCOMMA p_args { result = @builder.array_pattern(nil, [val[0]].concat(val[2]), nil) } | p_find { result = @builder.find_pattern(nil, val[0], nil) } | p_args_tail { result = @builder.array_pattern(nil, val[0], nil) } | p_kwargs { result = @builder.hash_pattern(nil, val[0], nil) } p_expr: p_as p_as: p_expr tASSOC p_variable { result = @builder.match_as(val[0], val[1], val[2]) } | p_alt p_alt: p_alt tPIPE p_expr_basic { result = @builder.match_alt(val[0], val[1], val[2]) } | p_expr_basic p_lparen: tLPAREN2 { result = val[0] @pattern_hash_keys.push } p_lbracket: tLBRACK2 { result = val[0] @pattern_hash_keys.push } p_expr_basic: p_value | p_variable | p_const p_lparen p_args rparen { @pattern_hash_keys.pop pattern = @builder.array_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lparen p_find rparen { @pattern_hash_keys.pop pattern = @builder.find_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lparen p_kwargs rparen { @pattern_hash_keys.pop pattern = @builder.hash_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const tLPAREN2 rparen { pattern = @builder.array_pattern(val[1], nil, val[2]) result = @builder.const_pattern(val[0], val[1], pattern, val[2]) } | p_const p_lbracket p_args rbracket { @pattern_hash_keys.pop pattern = @builder.array_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lbracket p_find rbracket { @pattern_hash_keys.pop pattern = @builder.find_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lbracket p_kwargs rbracket { @pattern_hash_keys.pop pattern = @builder.hash_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const tLBRACK2 rbracket { pattern = @builder.array_pattern(val[1], nil, val[2]) result = @builder.const_pattern(val[0], val[1], 
pattern, val[2]) } | tLBRACK p_args rbracket { result = @builder.array_pattern(val[0], val[1], val[2]) } | tLBRACK p_find rbracket { result = @builder.find_pattern(val[0], val[1], val[2]) } | tLBRACK rbracket { result = @builder.array_pattern(val[0], [], val[1]) } | tLBRACE { @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = false } p_kwargs rbrace { @pattern_hash_keys.pop @context.in_kwarg = val[1] result = @builder.hash_pattern(val[0], val[2], val[3]) } | tLBRACE rbrace { result = @builder.hash_pattern(val[0], [], val[1]) } | tLPAREN { @pattern_hash_keys.push } p_expr rparen { @pattern_hash_keys.pop result = @builder.begin(val[0], val[2], val[3]) } p_args: p_expr { result = [ val[0] ] } | p_args_head { result = val[0] } | p_args_head p_arg { result = [ *val[0], val[1] ] } | p_args_head tSTAR tIDENTIFIER { match_rest = @builder.match_rest(val[1], val[2]) result = [ *val[0], match_rest ] } | p_args_head tSTAR tIDENTIFIER tCOMMA p_args_post { match_rest = @builder.match_rest(val[1], val[2]) result = [ *val[0], match_rest, *val[4] ] } | p_args_head tSTAR { result = [ *val[0], @builder.match_rest(val[1]) ] } | p_args_head tSTAR tCOMMA p_args_post { result = [ *val[0], @builder.match_rest(val[1]), *val[3] ] } | p_args_tail p_args_head: p_arg tCOMMA { # array patterns that end with comma # like [1, 2,] # must be emitted as `array_pattern_with_tail` item = @builder.match_with_trailing_comma(val[0], val[1]) result = [ item ] } | p_args_head p_arg tCOMMA { # array patterns that end with comma # like [1, 2,] # must be emitted as `array_pattern_with_tail` last_item = @builder.match_with_trailing_comma(val[1], val[2]) result = [ *val[0], last_item ] } p_args_tail: p_rest { result = [ val[0] ] } | p_rest tCOMMA p_args_post { result = [ val[0], *val[2] ] } p_find: p_rest tCOMMA p_args_post tCOMMA p_rest { result = [ val[0], *val[2], val[4] ] } p_rest: tSTAR tIDENTIFIER { result = @builder.match_rest(val[0], val[1]) } | tSTAR { result = @builder.match_rest(val[0]) } p_args_post: p_arg { result = [ val[0] ] } | p_args_post tCOMMA p_arg { result = [ *val[0], val[2] ] } p_arg: p_expr p_kwargs: p_kwarg tCOMMA p_any_kwrest { result = [ *val[0], *val[2] ] } | p_kwarg { result = val[0] } | p_kwarg tCOMMA { result = val[0] } | p_any_kwrest { result = val[0] } p_kwarg: p_kw { result = [ val[0] ] } | p_kwarg tCOMMA p_kw { result = [ *val[0], val[2] ] } p_kw: p_kw_label p_expr { result = @builder.match_pair(*val[0], val[1]) } | p_kw_label { result = @builder.match_label(*val[0]) } p_kw_label: tLABEL { result = [:label, val[0]] } | tSTRING_BEG string_contents tLABEL_END { result = [:quoted, [val[0], val[1], val[2]]] } p_kwrest: kwrest_mark tIDENTIFIER { result = [ @builder.match_rest(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.match_rest(val[0], nil) ] } p_kwnorest: kwrest_mark kNIL { result = [ @builder.match_nil_pattern(val[0], val[1]) ] } p_any_kwrest: p_kwrest | p_kwnorest p_value: p_primitive | p_primitive tDOT2 p_primitive { result = @builder.range_inclusive(val[0], val[1], val[2]) } | p_primitive tDOT3 p_primitive { result = @builder.range_exclusive(val[0], val[1], val[2]) } | p_primitive tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | p_primitive tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | p_var_ref | p_expr_ref | p_const | tBDOT2 p_primitive { result = @builder.range_inclusive(nil, val[0], val[1]) } | tBDOT3 p_primitive { result = @builder.range_exclusive(nil, val[0], val[1]) } p_primitive: literal | strings | xstring | regexp | 
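# Illustrative note (hypothetical sources, not part of the grammar): the
# pattern rules above cover array, find, hash and const patterns:
#
#   case value
#   in [1, *rest]          then ...   # array_pattern + match_rest
#   in [*, :needle, *post] then ...   # p_find -> find_pattern
#   in {name:, **nil}      then ...   # hash_pattern; **nil forbids extra keys
#   in Point(x:, y:)       then ...   # p_const p_lparen p_kwargs -> const_pattern
#   end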
words | qwords | symbols | qsymbols | keyword_variable { result = @builder.accessible(val[0]) } | lambda p_variable: tIDENTIFIER { result = @builder.assignable(@builder.match_var(val[0])) } p_var_ref: tCARET tIDENTIFIER { name = val[1][0] unless static_env.declared?(name) diagnostic :error, :undefined_lvar, { :name => name }, val[1] end lvar = @builder.accessible(@builder.ident(val[1])) result = @builder.pin(val[0], lvar) } | tCARET nonlocal_var { non_lvar = @builder.accessible(val[1]) result = @builder.pin(val[0], non_lvar) } p_expr_ref: tCARET tLPAREN expr_value tRPAREN { expr = @builder.begin(val[1], val[2], val[3]) result = @builder.pin(val[0], expr) } p_const: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | p_const tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCONSTANT { result = @builder.const(val[0]) } opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { string = @builder.string_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tSTRING { string = @builder.string(val[0]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { string = @builder.xstring_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word tSPACE { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = 
@builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cmdarg.push(false) @lexer.cond.push(false) } compstmt tSTRING_DEND { @lexer.cmdarg.pop @lexer.cond.pop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: ssym | dsym ssym: tSYMBOL { @lexer.state = :expr_end result = @builder.symbol(val[0]) } dsym: tSYMBEG string_contents tSTRING_END { @lexer.state = :expr_end result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: simple_numeric { result = val[0] } | tUNARY_NUM simple_numeric =tLOWEST { if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], val[1]) else result = @builder.unary_num(val[0], val[1]) end } simple_numeric: tINTEGER { @lexer.state = :expr_end result = @builder.integer(val[0]) } | tFLOAT { @lexer.state = :expr_end result = @builder.float(val[0]) } | tRATIONAL { @lexer.state = :expr_end result = @builder.rational(val[0]) } | tIMAGINARY { @lexer.state = :expr_end result = @builder.complex(val[0]) } nonlocal_var: tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | # nothing { result = nil } f_opt_paren_args: f_paren_args | none { @context.in_argdef = false result = @builder.args(nil, [], nil) } f_paren_args: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value @context.in_argdef = false } f_arglist: f_paren_args | { result = @context.dup @context.in_kwarg = true @context.in_argdef = true } f_args term { @context.in_kwarg = val[0].in_kwarg @context.in_argdef = false result = @builder.args(nil, val[1], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_any_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } | args_forward { @static_env.declare_forward_args result = [ @builder.forward_arg(val[0]) ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). 
concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } args_forward: tBDOT3 { result = val[0] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] @max_numparam_stack.has_ordinary_params! result = val[0] } f_arg_asgn: f_norm_arg { @current_arg_stack.set(val[0][0]) result = val[0] } f_arg_item: f_arg_asgn { @current_arg_stack.set(0) result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_label: tLABEL { check_kwarg_name(val[0]) @static_env.declare val[0][0] @max_numparam_stack.has_ordinary_params! 
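# Illustrative note (hypothetical sources, not part of this action):
# f_label introduces a keyword-parameter name; check_kwarg_name rejects
# labels that are not valid local variable names, and the current_arg
# bookkeeping lets the builder flag self-referential defaults:
#
#   def draw(width:, height: 10); end   # f_kw with and without a default
#   def bad(Foo:); end                  # rejected: constant-like kwarg name
#   def loop_(n: n); end                # flagged as a circular argument reference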
@current_arg_stack.set(val[0][0]) @context.in_argdef = false result = val[0] } f_kw: f_label arg_value { @current_arg_stack.set(nil) @context.in_argdef = true result = @builder.kwoptarg(val[0], val[1]) } | f_label { @current_arg_stack.set(nil) @context.in_argdef = true result = @builder.kwarg(val[0]) } f_block_kw: f_label primary_value { @context.in_argdef = true result = @builder.kwoptarg(val[0], val[1]) } | f_label { @context.in_argdef = true result = @builder.kwarg(val[0]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_no_kwarg: kwrest_mark kNIL { result = [ @builder.kwnilarg(val[0], val[1]) ] } f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.kwrestarg(val[0]) ] } f_opt: f_arg_asgn f_eq arg_value { @current_arg_stack.set(0) @context.in_argdef = true result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: f_arg_asgn f_eq primary_value { @current_arg_stack.set(0) @context.in_argdef = true result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } | blkarg_mark { @static_env.declare_anonymous_blockarg result = @builder.blockarg(val[0], nil) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tLABEL { result = @builder.pair_label(val[0]) } | tSTRING_BEG string_contents tLABEL_END arg_value { result = @builder.pair_quoted(val[0], val[1], val[2], val[3]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: call_op | tCOLON2 call_op: tDOT { result = [:dot, val[0][1]] } | tANDDOT { result = [:anddot, val[0][1]] } opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } rbrace: opt_nl tRCURLY { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 31 end def default_encoding Encoding::UTF_8 end def endless_method_name(name_t) if !%w[=== == != <= >=].include?(name_t[0]) && name_t[0].end_with?('=') diagnostic :error, :endless_setter, nil, name_t end end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) @max_numparam_stack.push(static: true) end def local_pop @static_env.unextend 
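# local_push/local_pop bracket def/class/module bodies: they open a fresh
# static scope and save/restore the lexer's cmdarg/cond stacks, which is why
# locals of the surrounding scope are not visible inside the body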
@lexer.cmdarg.pop @lexer.cond.pop @max_numparam_stack.pop end def try_declare_numparam(node) name = node.children[0] if name =~ /\A_[1-9]\z/ && !static_env.declared?(name) && @context.in_dynamic_block? # definitely an implicit param location = node.loc.expression if max_numparam_stack.has_ordinary_params? diagnostic :error, :ordinary_param_defined, nil, [nil, location] end raw_max_numparam_stack = max_numparam_stack.stack.dup # ignore current block scope raw_max_numparam_stack.pop raw_max_numparam_stack.reverse_each do |outer_scope| if outer_scope[:static] # found an outer scope that can't have numparams # like def/class/etc break else outer_scope_has_numparams = outer_scope[:value] > 0 if outer_scope_has_numparams diagnostic :error, :numparam_used_in_outer_scope, nil, [nil, location] else # for now it's ok, but an outer scope can also be a block # like proc { _1; proc { proc { proc { _2 }} }} # with numparams, so we need to continue end end end static_env.declare(name) max_numparam_stack.register(name[1].to_i) true else false end end parser-3.3.4.2/lib/parser/ruby32.y000066400000000000000000003427401465510415600165600ustar00rootroot00000000000000class Parser::Ruby32 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tRATIONAL tIMAGINARY tLABEL_END tANDDOT tBDOT2 tBDOT3 prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 tBDOT2 tBDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kIN nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: { @current_arg_stack.push(nil) @max_numparam_stack.push(static: true) } top_compstmt { result = val[1] @current_arg_stack.pop @max_numparam_stack.pop } top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN begin_block { result = @builder.preexe(val[0], *val[1]) } begin_block: tLCURLY top_compstmt tRCURLY { result = val } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? 
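# an `else` clause without any `rescue` is rejected as useless, e.g.
#   begin; work; else; cleanup; end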
diagnostic :error, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN begin_block { diagnostic :error, :begin_in_method, nil, val[0] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL mrhs_arg kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) begin_body = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.multi_assign(val[0], val[1], begin_body) } | mlhs tEQL mrhs_arg { result = @builder.multi_assign(val[0], val[1], val[2]) } | expr command_asgn: lhs tEQL command_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | defn_head f_opt_paren_args tEQL command { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defn_head f_opt_paren_args tEQL command kRESCUE_MOD arg { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) rescue_body = @builder.rescue_body(val[4], nil, nil, nil, nil, val[5]) method_body = @builder.begin_body(val[3], [ rescue_body ]) result = 
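# endless method definition whose body is a parenless command with a rescue
# modifier, e.g. `def log(msg) = warn msg rescue nil`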
@builder.def_endless_method(def_t, name_t, val[1], val[2], method_body) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_opt_paren_args tEQL command { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_opt_paren_args tEQL command kRESCUE_MOD arg { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) rescue_body = @builder.rescue_body(val[4], nil, nil, nil, nil, val[5]) method_body = @builder.begin_body(val[3], [ rescue_body ]) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], method_body) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | backref tOP_ASGN command_rhs { @builder.op_assign(val[0], val[1], val[2]) } command_rhs: command_call =tOP_ASGN | command_call kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | command_asgn expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg tASSOC { @lexer.state = :expr_beg @lexer.command_start = false @pattern_variables.push @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = true } p_top_expr_body { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[2] result = @builder.match_pattern(val[0], val[1], val[3]) } | arg kIN { @lexer.state = :expr_beg @lexer.command_start = false @pattern_variables.push @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = true } p_top_expr_body { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[2] result = @builder.match_pattern_p(val[0], val[1], val[3]) } | arg =tLBRACE_ARG expr_value: expr expr_value_do: { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop result = [ val[1], val[2] ] } def_name: fname { local_push @current_arg_stack.push(nil) result = [ val[0], @context.dup ] @context.in_def = true } defn_head: k_def def_name { result = [ val[0], val[1] ] } defs_head: k_def singleton dot_or_colon { @lexer.state = :expr_fname @context.in_argdef = true } def_name { result = [ val[0], val[1], val[2], val[4] ] } command_call: command | block_command block_command: block_call | block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } cmd_brace_block: tLBRACE_ARG { result = @context.dup @context.in_block = true } brace_body tRCURLY { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value call_op operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value call_op operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, 
end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | k_return call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { if (val[1][0] == :anddot) diagnostic :error, :csend_in_lhs_of_masgn, nil, val[1] end result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { if (val[1][0] == :anddot) diagnostic :error, :csend_in_lhs_of_masgn, nil, val[1] end result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 
tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fitem: fname { result = @builder.symbol_internal(val[0]) } | symbol undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | arg tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | tBDOT2 arg { result = @builder.range_inclusive(nil, val[0], val[1]) } | tBDOT3 arg { result = @builder.range_exclusive(nil, val[0], val[1]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = 
@builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM simple_numeric tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( val[1], val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr =tCMP | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl { @context.in_defined = true } arg { @context.in_defined = false result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[3] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | defn_head f_opt_paren_args tEQL arg { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defn_head f_opt_paren_args tEQL arg kRESCUE_MOD arg { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) rescue_body = @builder.rescue_body(val[4], nil, nil, nil, nil, val[5]) method_body = @builder.begin_body(val[3], [ rescue_body ]) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], method_body) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_opt_paren_args tEQL arg { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_opt_paren_args tEQL arg kRESCUE_MOD arg { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) rescue_body = @builder.rescue_body(val[4], nil, nil, nil, nil, val[5]) method_body = @builder.begin_body(val[3], [ rescue_body ]) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], method_body) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | primary relop: tGT | tLT | tGEQ | tLEQ rel_expr: arg relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } arg_rhs: arg =tOP_ASGN | arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, 
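# rescue modifier on the right-hand side of an assignment, e.g.
#   x = Integer(str) rescue 0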
val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } paren_args: tLPAREN2 opt_call_args rparen { result = val } | tLPAREN2 args tCOMMA args_forward rparen { unless @static_env.declared_forward_args? diagnostic :error, :unexpected_token, { :token => 'tBDOT3' } , val[3] end result = [val[0], [*val[1], @builder.forwarded_args(val[3])], val[4]] } | tLPAREN2 args_forward rparen { unless @static_env.declared_forward_args? diagnostic :error, :unexpected_token, { :token => 'tBDOT3' } , val[1] end result = [val[0], [@builder.forwarded_args(val[1])], val[2]] } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { # When branch gets invoked by RACC's lookahead # and command args start with '[' or '(' # we need to put `true` to the cmdarg stack # **before** `false` pushed by lexer # m [], n # ^ # Right here we have cmdarg [...0] because # lexer pushed it on '[' # We need to modify cmdarg stack to [...10] # # For all other cases (like `m n` or `m n, []`) we simply put 1 to the stack # and later lexer pushes corresponding bits on top of it. last_token = @last_token[0] lookahead = last_token == :tLBRACK || last_token == :tLPAREN_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.push(true) @lexer.cmdarg.push(top) else @lexer.cmdarg.push(true) end } call_args { # call_args can be followed by tLBRACE_ARG (that does cmdarg.push(0) in the lexer) # but the push must be done after cmdarg.pop() in the parser. # So this code does cmdarg.pop() to pop 0 pushed by tLBRACE_ARG, # cmdarg.pop() to pop 1 pushed by command_args, # and cmdarg.push(0) to restore back the flag set by tLBRACE_ARG. last_token = @last_token[0] lookahead = last_token == :tLBRACE_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.pop @lexer.cmdarg.push(top) else @lexer.cmdarg.pop end result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } | tAMPER { if !@static_env.declared_anonymous_blockarg? diagnostic :error, :no_anonymous_blockarg, nil, val[0] end result = @builder.block_pass(val[0], nil) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR { if !@static_env.declared_anonymous_restarg? diagnostic :error, :no_anonymous_restarg, nil, val[0] end result = [ @builder.forwarded_restarg(val[0]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | args tCOMMA tSTAR { if !@static_env.declared_anonymous_restarg? 
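# a bare `*` in an argument list forwards the anonymous rest argument declared
# in the enclosing signature (Ruby 3.2), e.g. `def m(*) = other(1, *)`;
# without such a declaration it is reported as no_anonymous_restarg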
diagnostic :error, :no_anonymous_restarg, nil, val[2] end result = val[0] << @builder.forwarded_restarg(val[2]) } mrhs_arg: mrhs { result = @builder.array(nil, val[0], nil) } | arg_value mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { @lexer.cmdarg.push(false) } bodystmt kEND { @lexer.cmdarg.pop result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG stmt { @lexer.state = :expr_endarg } rparen { result = @builder.begin(val[0], val[1], val[3]) } | tLPAREN_ARG { @lexer.state = :expr_endarg } opt_nl tRPAREN { result = @builder.begin(val[0], nil, val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | k_return { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 { @context.in_defined = true } expr rparen { @context.in_defined = false result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[4] ], val[5]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | lambda | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE expr_value_do compstmt kEND { result = @builder.loop(:while, val[0], *val[1], val[2], val[3]) } | kUNTIL expr_value_do compstmt kEND { result = @builder.loop(:until, val[0], *val[1], val[2], val[3]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kCASE expr_value opt_terms p_case_body kEND { *in_bodies, (else_t, else_body) = *val[3] result = @builder.case_match(val[0], val[1], in_bodies, else_t, else_body, val[4]) } | kFOR for_var kIN expr_value_do compstmt kEND { result = @builder.for(val[0], val[1], val[2], *val[3], val[4], val[5]) } | k_class cpath superclass { @context.in_class = true local_push } bodystmt kEND { 
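# class bodies open their own local scope; a class definition inside a method
# body, e.g. `def m; class C; end; end`, is rejected as class_in_def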
k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | defn_head f_arglist bodystmt kEND { def_t, (name_t, ctx) = val[0] result = @builder.def_method(def_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_arglist bodystmt kEND { def_t, recv, dot_t, (name_t, ctx) = val[0] result = @builder.def_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } k_def: kDEF { result = val[0] @context.in_argdef = true } k_return: kRETURN { if @context.in_class && !@context.in_def && !(context.in_block || context.in_lambda) diagnostic :error, :invalid_return, nil, val[0] end } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA f_rest_marg { result = val[0]. push(val[2]) } | f_marg_list tCOMMA f_rest_marg tCOMMA f_marg_list { result = val[0]. push(val[2]). concat(val[4]) } | f_rest_marg { result = [ val[0] ] } | f_rest_marg tCOMMA f_marg_list { result = [ val[0], *val[2] ] } f_rest_marg: tSTAR f_norm_arg { result = @builder.restarg(val[0], val[1]) } | tSTAR { result = @builder.restarg(val[0]) } f_any_kwrest: f_kwrest | f_no_kwarg f_eq: { @context.in_argdef = false } tEQL { result = val[1] } block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_any_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } excessed_comma: tCOMMA block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). 
concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg excessed_comma | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(nil) @context.in_argdef = false result = @builder.args(val[0], val[1], val[2]) } | tPIPE block_param opt_bv_decl tPIPE { @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(nil) @context.in_argdef = false result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: tLAMBDA { @static_env.extend_dynamic @max_numparam_stack.push(static: false) result = @context.dup @context.in_lambda = true } f_larglist { @lexer.cmdarg.push(false) } lambda_body { lambda_call = @builder.call_lambda(val[0]) args = @max_numparam_stack.has_numparams? ? @builder.numargs(@max_numparam_stack.top) : val[2] begin_t, body, end_t = val[4] @max_numparam_stack.pop @static_env.unextend @lexer.cmdarg.pop @context.in_lambda = val[1].in_lambda result = @builder.block(lambda_call, begin_t, args, body, end_t) } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { @context.in_argdef = false @max_numparam_stack.has_ordinary_params! result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { @context.in_argdef = false if val[0].any? @max_numparam_stack.has_ordinary_params! 
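# a lambda that declares any ordinary parameter, e.g. `->(x) { x }`, cannot
# also use numbered parameters such as `_1` in its body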
end result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { @context.in_lambda = val[1].in_lambda result = [ val[0], val[2], val[3] ] } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } bodystmt kEND { @context.in_lambda = val[1].in_lambda result = [ val[0], val[2], val[3] ] } do_block: kDO_BLOCK { result = @context.dup @context.in_block = true } do_body kEND { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value call_op operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value call_op paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { result = @context.dup @context.in_block = true } brace_body tRCURLY { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } | kDO { result = @context.dup @context.in_block = true } do_body kEND { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } brace_body: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } opt_block_param compstmt { args = @max_numparam_stack.has_numparams? ? @builder.numargs(@max_numparam_stack.top) : val[1] result = [ args, val[2] ] @max_numparam_stack.pop @static_env.unextend } do_body: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } { @lexer.cmdarg.push(false) } opt_block_param bodystmt { args = @max_numparam_stack.has_numparams? ? 
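# if the block body used numbered parameters, e.g. `items.each do puts _1 end`,
# a synthesized numargs node takes the place of the (empty) declared parameter list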
@builder.numargs(@max_numparam_stack.top) : val[2] result = [ args, val[3] ] @max_numparam_stack.pop @static_env.unextend @lexer.cmdarg.pop } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body p_case_body: kIN { @lexer.state = :expr_beg @lexer.command_start = false @pattern_variables.push @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = true } p_top_expr then { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[1] } compstmt p_cases { result = [ @builder.in_pattern(val[0], *val[2], val[3], val[5]), *val[6] ] } p_cases: opt_else { result = [ val[0] ] } | p_case_body p_top_expr: p_top_expr_body { result = [ val[0], nil ] } | p_top_expr_body kIF_MOD expr_value { result = [ val[0], @builder.if_guard(val[1], val[2]) ] } | p_top_expr_body kUNLESS_MOD expr_value { result = [ val[0], @builder.unless_guard(val[1], val[2]) ] } p_top_expr_body: p_expr | p_expr tCOMMA { # array patterns that end with comma # like 1, 2, # must be emitted as `array_pattern_with_tail` item = @builder.match_with_trailing_comma(val[0], val[1]) result = @builder.array_pattern(nil, [ item ], nil) } | p_expr tCOMMA p_args { result = @builder.array_pattern(nil, [val[0]].concat(val[2]), nil) } | p_find { result = @builder.find_pattern(nil, val[0], nil) } | p_args_tail { result = @builder.array_pattern(nil, val[0], nil) } | p_kwargs { result = @builder.hash_pattern(nil, val[0], nil) } p_expr: p_as p_as: p_expr tASSOC p_variable { result = @builder.match_as(val[0], val[1], val[2]) } | p_alt p_alt: p_alt tPIPE p_expr_basic { result = @builder.match_alt(val[0], val[1], val[2]) } | p_expr_basic p_lparen: tLPAREN2 { result = val[0] @pattern_hash_keys.push } p_lbracket: tLBRACK2 { result = val[0] @pattern_hash_keys.push } p_expr_basic: p_value | p_variable | p_const p_lparen p_args rparen { @pattern_hash_keys.pop pattern = @builder.array_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lparen p_find rparen { @pattern_hash_keys.pop pattern = @builder.find_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lparen p_kwargs rparen { @pattern_hash_keys.pop pattern = @builder.hash_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const tLPAREN2 rparen { pattern = @builder.array_pattern(val[1], nil, val[2]) result = @builder.const_pattern(val[0], val[1], pattern, val[2]) } | p_const p_lbracket p_args rbracket { @pattern_hash_keys.pop pattern = @builder.array_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lbracket p_find rbracket { @pattern_hash_keys.pop pattern = @builder.find_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lbracket p_kwargs rbracket { @pattern_hash_keys.pop pattern = @builder.hash_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const tLBRACK2 rbracket { pattern = @builder.array_pattern(val[1], nil, val[2]) result = @builder.const_pattern(val[0], val[1], pattern, val[2]) } | tLBRACK p_args rbracket { result = @builder.array_pattern(val[0], val[1], val[2]) } | tLBRACK p_find rbracket { result = @builder.find_pattern(val[0], val[1], val[2]) } | tLBRACK rbracket { result = @builder.array_pattern(val[0], [], val[1]) } | tLBRACE { @pattern_hash_keys.push result 
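# braced hash patterns, e.g. `in {name:, age: Integer}`; the in_kwarg flag is
# saved here and restored after the closing brace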
= @context.in_kwarg @context.in_kwarg = false } p_kwargs rbrace { @pattern_hash_keys.pop @context.in_kwarg = val[1] result = @builder.hash_pattern(val[0], val[2], val[3]) } | tLBRACE rbrace { result = @builder.hash_pattern(val[0], [], val[1]) } | tLPAREN { @pattern_hash_keys.push } p_expr rparen { @pattern_hash_keys.pop result = @builder.begin(val[0], val[2], val[3]) } p_args: p_expr { result = [ val[0] ] } | p_args_head { result = val[0] } | p_args_head p_arg { result = [ *val[0], val[1] ] } | p_args_head p_rest { result = [ *val[0], val[1] ] } | p_args_head p_rest tCOMMA p_args_post { result = [ *val[0], val[1], *val[3] ] } | p_args_tail p_args_head: p_arg tCOMMA { # array patterns that end with comma # like [1, 2,] # must be emitted as `array_pattern_with_tail` item = @builder.match_with_trailing_comma(val[0], val[1]) result = [ item ] } | p_args_head p_arg tCOMMA { # array patterns that end with comma # like [1, 2,] # must be emitted as `array_pattern_with_tail` last_item = @builder.match_with_trailing_comma(val[1], val[2]) result = [ *val[0], last_item ] } p_args_tail: p_rest { result = [ val[0] ] } | p_rest tCOMMA p_args_post { result = [ val[0], *val[2] ] } p_find: p_rest tCOMMA p_args_post tCOMMA p_rest { result = [ val[0], *val[2], val[4] ] } p_rest: tSTAR tIDENTIFIER { result = @builder.match_rest(val[0], val[1]) } | tSTAR { result = @builder.match_rest(val[0]) } p_args_post: p_arg { result = [ val[0] ] } | p_args_post tCOMMA p_arg { result = [ *val[0], val[2] ] } p_arg: p_expr p_kwargs: p_kwarg tCOMMA p_any_kwrest { result = [ *val[0], *val[2] ] } | p_kwarg { result = val[0] } | p_kwarg tCOMMA { result = val[0] } | p_any_kwrest { result = val[0] } p_kwarg: p_kw { result = [ val[0] ] } | p_kwarg tCOMMA p_kw { result = [ *val[0], val[2] ] } p_kw: p_kw_label p_expr { result = @builder.match_pair(*val[0], val[1]) } | p_kw_label { result = @builder.match_label(*val[0]) } p_kw_label: tLABEL { result = [:label, val[0]] } | tSTRING_BEG string_contents tLABEL_END { result = [:quoted, [val[0], val[1], val[2]]] } p_kwrest: kwrest_mark tIDENTIFIER { result = [ @builder.match_rest(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.match_rest(val[0], nil) ] } p_kwnorest: kwrest_mark kNIL { result = val } p_any_kwrest: p_kwrest | p_kwnorest { result = [ @builder.match_nil_pattern(val[0][0], val[0][1]) ] } p_value: p_primitive | p_primitive tDOT2 p_primitive { result = @builder.range_inclusive(val[0], val[1], val[2]) } | p_primitive tDOT3 p_primitive { result = @builder.range_exclusive(val[0], val[1], val[2]) } | p_primitive tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | p_primitive tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | p_var_ref | p_expr_ref | p_const | tBDOT2 p_primitive { result = @builder.range_inclusive(nil, val[0], val[1]) } | tBDOT3 p_primitive { result = @builder.range_exclusive(nil, val[0], val[1]) } p_primitive: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | keyword_variable { result = @builder.accessible(val[0]) } | lambda p_variable: tIDENTIFIER { result = @builder.assignable(@builder.match_var(val[0])) } p_var_ref: tCARET tIDENTIFIER { name = val[1][0] unless static_env.declared?(name) diagnostic :error, :undefined_lvar, { :name => name }, val[1] end lvar = @builder.accessible(@builder.ident(val[1])) result = @builder.pin(val[0], lvar) } | tCARET nonlocal_var { non_lvar = @builder.accessible(val[1]) result = @builder.pin(val[0], non_lvar) } p_expr_ref: tCARET tLPAREN expr_value rparen { expr = 
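# expression pin in a pattern (Ruby 3.1+), e.g. `in ^(Time.now.year)` matches
# against the value of an arbitrary parenthesized expression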
@builder.begin(val[1], val[2], val[3]) result = @builder.pin(val[0], expr) } p_const: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | p_const tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCONSTANT { result = @builder.const(val[0]) } opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { string = @builder.string_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tSTRING { string = @builder.string(val[0]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { string = @builder.xstring_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word tSPACE { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cmdarg.push(false) @lexer.cond.push(false) } compstmt tSTRING_DEND { @lexer.cmdarg.pop @lexer.cond.pop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: ssym | dsym ssym: tSYMBOL { @lexer.state = :expr_end result = @builder.symbol(val[0]) } dsym: tSYMBEG string_contents tSTRING_END { @lexer.state = :expr_end result = @builder.symbol_compose(val[0], 
val[1], val[2]) } numeric: simple_numeric { result = val[0] } | tUNARY_NUM simple_numeric =tLOWEST { if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], val[1]) else result = @builder.unary_num(val[0], val[1]) end } simple_numeric: tINTEGER { @lexer.state = :expr_end result = @builder.integer(val[0]) } | tFLOAT { @lexer.state = :expr_end result = @builder.float(val[0]) } | tRATIONAL { @lexer.state = :expr_end result = @builder.rational(val[0]) } | tIMAGINARY { @lexer.state = :expr_end result = @builder.complex(val[0]) } nonlocal_var: tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | nonlocal_var keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | # nothing { result = nil } f_opt_paren_args: f_paren_args | none { @context.in_argdef = false result = @builder.args(nil, [], nil) } f_paren_args: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value @context.in_argdef = false } f_arglist: f_paren_args | { result = @context.dup @context.in_kwarg = true @context.in_argdef = true } f_args term { @context.in_kwarg = val[0].in_kwarg @context.in_argdef = false result = @builder.args(nil, val[1], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_any_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } | args_forward { @static_env.declare_forward_args result = [ @builder.forward_arg(val[0]) ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). 
concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } args_forward: tBDOT3 { result = val[0] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] @max_numparam_stack.has_ordinary_params! result = val[0] } f_arg_asgn: f_norm_arg { @current_arg_stack.set(val[0][0]) result = val[0] } f_arg_item: f_arg_asgn { @current_arg_stack.set(0) result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_label: tLABEL { check_kwarg_name(val[0]) @static_env.declare val[0][0] @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(val[0][0]) @context.in_argdef = false result = val[0] } f_kw: f_label arg_value { @current_arg_stack.set(nil) @context.in_argdef = true result = @builder.kwoptarg(val[0], val[1]) } | f_label { @current_arg_stack.set(nil) @context.in_argdef = true result = @builder.kwarg(val[0]) } f_block_kw: f_label primary_value { @context.in_argdef = true result = @builder.kwoptarg(val[0], val[1]) } | f_label { @context.in_argdef = true result = @builder.kwarg(val[0]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_no_kwarg: p_kwnorest { result = [ @builder.kwnilarg(val[0][0], val[0][1]) ] } f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { @static_env.declare_anonymous_kwrestarg result = [ @builder.kwrestarg(val[0]) ] } f_opt: f_arg_asgn f_eq arg_value { @current_arg_stack.set(0) @context.in_argdef = true result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: f_arg_asgn f_eq primary_value { @current_arg_stack.set(0) @context.in_argdef = true result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { @static_env.declare_anonymous_restarg result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } | blkarg_mark { @static_env.declare_anonymous_blockarg result = @builder.blockarg(val[0], nil) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC 
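# hash elements: `key => value`, `label: value`, value-omitted `label:`
# (Ruby 3.1), `"quoted label": value`, `**splat`, and a bare `**` that
# forwards an anonymous keyword-rest argument (Ruby 3.2)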
arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tLABEL { result = @builder.pair_label(val[0]) } | tSTRING_BEG string_contents tLABEL_END arg_value { result = @builder.pair_quoted(val[0], val[1], val[2], val[3]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } | tDSTAR { if !@static_env.declared_anonymous_kwrestarg? diagnostic :error, :no_anonymous_kwrestarg, nil, val[0] end result = @builder.forwarded_kwrestarg(val[0]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: operation | op operation3: tIDENTIFIER | tFID | op dot_or_colon: call_op | tCOLON2 call_op: tDOT { result = [:dot, val[0][1]] } | tANDDOT { result = [:anddot, val[0][1]] } opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } rbrace: opt_nl tRCURLY { result = val[1] } trailer: opt_nl | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 32 end def default_encoding Encoding::UTF_8 end def endless_method_name(name_t) if !%w[=== == != <= >=].include?(name_t[0]) && name_t[0].end_with?('=') diagnostic :error, :endless_setter, nil, name_t end end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) @max_numparam_stack.push(static: true) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop @max_numparam_stack.pop end def try_declare_numparam(node) name = node.children[0] if name =~ /\A_[1-9]\z/ && !static_env.declared?(name) && @context.in_dynamic_block? # definitely an implicit param location = node.loc.expression if max_numparam_stack.has_ordinary_params? 
diagnostic :error, :ordinary_param_defined, nil, [nil, location] end raw_max_numparam_stack = max_numparam_stack.stack.dup # ignore current block scope raw_max_numparam_stack.pop raw_max_numparam_stack.reverse_each do |outer_scope| if outer_scope[:static] # found an outer scope that can't have numparams # like def/class/etc break else outer_scope_has_numparams = outer_scope[:value] > 0 if outer_scope_has_numparams diagnostic :error, :numparam_used_in_outer_scope, nil, [nil, location] else # for now it's ok, but an outer scope can also be a block # like proc { _1; proc { proc { proc { _2 }} }} # with numparams, so we need to continue end end end static_env.declare(name) max_numparam_stack.register(name[1].to_i) true else false end end parser-3.3.4.2/lib/parser/ruby33.y000066400000000000000000003372711465510415600165640ustar00rootroot00000000000000class Parser::Ruby33 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tRATIONAL tIMAGINARY tLABEL_END tANDDOT tBDOT2 tBDOT3 prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 tBDOT2 tBDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kIN nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: { @current_arg_stack.push(nil) @max_numparam_stack.push(static: true) } top_compstmt { result = val[1] @current_arg_stack.pop @max_numparam_stack.pop } top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN begin_block { result = @builder.preexe(val[0], *val[1]) } begin_block: tLCURLY top_compstmt tRCURLY { result = val } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? 
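# NOTE (editor's illustrative comment, not part of the original grammar):
# this branch rejects an `else` that has no `rescue` to pair with, e.g.
#   begin; work; else; fallback; end                  # => :useless_else
#   begin; work; rescue => e; else; fallback; end     # accepted
# (`work`/`fallback` are invented names for illustration.)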
diagnostic :error, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN begin_block { diagnostic :error, :begin_in_method, nil, val[0] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL mrhs_arg kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) begin_body = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.multi_assign(val[0], val[1], begin_body) } | mlhs tEQL mrhs_arg { result = @builder.multi_assign(val[0], val[1], val[2]) } | expr command_asgn: lhs tEQL command_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | defn_head f_opt_paren_args tEQL endless_command { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_opt_paren_args tEQL endless_command { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop 
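# NOTE (editor's illustrative comment, not part of the original grammar):
# this alternative covers endless singleton definitions such as
#   def self.double(x) = x * 2
# `endless_method_name` above rejects setters, so `def self.name=(v) = v`
# is reported as :endless_setter. (Method names here are invented.)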
@context.in_def = ctx.in_def } | backref tOP_ASGN command_rhs { @builder.op_assign(val[0], val[1], val[2]) } endless_command: command | endless_command kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | kNOT opt_nl endless_command { result = @builder.not_op(val[0], nil, val[2], nil) } command_rhs: command_call =tOP_ASGN | command_call kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | command_asgn expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg tASSOC p_in_kwarg p_pvtbl p_pktbl p_top_expr_body { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[2] result = @builder.match_pattern(val[0], val[1], val[5]) } | arg kIN p_in_kwarg p_pvtbl p_pktbl p_top_expr_body { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[2] result = @builder.match_pattern_p(val[0], val[1], val[5]) } | arg =tLBRACE_ARG expr_value: expr expr_value_do: { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop result = [ val[1], val[2] ] } def_name: fname { local_push @current_arg_stack.push(nil) result = [ val[0], @context.dup ] @context.in_def = true } defn_head: k_def def_name { result = [ val[0], val[1] ] } defs_head: k_def singleton dot_or_colon { @lexer.state = :expr_fname @context.in_argdef = true } def_name { result = [ val[0], val[1], val[2], val[4] ] } command_call: command | block_command block_command: block_call | block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } cmd_brace_block: tLBRACE_ARG { result = @context.dup @context.in_block = true } brace_body tRCURLY { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value call_op operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value call_op operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 tCONSTANT tLCURLY brace_body tRCURLY { method_call = @builder.call_method(val[0], val[1], val[2], nil, [], nil) args, body = val[4] result = @builder.block(method_call, val[3], args, body, val[5]) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD 
command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | k_return call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { if (val[1][0] == :anddot) diagnostic :error, :csend_in_lhs_of_masgn, nil, val[1] end result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { if (val[1][0] == :anddot) diagnostic :error, :csend_in_lhs_of_masgn, nil, val[1] end result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], 
val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fitem: fname { result = @builder.symbol_internal(val[0]) } | symbol undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | arg tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | tBDOT2 arg { result = @builder.range_inclusive(nil, val[0], val[1]) } | tBDOT3 arg { result = @builder.range_exclusive(nil, val[0], val[1]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM simple_numeric tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( val[1], val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], 
val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr =tCMP | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl begin_defined arg { @context.in_defined = val[2].in_defined result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[3] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | defn_head f_opt_paren_args tEQL endless_arg { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_opt_paren_args tEQL endless_arg { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | primary endless_arg: arg=kRESCUE_MOD | endless_arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | kNOT opt_nl endless_arg { result = @builder.not_op(val[0], nil, val[2], nil) } relop: tGT | tLT | tGEQ | tLEQ rel_expr: arg relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } begin_defined: none { result = @context.dup } arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } arg_rhs: arg =tOP_ASGN | arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } paren_args: tLPAREN2 opt_call_args rparen { result = val } | tLPAREN2 args tCOMMA args_forward rparen { unless @static_env.declared_forward_args? diagnostic :error, :unexpected_token, { :token => 'tBDOT3' } , val[3] end result = [val[0], [*val[1], @builder.forwarded_args(val[3])], val[4]] } | tLPAREN2 args_forward rparen { unless @static_env.declared_forward_args? 
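# NOTE (editor's illustrative comment, not part of the original grammar):
# a bare `...` in an argument list is only valid when the enclosing method
# declared `...` itself (which calls declare_forward_args), e.g.
#   def relay(...) = target(...)   # ok
#   def relay(a)   = target(...)   # error: 'tBDOT3' is unexpected
# (`relay`/`target` are invented names for illustration.)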
diagnostic :error, :unexpected_token, { :token => 'tBDOT3' } , val[1] end result = [val[0], [@builder.forwarded_args(val[1])], val[2]] } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { # When branch gets invoked by RACC's lookahead # and command args start with '[' or '(' # we need to put `true` to the cmdarg stack # **before** `false` pushed by lexer # m [], n # ^ # Right here we have cmdarg [...0] because # lexer pushed it on '[' # We need to modify cmdarg stack to [...10] # # For all other cases (like `m n` or `m n, []`) we simply put 1 to the stack # and later lexer pushes corresponding bits on top of it. last_token = @last_token[0] lookahead = last_token == :tLBRACK || last_token == :tLPAREN_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.push(true) @lexer.cmdarg.push(top) else @lexer.cmdarg.push(true) end } call_args { # call_args can be followed by tLBRACE_ARG (that does cmdarg.push(0) in the lexer) # but the push must be done after cmdarg.pop() in the parser. # So this code does cmdarg.pop() to pop 0 pushed by tLBRACE_ARG, # cmdarg.pop() to pop 1 pushed by command_args, # and cmdarg.push(0) to restore back the flag set by tLBRACE_ARG. last_token = @last_token[0] lookahead = last_token == :tLBRACE_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.pop @lexer.cmdarg.push(top) else @lexer.cmdarg.pop end result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } | tAMPER { if !@static_env.declared_anonymous_blockarg? diagnostic :error, :no_anonymous_blockarg, nil, val[0] end if @context.in_dynamic_block? && context.in_def && @static_env.declared_anonymous_blockarg_in_current_scpe? && @static_env.parent_has_anonymous_blockarg? diagnostic :error, :ambiguous_anonymous_blockarg, nil, val[0] end result = @builder.block_pass(val[0], nil) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | arg_splat | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA arg_splat { result = val[0].concat(val[2]) } arg_splat: tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR { if !@static_env.declared_anonymous_restarg? diagnostic :error, :no_anonymous_restarg, nil, val[0] end if @context.in_dynamic_block? && context.in_def && @static_env.declared_anonymous_restarg_in_current_scope? && @static_env.parent_has_anonymous_restarg? 
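# NOTE (editor's illustrative comment, not part of the original grammar):
# a bare `*` argument forwards an anonymous rest parameter, e.g.
#   def sum(*) = add(*)
# Ruby 3.3 also lets a block forward the method's anonymous `*`, but when the
# block's own parameter list and the enclosing method both declare one, the
# forwarding is ambiguous and is rejected below.
# (`sum`/`add` are invented names for illustration.)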
diagnostic :error, :ambiguous_anonymous_restarg, nil, val[0] end result = [ @builder.forwarded_restarg(val[0]) ] } mrhs_arg: mrhs { result = @builder.array(nil, val[0], nil) } | arg_value mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { @lexer.cmdarg.push(false) } bodystmt kEND { @lexer.cmdarg.pop result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG compstmt { @lexer.state = :expr_endarg } tRPAREN { result = @builder.begin(val[0], val[1], val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | k_return { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 begin_defined expr rparen { @context.in_defined = val[3].in_defined result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[4] ], val[5]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | lambda | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE expr_value_do compstmt kEND { result = @builder.loop(:while, val[0], *val[1], val[2], val[3]) } | kUNTIL expr_value_do compstmt kEND { result = @builder.loop(:until, val[0], *val[1], val[2], val[3]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kCASE expr_value opt_terms p_case_body kEND { *in_bodies, (else_t, else_body) = *val[3] result = @builder.case_match(val[0], val[1], in_bodies, else_t, else_body, val[4]) } | kFOR for_var kIN expr_value_do compstmt kEND { result = @builder.for(val[0], val[1], val[2], *val[3], val[4], val[5]) } | k_class cpath superclass { @context.in_class = true local_push } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = 
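# NOTE (editor's illustrative comment, not part of the original grammar):
# the in_def check above makes a class body inside a method, e.g.
#   def build; class Widget; end; end
# fail with :class_in_def, mirroring MRI's "class definition in method body"
# error; `Class.new` remains the dynamic alternative. (Names are invented.)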
val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr_value term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | defn_head f_arglist bodystmt kEND { def_t, (name_t, ctx) = val[0] result = @builder.def_method(def_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_arglist bodystmt kEND { def_t, recv, dot_t, (name_t, ctx) = val[0] result = @builder.def_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } k_def: kDEF { result = val[0] @context.in_argdef = true } k_return: kRETURN { if @context.in_class && !@context.in_def && !(context.in_block || context.in_lambda) diagnostic :error, :invalid_return, nil, val[0] end } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA f_rest_marg { result = val[0]. push(val[2]) } | f_marg_list tCOMMA f_rest_marg tCOMMA f_marg_list { result = val[0]. push(val[2]). concat(val[4]) } | f_rest_marg { result = [ val[0] ] } | f_rest_marg tCOMMA f_marg_list { result = [ val[0], *val[2] ] } f_rest_marg: tSTAR f_norm_arg { result = @builder.restarg(val[0], val[1]) } | tSTAR { result = @builder.restarg(val[0]) } f_any_kwrest: f_kwrest | f_no_kwarg f_eq: { @context.in_argdef = false } tEQL { result = val[1] } block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_any_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } excessed_comma: tCOMMA block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. 
concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg excessed_comma | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(nil) @context.in_argdef = false result = @builder.args(val[0], val[1], val[2]) } | tPIPE block_param opt_bv_decl tPIPE { @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(nil) @context.in_argdef = false result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: tLAMBDA { @static_env.extend_dynamic @max_numparam_stack.push(static: false) result = @context.dup @context.in_lambda = true } f_larglist { @lexer.cmdarg.push(false) } lambda_body { lambda_call = @builder.call_lambda(val[0]) args = @max_numparam_stack.has_numparams? ? @builder.numargs(@max_numparam_stack.top) : val[2] begin_t, body, end_t = val[4] @max_numparam_stack.pop @static_env.unextend @lexer.cmdarg.pop @context.in_lambda = val[1].in_lambda result = @builder.block(lambda_call, begin_t, args, body, end_t) } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { @context.in_argdef = false @max_numparam_stack.has_ordinary_params! result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { @context.in_argdef = false if val[0].any? @max_numparam_stack.has_ordinary_params! 
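# NOTE (editor's illustrative comment, not part of the original grammar):
# this f_args alternative covers lambda parameters written without
# parentheses, e.g. `-> x, y { x + y }`; declaring any ordinary parameter this
# way disables numbered parameters, so such a lambda body may not also use `_1`.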
end result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { @context.in_lambda = val[1].in_lambda result = [ val[0], val[2], val[3] ] } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } bodystmt kEND { @context.in_lambda = val[1].in_lambda result = [ val[0], val[2], val[3] ] } do_block: kDO_BLOCK { result = @context.dup @context.in_block = true } do_body kEND { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value call_op operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value call_op paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { result = @context.dup @context.in_block = true } brace_body tRCURLY { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } | kDO { result = @context.dup @context.in_block = true } do_body kEND { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } brace_body: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } opt_block_param compstmt { args = @max_numparam_stack.has_numparams? ? @builder.numargs(@max_numparam_stack.top) : val[1] result = [ args, val[2] ] @max_numparam_stack.pop @static_env.unextend } do_body: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } { @lexer.cmdarg.push(false) } opt_block_param bodystmt { args = @max_numparam_stack.has_numparams? ? 
@builder.numargs(@max_numparam_stack.top) : val[2] result = [ args, val[3] ] @max_numparam_stack.pop @static_env.unextend @lexer.cmdarg.pop } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body p_pvtbl: none { @pattern_variables.push } p_pktbl: none { @pattern_hash_keys.push } p_in_kwarg: none { result = @context.in_kwarg @lexer.state = :expr_beg @lexer.command_start = false @context.in_kwarg = true } p_case_body: kIN p_in_kwarg p_pvtbl p_pktbl p_top_expr then { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[1] } compstmt p_cases { result = [ @builder.in_pattern(val[0], *val[4], val[5], val[7]), *val[8] ] } p_cases: opt_else { result = [ val[0] ] } | p_case_body p_top_expr: p_top_expr_body { result = [ val[0], nil ] } | p_top_expr_body kIF_MOD expr_value { result = [ val[0], @builder.if_guard(val[1], val[2]) ] } | p_top_expr_body kUNLESS_MOD expr_value { result = [ val[0], @builder.unless_guard(val[1], val[2]) ] } p_top_expr_body: p_expr | p_expr tCOMMA { # array patterns that end with comma # like 1, 2, # must be emitted as `array_pattern_with_tail` item = @builder.match_with_trailing_comma(val[0], val[1]) result = @builder.array_pattern(nil, [ item ], nil) } | p_expr tCOMMA p_args { result = @builder.array_pattern(nil, [val[0]].concat(val[2]), nil) } | p_find { result = @builder.find_pattern(nil, val[0], nil) } | p_args_tail { result = @builder.array_pattern(nil, val[0], nil) } | p_kwargs { result = @builder.hash_pattern(nil, val[0], nil) } p_expr: p_as p_as: p_expr tASSOC p_variable { result = @builder.match_as(val[0], val[1], val[2]) } | p_alt p_alt: p_alt tPIPE p_expr_basic { result = @builder.match_alt(val[0], val[1], val[2]) } | p_expr_basic p_lparen: tLPAREN2 { result = val[0] @pattern_hash_keys.push } p_lbracket: tLBRACK2 { result = val[0] @pattern_hash_keys.push } p_expr_basic: p_value | p_variable | p_const p_lparen p_args rparen { @pattern_hash_keys.pop pattern = @builder.array_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lparen p_find rparen { @pattern_hash_keys.pop pattern = @builder.find_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lparen p_kwargs rparen { @pattern_hash_keys.pop pattern = @builder.hash_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const tLPAREN2 rparen { pattern = @builder.array_pattern(val[1], nil, val[2]) result = @builder.const_pattern(val[0], val[1], pattern, val[2]) } | p_const p_lbracket p_args rbracket { @pattern_hash_keys.pop pattern = @builder.array_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lbracket p_find rbracket { @pattern_hash_keys.pop pattern = @builder.find_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lbracket p_kwargs rbracket { @pattern_hash_keys.pop pattern = @builder.hash_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const tLBRACK2 rbracket { pattern = @builder.array_pattern(val[1], nil, val[2]) result = @builder.const_pattern(val[0], val[1], pattern, val[2]) } | tLBRACK p_args rbracket { result = @builder.array_pattern(val[0], val[1], val[2]) } | tLBRACK p_find rbracket { result = @builder.find_pattern(val[0], val[1], val[2]) } | tLBRACK rbracket { result = 
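# NOTE (editor's illustrative comment, not part of the original grammar):
# this alternative is the empty array pattern, e.g.
#   case list; in [] then :empty; end
# array patterns match the array's full length unless a splat is present,
# so `in []` only matches an empty array. (`list` is an invented name.)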
@builder.array_pattern(val[0], [], val[1]) } | tLBRACE { @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = false } p_kwargs rbrace { @pattern_hash_keys.pop @context.in_kwarg = val[1] result = @builder.hash_pattern(val[0], val[2], val[3]) } | tLBRACE rbrace { result = @builder.hash_pattern(val[0], [], val[1]) } | tLPAREN { @pattern_hash_keys.push } p_expr rparen { @pattern_hash_keys.pop result = @builder.begin(val[0], val[2], val[3]) } p_args: p_expr { result = [ val[0] ] } | p_args_head { result = val[0] } | p_args_head p_arg { result = [ *val[0], val[1] ] } | p_args_head p_rest { result = [ *val[0], val[1] ] } | p_args_head p_rest tCOMMA p_args_post { result = [ *val[0], val[1], *val[3] ] } | p_args_tail p_args_head: p_arg tCOMMA { # array patterns that end with comma # like [1, 2,] # must be emitted as `array_pattern_with_tail` item = @builder.match_with_trailing_comma(val[0], val[1]) result = [ item ] } | p_args_head p_arg tCOMMA { # array patterns that end with comma # like [1, 2,] # must be emitted as `array_pattern_with_tail` last_item = @builder.match_with_trailing_comma(val[1], val[2]) result = [ *val[0], last_item ] } p_args_tail: p_rest { result = [ val[0] ] } | p_rest tCOMMA p_args_post { result = [ val[0], *val[2] ] } p_find: p_rest tCOMMA p_args_post tCOMMA p_rest { result = [ val[0], *val[2], val[4] ] } p_rest: tSTAR tIDENTIFIER { result = @builder.match_rest(val[0], val[1]) } | tSTAR { result = @builder.match_rest(val[0]) } p_args_post: p_arg { result = [ val[0] ] } | p_args_post tCOMMA p_arg { result = [ *val[0], val[2] ] } p_arg: p_expr p_kwargs: p_kwarg tCOMMA p_any_kwrest { result = [ *val[0], *val[2] ] } | p_kwarg { result = val[0] } | p_kwarg tCOMMA { result = val[0] } | p_any_kwrest { result = val[0] } p_kwarg: p_kw { result = [ val[0] ] } | p_kwarg tCOMMA p_kw { result = [ *val[0], val[2] ] } p_kw: p_kw_label p_expr { result = @builder.match_pair(*val[0], val[1]) } | p_kw_label { result = @builder.match_label(*val[0]) } p_kw_label: tLABEL { result = [:label, val[0]] } | tSTRING_BEG string_contents tLABEL_END { result = [:quoted, [val[0], val[1], val[2]]] } p_kwrest: kwrest_mark tIDENTIFIER { result = [ @builder.match_rest(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.match_rest(val[0], nil) ] } p_kwnorest: kwrest_mark kNIL { result = val } p_any_kwrest: p_kwrest | p_kwnorest { result = [ @builder.match_nil_pattern(val[0][0], val[0][1]) ] } p_value: p_primitive | p_primitive tDOT2 p_primitive { result = @builder.range_inclusive(val[0], val[1], val[2]) } | p_primitive tDOT3 p_primitive { result = @builder.range_exclusive(val[0], val[1], val[2]) } | p_primitive tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | p_primitive tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | p_var_ref | p_expr_ref | p_const | tBDOT2 p_primitive { result = @builder.range_inclusive(nil, val[0], val[1]) } | tBDOT3 p_primitive { result = @builder.range_exclusive(nil, val[0], val[1]) } p_primitive: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | keyword_variable { result = @builder.accessible(val[0]) } | lambda p_variable: tIDENTIFIER { result = @builder.assignable(@builder.match_var(val[0])) } p_var_ref: tCARET tIDENTIFIER { name = val[1][0] unless static_env.declared?(name) diagnostic :error, :undefined_lvar, { :name => name }, val[1] end lvar = @builder.accessible(@builder.ident(val[1])) result = @builder.pin(val[0], lvar) } | tCARET nonlocal_var { non_lvar = @builder.accessible(val[1]) result = 
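# NOTE (editor's illustrative comment, not part of the original grammar):
# pinning is not limited to locals: `in ^@threshold` pins an instance variable
# (this alternative), and `in ^(compute_limit)` pins the value of an arbitrary
# expression via p_expr_ref just below. (Names are invented for illustration.)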
@builder.pin(val[0], non_lvar) } p_expr_ref: tCARET tLPAREN expr_value rparen { expr = @builder.begin(val[1], val[2], val[3]) result = @builder.pin(val[0], expr) } p_const: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | p_const tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCONSTANT { result = @builder.const(val[0]) } opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { string = @builder.string_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tSTRING { string = @builder.string(val[0]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { string = @builder.xstring_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words_sep: tSPACE | words_sep tSPACE words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word words_sep { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word words_sep { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT words_sep { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT words_sep { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cmdarg.push(false) @lexer.cond.push(false) } compstmt string_dend { @lexer.cmdarg.pop @lexer.cond.pop result = @builder.begin(val[0], val[2], val[3]) } string_dend: tSTRING_DEND string_dvar: nonlocal_var { result = @builder.accessible(val[0]) } | backref symbol: ssym | dsym ssym: tSYMBOL { @lexer.state = :expr_end result = @builder.symbol(val[0]) } dsym: tSYMBEG 
string_contents tSTRING_END { @lexer.state = :expr_end result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: simple_numeric { result = val[0] } | tUNARY_NUM simple_numeric =tLOWEST { if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], val[1]) else result = @builder.unary_num(val[0], val[1]) end } simple_numeric: tINTEGER { @lexer.state = :expr_end result = @builder.integer(val[0]) } | tFLOAT { @lexer.state = :expr_end result = @builder.float(val[0]) } | tRATIONAL { @lexer.state = :expr_end result = @builder.rational(val[0]) } | tIMAGINARY { @lexer.state = :expr_end result = @builder.complex(val[0]) } nonlocal_var: tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | nonlocal_var keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | # nothing { result = nil } f_opt_paren_args: f_paren_args | none { @context.in_argdef = false result = @builder.args(nil, [], nil) } f_paren_args: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value @context.in_argdef = false } f_arglist: f_paren_args | { result = @context.dup @context.in_kwarg = true @context.in_argdef = true } f_args term { @context.in_kwarg = val[0].in_kwarg @context.in_argdef = false result = @builder.args(nil, val[1], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_any_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } | args_forward { @static_env.declare_forward_args result = [ @builder.forward_arg(val[0]) ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). 
concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } args_forward: tBDOT3 { result = val[0] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] @max_numparam_stack.has_ordinary_params! result = val[0] } f_arg_asgn: f_norm_arg { @current_arg_stack.set(val[0][0]) result = val[0] } f_arg_item: f_arg_asgn { @current_arg_stack.set(0) result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_label: tLABEL { check_kwarg_name(val[0]) @static_env.declare val[0][0] @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(val[0][0]) @context.in_argdef = false result = val[0] } f_kw: f_label arg_value { @current_arg_stack.set(nil) @context.in_argdef = true result = @builder.kwoptarg(val[0], val[1]) } | f_label { @current_arg_stack.set(nil) @context.in_argdef = true result = @builder.kwarg(val[0]) } f_block_kw: f_label primary_value { @context.in_argdef = true result = @builder.kwoptarg(val[0], val[1]) } | f_label { @context.in_argdef = true result = @builder.kwarg(val[0]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_no_kwarg: p_kwnorest { result = [ @builder.kwnilarg(val[0][0], val[0][1]) ] } f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { @static_env.declare_anonymous_kwrestarg result = [ @builder.kwrestarg(val[0]) ] } f_opt: f_arg_asgn f_eq arg_value { @current_arg_stack.set(0) @context.in_argdef = true result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: f_arg_asgn f_eq primary_value { @current_arg_stack.set(0) @context.in_argdef = true result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { @static_env.declare_anonymous_restarg result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } | blkarg_mark { @static_env.declare_anonymous_blockarg result = @builder.blockarg(val[0], nil) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs 
trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tLABEL { result = @builder.pair_label(val[0]) } | tSTRING_BEG string_contents tLABEL_END arg_value { result = @builder.pair_quoted(val[0], val[1], val[2], val[3]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } | tDSTAR { if !@static_env.declared_anonymous_kwrestarg? diagnostic :error, :no_anonymous_kwrestarg, nil, val[0] end if @context.in_dynamic_block? && context.in_def && @static_env.declared_anonymous_kwrestarg_in_current_scope? && @static_env.parent_has_anonymous_kwrestarg? diagnostic :error, :ambiguous_anonymous_kwrestarg, nil, val[0] end result = @builder.forwarded_kwrestarg(val[0]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: operation | op operation3: tIDENTIFIER | tFID | op dot_or_colon: call_op | tCOLON2 call_op: tDOT { result = [:dot, val[0][1]] } | tANDDOT { result = [:anddot, val[0][1]] } opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } rbrace: opt_nl tRCURLY { result = val[1] } trailer: opt_nl | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 33 end def default_encoding Encoding::UTF_8 end def endless_method_name(name_t) if !%w[=== == != <= >=].include?(name_t[0]) && name_t[0].end_with?('=') diagnostic :error, :endless_setter, nil, name_t end end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) @max_numparam_stack.push(static: true) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop @max_numparam_stack.pop end def try_declare_numparam(node) name = node.children[0] if name =~ /\A_[1-9]\z/ && !static_env.declared?(name) && @context.in_dynamic_block? # definitely an implicit param location = node.loc.expression if max_numparam_stack.has_ordinary_params? 
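# NOTE (editor's illustrative comment, not part of the original grammar):
# `_1` through `_9` become implicit parameters only in a block with no ordinary
# parameters, so `proc { |a| _1 }` hits :ordinary_param_defined just below,
# and `proc { _1; proc { _2 } }` hits :numparam_used_in_outer_scope because
# the outer block has already registered its own numbered parameter.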
diagnostic :error, :ordinary_param_defined, nil, [nil, location] end raw_max_numparam_stack = max_numparam_stack.stack.dup # ignore current block scope raw_max_numparam_stack.pop raw_max_numparam_stack.reverse_each do |outer_scope| if outer_scope[:static] # found an outer scope that can't have numparams # like def/class/etc break else outer_scope_has_numparams = outer_scope[:value] > 0 if outer_scope_has_numparams diagnostic :error, :numparam_used_in_outer_scope, nil, [nil, location] else # for now it's ok, but an outer scope can also be a block # like proc { _1; proc { proc { proc { _2 }} }} # with numparams, so we need to continue end end end static_env.declare(name) max_numparam_stack.register(name[1].to_i) true else false end end parser-3.3.4.2/lib/parser/ruby34.y000066400000000000000000003373331465510415600165640ustar00rootroot00000000000000class Parser::Ruby34 token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tDSTAR tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSYMBOLS_BEG tQSYMBOLS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING_DEND tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tRATIONAL tIMAGINARY tLABEL_END tANDDOT tBDOT2 tBDOT3 prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 tBDOT2 tBDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kIN nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: { @current_arg_stack.push(nil) @max_numparam_stack.push(static: true) } top_compstmt { result = val[1] @current_arg_stack.pop @max_numparam_stack.pop } top_compstmt: top_stmts opt_terms { result = @builder.compstmt(val[0]) } top_stmts: # nothing { result = [] } | top_stmt { result = [ val[0] ] } | top_stmts terms top_stmt { result = val[0] << val[2] } | error top_stmt { result = [ val[1] ] } top_stmt: stmt | klBEGIN begin_block { result = @builder.preexe(val[0], *val[1]) } begin_block: tLCURLY top_compstmt tRCURLY { result = val } bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? 
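# Illustrative: this is where `begin; work; else; done; end` (an `else`
# with no `rescue` clause) is reported, while
# `begin; work; rescue; retry; else; done; end` passes through untouched.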
diagnostic :error, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt_or_begin { result = [ val[0] ] } | stmts terms stmt_or_begin { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt_or_begin: stmt | klBEGIN begin_block { diagnostic :error, :begin_in_method, nil, val[0] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | command_asgn | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL mrhs_arg kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) begin_body = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.multi_assign(val[0], val[1], begin_body) } | mlhs tEQL mrhs_arg { result = @builder.multi_assign(val[0], val[1], val[2]) } | expr command_asgn: lhs tEQL command_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN command_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | defn_head f_opt_paren_args tEQL endless_command { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_opt_paren_args tEQL endless_command { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop 
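# Illustrative sketch of what the two endless-definition rules above accept:
#
#   def price = 42           # def_endless_method
#   def self.price = 42      # def_endless_singleton
#
# Setter-like names are rejected via endless_method_name, e.g. `def x= = 1`.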
@context.in_def = ctx.in_def } | backref tOP_ASGN command_rhs { @builder.op_assign(val[0], val[1], val[2]) } endless_command: command | endless_command kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | kNOT opt_nl endless_command { result = @builder.not_op(val[0], nil, val[2], nil) } command_rhs: command_call =tOP_ASGN | command_call kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | command_asgn expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg tASSOC p_in_kwarg p_pvtbl p_pktbl p_top_expr_body { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[2] result = @builder.match_pattern(val[0], val[1], val[5]) } | arg kIN p_in_kwarg p_pvtbl p_pktbl p_top_expr_body { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[2] result = @builder.match_pattern_p(val[0], val[1], val[5]) } | arg =tLBRACE_ARG expr_value: expr expr_value_do: { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop result = [ val[1], val[2] ] } def_name: fname { local_push @current_arg_stack.push(nil) result = [ val[0], @context.dup ] @context.in_def = true } defn_head: k_def def_name { result = [ val[0], val[1] ] } defs_head: k_def singleton dot_or_colon { @lexer.state = :expr_fname @context.in_argdef = true } def_name { result = [ val[0], val[1], val[2], val[4] ] } command_call: command | block_command block_command: block_call | block_call dot_or_colon operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } cmd_brace_block: tLBRACE_ARG { result = @context.dup @context.in_block = true } brace_body tRCURLY { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } fcall: operation command: fcall command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], nil, val[1], nil) } | fcall command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], nil, val[1], nil) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value call_op operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value call_op operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 tCONSTANT tLCURLY brace_body tRCURLY { method_call = @builder.call_method(val[0], val[1], val[2], nil, [], nil) args, body = val[4] result = @builder.block(method_call, val[3], args, body, val[5]) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], nil, val[1], nil) } | kYIELD 
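# Illustrative: the `arg tASSOC ...` and `arg kIN ...` alternatives of `expr`
# above cover one-line pattern matching, e.g.
#
#   config => {host:, port:}   # rightward assignment, raises on mismatch
#   1 in Integer               # boolean-valued match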
command_args { result = @builder.keyword_cmd(:yield, val[0], nil, val[1], nil) } | k_return call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { if (val[1][0] == :anddot) diagnostic :error, :csend_in_lhs_of_masgn, nil, val[1] end result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { if (val[1][0] == :anddot) diagnostic :error, :csend_in_lhs_of_masgn, nil, val[1] end result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], 
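# Illustrative: the mlhs rules above describe destructuring assignment
# targets, e.g. `first, *rest = 1, 2, 3` or the nested form
# `a, (b, c) = 1, [2, 3]`.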
val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fitem: fname { result = @builder.symbol_internal(val[0]) } | symbol undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tDSTAR | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg_rhs { result = @builder.assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg_rhs { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_fetch(val[0], val[1], val[2])) result = @builder.op_assign(const, val[3], val[4]) } | tCOLON3 tCONSTANT tOP_ASGN arg_rhs { const = @builder.const_op_assignable( @builder.const_global(val[0], val[1])) result = @builder.op_assign(const, val[2], val[3]) } | backref tOP_ASGN arg_rhs { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | arg tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | tBDOT2 arg { result = @builder.range_inclusive(nil, val[0], val[1]) } | tBDOT3 arg { result = @builder.range_exclusive(nil, val[0], val[1]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM simple_numeric tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( val[1], val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], 
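# Illustrative: the range alternatives of `arg` above cover endless and
# beginless ranges, e.g. `1..`, `1...`, `..10` and `...10`, with
# tBDOT2/tBDOT3 supplying the beginless forms.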
val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr =tCMP | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl begin_defined arg { @context.in_defined = val[2].in_defined result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[3] ], nil) } | arg tEH arg opt_nl tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | defn_head f_opt_paren_args tEQL endless_arg { def_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_method(def_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_opt_paren_args tEQL endless_arg { def_t, recv, dot_t, (name_t, ctx) = val[0] endless_method_name(name_t) result = @builder.def_endless_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | primary endless_arg: arg=kRESCUE_MOD | endless_arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | kNOT opt_nl endless_arg { result = @builder.not_op(val[0], nil, val[2], nil) } relop: tGT | tLT | tGEQ | tLEQ rel_expr: arg relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } | rel_expr relop arg =tGT { result = @builder.binary_op(val[0], val[1], val[2]) } begin_defined: none { result = @context.dup } arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } arg_rhs: arg =tOP_ASGN | arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } paren_args: tLPAREN2 opt_call_args rparen { result = val } | tLPAREN2 args tCOMMA args_forward rparen { unless @static_env.declared_forward_args? diagnostic :error, :unexpected_token, { :token => 'tBDOT3' } , val[3] end result = [val[0], [*val[1], @builder.forwarded_args(val[3])], val[4]] } | tLPAREN2 args_forward rparen { unless @static_env.declared_forward_args? 
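# Illustrative: `...` may only be forwarded from a method that declared it,
# e.g. `def relay(...) = target(...)` parses, while a bare `target(...)`
# with no `...` parameter in scope is rejected right here.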
diagnostic :error, :unexpected_token, { :token => 'tBDOT3' } , val[1] end result = [val[0], [@builder.forwarded_args(val[1])], val[2]] } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args | args tCOMMA | args tCOMMA assocs tCOMMA { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs tCOMMA { result = [ @builder.associate(nil, val[0], nil) ] } call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | block_arg { result = [ val[0] ] } command_args: { # When branch gets invoked by RACC's lookahead # and command args start with '[' or '(' # we need to put `true` to the cmdarg stack # **before** `false` pushed by lexer # m [], n # ^ # Right here we have cmdarg [...0] because # lexer pushed it on '[' # We need to modify cmdarg stack to [...10] # # For all other cases (like `m n` or `m n, []`) we simply put 1 to the stack # and later lexer pushes corresponding bits on top of it. last_token = @last_token[0] lookahead = last_token == :tLBRACK || last_token == :tLPAREN_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.push(true) @lexer.cmdarg.push(top) else @lexer.cmdarg.push(true) end } call_args { # call_args can be followed by tLBRACE_ARG (that does cmdarg.push(0) in the lexer) # but the push must be done after cmdarg.pop() in the parser. # So this code does cmdarg.pop() to pop 0 pushed by tLBRACE_ARG, # cmdarg.pop() to pop 1 pushed by command_args, # and cmdarg.push(0) to restore back the flag set by tLBRACE_ARG. last_token = @last_token[0] lookahead = last_token == :tLBRACE_ARG if lookahead top = @lexer.cmdarg.pop @lexer.cmdarg.pop @lexer.cmdarg.push(top) else @lexer.cmdarg.pop end result = val[1] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } | tAMPER { if !@static_env.declared_anonymous_blockarg? diagnostic :error, :no_anonymous_blockarg, nil, val[0] end if @context.in_dynamic_block? && context.in_def && @static_env.declared_anonymous_blockarg_in_current_scpe? && @static_env.parent_has_anonymous_blockarg? diagnostic :error, :ambiguous_anonymous_blockarg, nil, val[0] end result = @builder.block_pass(val[0], nil) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | arg_splat | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA arg_splat { result = val[0].concat(val[2]) } arg_splat: tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR { if !@static_env.declared_anonymous_restarg? diagnostic :error, :no_anonymous_restarg, nil, val[0] end if @context.in_dynamic_block? && context.in_def && @static_env.declared_anonymous_restarg_in_current_scope? && @static_env.parent_has_anonymous_restarg? 
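# Illustrative: a bare `*` argument forwards a previously declared anonymous
# rest parameter, e.g. `def m(*) = n(*)` parses, while `n(*)` with no `*`
# parameter in scope hits the :no_anonymous_restarg branch above.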
diagnostic :error, :ambiguous_anonymous_restarg, nil, val[0] end result = [ @builder.forwarded_restarg(val[0]) ] } mrhs_arg: mrhs { result = @builder.array(nil, val[0], nil) } | arg_value mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN { @lexer.cmdarg.push(false) } bodystmt kEND { @lexer.cmdarg.pop result = @builder.begin_keyword(val[0], val[2], val[3]) } | tLPAREN_ARG compstmt { @lexer.state = :expr_endarg } tRPAREN { result = @builder.begin(val[0], val[1], val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | k_return { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 begin_defined expr rparen { @context.in_defined = val[3].in_defined result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[4] ], val[5]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | fcall brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | lambda | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE expr_value_do compstmt kEND { result = @builder.loop(:while, val[0], *val[1], val[2], val[3]) } | kUNTIL expr_value_do compstmt kEND { result = @builder.loop(:until, val[0], *val[1], val[2], val[3]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kCASE expr_value opt_terms p_case_body kEND { *in_bodies, (else_t, else_body) = *val[3] result = @builder.case_match(val[0], val[1], in_bodies, else_t, else_body, val[4]) } | kFOR for_var kIN expr_value_do compstmt kEND { result = @builder.for(val[0], val[1], val[2], *val[3], val[4], val[5]) } | k_class cpath superclass { @context.in_class = true local_push } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = 
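# Illustrative: the :class_in_def guard above rejects class definitions
# inside method bodies, e.g. `def build; class C; end; end`, while the
# ordinary `class C < Base; end` form proceeds to def_class below.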
val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr_value term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | defn_head f_arglist bodystmt kEND { def_t, (name_t, ctx) = val[0] result = @builder.def_method(def_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | defs_head f_arglist bodystmt kEND { def_t, recv, dot_t, (name_t, ctx) = val[0] result = @builder.def_singleton(def_t, recv, dot_t, name_t, val[1], val[2], val[3]) local_pop @current_arg_stack.pop @context.in_def = ctx.in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } k_def: kDEF { result = val[0] @context.in_argdef = true } k_return: kRETURN { if @context.in_class && !@context.in_def && !(context.in_block || context.in_lambda) diagnostic :error, :invalid_return, nil, val[0] end } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg { result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA f_rest_marg { result = val[0]. push(val[2]) } | f_marg_list tCOMMA f_rest_marg tCOMMA f_marg_list { result = val[0]. push(val[2]). concat(val[4]) } | f_rest_marg { result = [ val[0] ] } | f_rest_marg tCOMMA f_marg_list { result = [ val[0], *val[2] ] } f_rest_marg: tSTAR f_norm_arg { result = @builder.restarg(val[0], val[1]) } | tSTAR { result = @builder.restarg(val[0]) } f_any_kwrest: f_kwrest | f_no_kwarg f_eq: { @context.in_argdef = false } tEQL { result = val[1] } block_args_tail: f_block_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_block_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_any_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } opt_block_args_tail: tCOMMA block_args_tail { result = val[1] } | # nothing { result = [] } excessed_comma: tCOMMA block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_block_args_tail { result = val[0]. 
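# Illustrative: the k_return guard above rejects a `return` written directly
# in a class or module body, e.g. `class C; return; end`, while `return`
# inside a method, block or lambda is fine.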
concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg excessed_comma | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_block_args_tail { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_block_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_block_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | block_args_tail opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(nil) @context.in_argdef = false result = @builder.args(val[0], val[1], val[2]) } | tPIPE block_param opt_bv_decl tPIPE { @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(nil) @context.in_argdef = false result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: opt_nl { result = [] } | opt_nl tSEMI bv_decls opt_nl { result = val[2] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: tLAMBDA { @static_env.extend_dynamic @max_numparam_stack.push(static: false) result = @context.dup @context.in_lambda = true } f_larglist { @lexer.cmdarg.push(false) } lambda_body { lambda_call = @builder.call_lambda(val[0]) args = @max_numparam_stack.has_numparams? ? @builder.numargs(@max_numparam_stack.top) : val[2] begin_t, body, end_t = val[4] @max_numparam_stack.pop @static_env.unextend @lexer.cmdarg.pop @context.in_lambda = val[1].in_lambda result = @builder.block(lambda_call, begin_t, args, body, end_t) } f_larglist: tLPAREN2 f_args opt_bv_decl tRPAREN { @context.in_argdef = false @max_numparam_stack.has_ordinary_params! result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args { @context.in_argdef = false if val[0].any? @max_numparam_stack.has_ordinary_params! 
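# Illustrative: a lambda either takes an explicit parameter list through
# f_larglist, e.g. `->(x, y) { x + y }`, or no list at all, in which case
# `-> { _1 * 2 }` falls back to the numbered-parameter machinery.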
end result = @builder.args(nil, val[0], nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { @context.in_lambda = val[1].in_lambda result = [ val[0], val[2], val[3] ] } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } bodystmt kEND { @context.in_lambda = val[1].in_lambda result = [ val[0], val[2], val[3] ] } do_block: kDO_BLOCK { result = @context.dup @context.in_block = true } do_body kEND { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call dot_or_colon operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call dot_or_colon operation2 opt_paren_args brace_block { lparen_t, args, rparen_t = val[3] method_call = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | block_call dot_or_colon operation2 command_args do_block { method_call = @builder.call_method(val[0], val[1], val[2], nil, val[3], nil) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } method_call: fcall paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value call_op operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value call_op paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { result = @context.dup @context.in_block = true } brace_body tRCURLY { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } | kDO { result = @context.dup @context.in_block = true } do_body kEND { @context.in_block = val[1].in_block result = [ val[0], *val[2], val[3] ] } brace_body: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } opt_block_param compstmt { args = @max_numparam_stack.has_numparams? ? @builder.numargs(@max_numparam_stack.top) : val[1] result = [ args, val[2] ] @max_numparam_stack.pop @static_env.unextend } do_body: { @static_env.extend_dynamic @max_numparam_stack.push(static: false) } { @lexer.cmdarg.push(false) } opt_block_param bodystmt { args = @max_numparam_stack.has_numparams? ? 
@builder.numargs(@max_numparam_stack.top) : val[2] result = [ args, val[3] ] @max_numparam_stack.pop @static_env.unextend @lexer.cmdarg.pop } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body p_pvtbl: none { @pattern_variables.push } p_pktbl: none { @pattern_hash_keys.push } p_in_kwarg: none { result = @context.in_kwarg @lexer.state = :expr_beg @lexer.command_start = false @context.in_kwarg = true } p_case_body: kIN p_in_kwarg p_pvtbl p_pktbl p_top_expr then { @pattern_variables.pop @pattern_hash_keys.pop @context.in_kwarg = val[1] } compstmt p_cases { result = [ @builder.in_pattern(val[0], *val[4], val[5], val[7]), *val[8] ] } p_cases: opt_else { result = [ val[0] ] } | p_case_body p_top_expr: p_top_expr_body { result = [ val[0], nil ] } | p_top_expr_body kIF_MOD expr_value { result = [ val[0], @builder.if_guard(val[1], val[2]) ] } | p_top_expr_body kUNLESS_MOD expr_value { result = [ val[0], @builder.unless_guard(val[1], val[2]) ] } p_top_expr_body: p_expr | p_expr tCOMMA { # array patterns that end with comma # like 1, 2, # must be emitted as `array_pattern_with_tail` item = @builder.match_with_trailing_comma(val[0], val[1]) result = @builder.array_pattern(nil, [ item ], nil) } | p_expr tCOMMA p_args { result = @builder.array_pattern(nil, [val[0]].concat(val[2]), nil) } | p_find { result = @builder.find_pattern(nil, val[0], nil) } | p_args_tail { result = @builder.array_pattern(nil, val[0], nil) } | p_kwargs { result = @builder.hash_pattern(nil, val[0], nil) } p_expr: p_as p_as: p_expr tASSOC p_variable { result = @builder.match_as(val[0], val[1], val[2]) } | p_alt p_alt: p_alt tPIPE p_expr_basic { result = @builder.match_alt(val[0], val[1], val[2]) } | p_expr_basic p_lparen: tLPAREN2 { result = val[0] @pattern_hash_keys.push } p_lbracket: tLBRACK2 { result = val[0] @pattern_hash_keys.push } p_expr_basic: p_value | p_variable | p_const p_lparen p_args rparen { @pattern_hash_keys.pop pattern = @builder.array_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lparen p_find rparen { @pattern_hash_keys.pop pattern = @builder.find_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lparen p_kwargs rparen { @pattern_hash_keys.pop pattern = @builder.hash_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const tLPAREN2 rparen { pattern = @builder.array_pattern(val[1], nil, val[2]) result = @builder.const_pattern(val[0], val[1], pattern, val[2]) } | p_const p_lbracket p_args rbracket { @pattern_hash_keys.pop pattern = @builder.array_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lbracket p_find rbracket { @pattern_hash_keys.pop pattern = @builder.find_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const p_lbracket p_kwargs rbracket { @pattern_hash_keys.pop pattern = @builder.hash_pattern(nil, val[2], nil) result = @builder.const_pattern(val[0], val[1], pattern, val[3]) } | p_const tLBRACK2 rbracket { pattern = @builder.array_pattern(val[1], nil, val[2]) result = @builder.const_pattern(val[0], val[1], pattern, val[2]) } | tLBRACK p_args rbracket { result = @builder.array_pattern(val[0], val[1], val[2]) } | tLBRACK p_find rbracket { result = @builder.find_pattern(val[0], val[1], val[2]) } | tLBRACK rbracket { result = 
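# Illustrative sketch of the pattern forms handled by p_expr_basic above:
#
#   case node
#   in Point(x:, y:)     then ...   # const_pattern wrapping a hash_pattern
#   in [Integer => n, *] then ...   # array_pattern with a capture
#   in [*, :key, *]      then ...   # find_pattern
#   end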
@builder.array_pattern(val[0], [], val[1]) } | tLBRACE { @pattern_hash_keys.push result = @context.in_kwarg @context.in_kwarg = false } p_kwargs rbrace { @pattern_hash_keys.pop @context.in_kwarg = val[1] result = @builder.hash_pattern(val[0], val[2], val[3]) } | tLBRACE rbrace { result = @builder.hash_pattern(val[0], [], val[1]) } | tLPAREN { @pattern_hash_keys.push } p_expr rparen { @pattern_hash_keys.pop result = @builder.begin(val[0], val[2], val[3]) } p_args: p_expr { result = [ val[0] ] } | p_args_head { result = val[0] } | p_args_head p_arg { result = [ *val[0], val[1] ] } | p_args_head p_rest { result = [ *val[0], val[1] ] } | p_args_head p_rest tCOMMA p_args_post { result = [ *val[0], val[1], *val[3] ] } | p_args_tail p_args_head: p_arg tCOMMA { # array patterns that end with comma # like [1, 2,] # must be emitted as `array_pattern_with_tail` item = @builder.match_with_trailing_comma(val[0], val[1]) result = [ item ] } | p_args_head p_arg tCOMMA { # array patterns that end with comma # like [1, 2,] # must be emitted as `array_pattern_with_tail` last_item = @builder.match_with_trailing_comma(val[1], val[2]) result = [ *val[0], last_item ] } p_args_tail: p_rest { result = [ val[0] ] } | p_rest tCOMMA p_args_post { result = [ val[0], *val[2] ] } p_find: p_rest tCOMMA p_args_post tCOMMA p_rest { result = [ val[0], *val[2], val[4] ] } p_rest: tSTAR tIDENTIFIER { result = @builder.match_rest(val[0], val[1]) } | tSTAR { result = @builder.match_rest(val[0]) } p_args_post: p_arg { result = [ val[0] ] } | p_args_post tCOMMA p_arg { result = [ *val[0], val[2] ] } p_arg: p_expr p_kwargs: p_kwarg tCOMMA p_any_kwrest { result = [ *val[0], *val[2] ] } | p_kwarg { result = val[0] } | p_kwarg tCOMMA { result = val[0] } | p_any_kwrest { result = val[0] } p_kwarg: p_kw { result = [ val[0] ] } | p_kwarg tCOMMA p_kw { result = [ *val[0], val[2] ] } p_kw: p_kw_label p_expr { result = @builder.match_pair(*val[0], val[1]) } | p_kw_label { result = @builder.match_label(*val[0]) } p_kw_label: tLABEL { result = [:label, val[0]] } | tSTRING_BEG string_contents tLABEL_END { result = [:quoted, [val[0], val[1], val[2]]] } p_kwrest: kwrest_mark tIDENTIFIER { result = [ @builder.match_rest(val[0], val[1]) ] } | kwrest_mark { result = [ @builder.match_rest(val[0], nil) ] } p_kwnorest: kwrest_mark kNIL { result = val } p_any_kwrest: p_kwrest | p_kwnorest { result = [ @builder.match_nil_pattern(val[0][0], val[0][1]) ] } p_value: p_primitive | p_primitive tDOT2 p_primitive { result = @builder.range_inclusive(val[0], val[1], val[2]) } | p_primitive tDOT3 p_primitive { result = @builder.range_exclusive(val[0], val[1], val[2]) } | p_primitive tDOT2 { result = @builder.range_inclusive(val[0], val[1], nil) } | p_primitive tDOT3 { result = @builder.range_exclusive(val[0], val[1], nil) } | p_var_ref | p_expr_ref | p_const | tBDOT2 p_primitive { result = @builder.range_inclusive(nil, val[0], val[1]) } | tBDOT3 p_primitive { result = @builder.range_exclusive(nil, val[0], val[1]) } p_primitive: literal | strings | xstring | regexp | words | qwords | symbols | qsymbols | keyword_variable { result = @builder.accessible(val[0]) } | lambda p_variable: tIDENTIFIER { result = @builder.assignable(@builder.match_var(val[0])) } p_var_ref: tCARET tIDENTIFIER { name = val[1][0] unless static_env.declared?(name) diagnostic :error, :undefined_lvar, { :name => name }, val[1] end lvar = @builder.accessible(@builder.ident(val[1])) result = @builder.pin(val[0], lvar) } | tCARET nonlocal_var { non_lvar = @builder.accessible(val[1]) result = 
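# Illustrative: the pin operator accepts locals, non-local variables and
# parenthesised expressions, e.g. `x = 1; 1 in ^x`, `1 in ^@threshold`,
# or `2 in ^(1 + 1)` (p_expr_ref below); pinning an undeclared local is
# reported as :undefined_lvar above.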
@builder.pin(val[0], non_lvar) } p_expr_ref: tCARET tLPAREN expr_value rparen { expr = @builder.begin(val[1], val[2], val[3]) result = @builder.pin(val[0], expr) } p_const: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | p_const tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCONSTANT { result = @builder.const(val[0]) } opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { string = @builder.string_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tSTRING { string = @builder.string(val[0]) result = @builder.dedent_string(string, @lexer.dedent_level) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { string = @builder.xstring_compose(val[0], val[1], val[2]) result = @builder.dedent_string(string, @lexer.dedent_level) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words_sep: tSPACE | words_sep tSPACE words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word words_sep { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } symbols: tSYMBOLS_BEG symbol_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } symbol_list: # nothing { result = [] } | symbol_list word words_sep { result = val[0] << @builder.word(val[1]) } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qsymbols: tQSYMBOLS_BEG qsym_list tSTRING_END { result = @builder.symbols_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT words_sep { result = val[0] << @builder.string_internal(val[1]) } qsym_list: # nothing { result = [] } | qsym_list tSTRING_CONTENT words_sep { result = val[0] << @builder.symbol_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cmdarg.push(false) @lexer.cond.push(false) } compstmt string_dend { @lexer.cmdarg.pop @lexer.cond.pop result = @builder.begin(val[0], val[2], val[3]) } string_dend: tSTRING_DEND string_dvar: nonlocal_var { result = @builder.accessible(val[0]) } | backref symbol: ssym | dsym ssym: tSYMBOL { @lexer.state = :expr_end result = @builder.symbol(val[0]) } dsym: tSYMBEG 
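# Illustrative: the word- and symbol-list rules above cover `%w[a b c]`,
# `%W[a #{b}]`, `%i[x y]` and `%I[...]`, while the dedent_string calls
# handle squiggly heredocs (`<<~END`) by stripping the common indentation.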
string_contents tSTRING_END { @lexer.state = :expr_end result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: simple_numeric { result = val[0] } | tUNARY_NUM simple_numeric =tLOWEST { if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], val[1]) else result = @builder.unary_num(val[0], val[1]) end } simple_numeric: tINTEGER { @lexer.state = :expr_end result = @builder.integer(val[0]) } | tFLOAT { @lexer.state = :expr_end result = @builder.float(val[0]) } | tRATIONAL { @lexer.state = :expr_end result = @builder.rational(val[0]) } | tIMAGINARY { @lexer.state = :expr_end result = @builder.complex(val[0]) } nonlocal_var: tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } user_variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | nonlocal_var keyword_variable: kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: user_variable { result = @builder.accessible(val[0]) } | keyword_variable { result = @builder.accessible(val[0]) } var_lhs: user_variable { result = @builder.assignable(val[0]) } | keyword_variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: tLT { @lexer.state = :expr_value } expr_value term { result = [ val[0], val[2] ] } | # nothing { result = nil } f_opt_paren_args: f_paren_args | none { @context.in_argdef = false result = @builder.args(nil, [], nil) } f_paren_args: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value @context.in_argdef = false } f_arglist: f_paren_args | { result = @context.dup @context.in_kwarg = true @context.in_argdef = true } f_args term { @context.in_kwarg = val[0].in_kwarg @context.in_argdef = false result = @builder.args(nil, val[1], nil) } args_tail: f_kwarg tCOMMA f_kwrest opt_f_block_arg { result = val[0].concat(val[2]).concat(val[3]) } | f_kwarg opt_f_block_arg { result = val[0].concat(val[1]) } | f_any_kwrest opt_f_block_arg { result = val[0].concat(val[1]) } | f_block_arg { result = [ val[0] ] } | args_forward { @static_env.declare_forward_args result = [ @builder.forward_arg(val[0]) ] } opt_args_tail: tCOMMA args_tail { result = val[1] } | # nothing { result = [] } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_args_tail { result = val[0]. concat(val[2]). 
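# Illustrative: simple_numeric above distinguishes `42` (tINTEGER), `4.2`
# (tFLOAT), `3r` (tRATIONAL) and `2i` (tIMAGINARY); a leading minus comes in
# as tUNARY_NUM and is folded via @builder.negate when the builder supports it.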
concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_args_tail { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_args_tail { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_args_tail { result = val[0]. concat(val[2]). concat(val[3]) } | args_tail { result = val[0] } | # nothing { result = [] } args_forward: tBDOT3 { result = val[0] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] @max_numparam_stack.has_ordinary_params! result = val[0] } f_arg_asgn: f_norm_arg { @current_arg_stack.set(val[0][0]) result = val[0] } f_arg_item: f_arg_asgn { @current_arg_stack.set(0) result = @builder.arg(val[0]) } | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_label: tLABEL { check_kwarg_name(val[0]) @static_env.declare val[0][0] @max_numparam_stack.has_ordinary_params! @current_arg_stack.set(val[0][0]) @context.in_argdef = false result = val[0] } f_kw: f_label arg_value { @current_arg_stack.set(nil) @context.in_argdef = true result = @builder.kwoptarg(val[0], val[1]) } | f_label { @current_arg_stack.set(nil) @context.in_argdef = true result = @builder.kwarg(val[0]) } f_block_kw: f_label primary_value { @context.in_argdef = true result = @builder.kwoptarg(val[0], val[1]) } | f_label { @context.in_argdef = true result = @builder.kwarg(val[0]) } f_block_kwarg: f_block_kw { result = [ val[0] ] } | f_block_kwarg tCOMMA f_block_kw { result = val[0] << val[2] } f_kwarg: f_kw { result = [ val[0] ] } | f_kwarg tCOMMA f_kw { result = val[0] << val[2] } kwrest_mark: tPOW | tDSTAR f_no_kwarg: p_kwnorest { result = [ @builder.kwnilarg(val[0][0], val[0][1]) ] } f_kwrest: kwrest_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.kwrestarg(val[0], val[1]) ] } | kwrest_mark { @static_env.declare_anonymous_kwrestarg result = [ @builder.kwrestarg(val[0]) ] } f_opt: f_arg_asgn f_eq arg_value { @current_arg_stack.set(0) @context.in_argdef = true result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: f_arg_asgn f_eq primary_value { @current_arg_stack.set(0) @context.in_argdef = true result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { @static_env.declare_anonymous_restarg result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } | blkarg_mark { @static_env.declare_anonymous_blockarg result = @builder.blockarg(val[0], nil) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs 
trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } | tLABEL { result = @builder.pair_label(val[0]) } | tSTRING_BEG string_contents tLABEL_END arg_value { result = @builder.pair_quoted(val[0], val[1], val[2], val[3]) } | tDSTAR arg_value { result = @builder.kwsplat(val[0], val[1]) } | tDSTAR { if !@static_env.declared_anonymous_kwrestarg? diagnostic :error, :no_anonymous_kwrestarg, nil, val[0] end if @context.in_dynamic_block? && context.in_def && @static_env.declared_anonymous_kwrestarg_in_current_scope? && @static_env.parent_has_anonymous_kwrestarg? diagnostic :error, :ambiguous_anonymous_kwrestarg, nil, val[0] end result = @builder.forwarded_kwrestarg(val[0]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: operation | op operation3: tIDENTIFIER | tFID | op dot_or_colon: call_op | tCOLON2 call_op: tDOT { result = [:dot, val[0][1]] } | tANDDOT { result = [:anddot, val[0][1]] } opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } rbrace: opt_nl tRCURLY { result = val[1] } trailer: opt_nl | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 34 end def default_encoding Encoding::UTF_8 end def endless_method_name(name_t) if !%w[=== == != <= >=].include?(name_t[0]) && name_t[0].end_with?('=') diagnostic :error, :endless_setter, nil, name_t end end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) @max_numparam_stack.push(static: true) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop @max_numparam_stack.pop end def try_declare_numparam(node) name = node.children[0] if name =~ /\A_[1-9]\z/ && !static_env.declared?(name) && @context.in_dynamic_block? # definitely an implicit param location = node.loc.expression if max_numparam_stack.has_ordinary_params? 
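# Illustrative: this branch and the scope walk below reject the two main
# misuses of numbered parameters, e.g. `[1].map { |x| _1 }` (an ordinary
# parameter is already defined) and `proc { _1; proc { _2 } }` (a numbered
# parameter is reused from an outer block).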
diagnostic :error, :ordinary_param_defined, nil, [nil, location] end raw_max_numparam_stack = max_numparam_stack.stack.dup # ignore current block scope raw_max_numparam_stack.pop raw_max_numparam_stack.reverse_each do |outer_scope| if outer_scope[:static] # found an outer scope that can't have numparams # like def/class/etc break else outer_scope_has_numparams = outer_scope[:value] > 0 if outer_scope_has_numparams diagnostic :error, :numparam_used_in_outer_scope, nil, [nil, location] else # for now it's ok, but an outer scope can also be a block # like proc { _1; proc { proc { proc { _2 }} }} # with numparams, so we need to continue end end end static_env.declare(name) max_numparam_stack.register(name[1].to_i) true else false end end parser-3.3.4.2/lib/parser/rubymotion.y000066400000000000000000002332301465510415600176320ustar00rootroot00000000000000class Parser::RubyMotion token kCLASS kMODULE kDEF kUNDEF kBEGIN kRESCUE kENSURE kEND kIF kUNLESS kTHEN kELSIF kELSE kCASE kWHEN kWHILE kUNTIL kFOR kBREAK kNEXT kREDO kRETRY kIN kDO kDO_COND kDO_BLOCK kDO_LAMBDA kRETURN kYIELD kSUPER kSELF kNIL kTRUE kFALSE kAND kOR kNOT kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD kRESCUE_MOD kALIAS kDEFINED klBEGIN klEND k__LINE__ k__FILE__ k__ENCODING__ tIDENTIFIER tFID tGVAR tIVAR tCONSTANT tLABEL tCVAR tNTH_REF tBACK_REF tSTRING_CONTENT tINTEGER tFLOAT tUPLUS tUMINUS tUNARY_NUM tPOW tCMP tEQ tEQQ tNEQ tGEQ tLEQ tANDOP tOROP tMATCH tNMATCH tDOT tDOT2 tDOT3 tAREF tASET tLSHFT tRSHFT tCOLON2 tCOLON3 tOP_ASGN tASSOC tLPAREN tLPAREN2 tRPAREN tLPAREN_ARG tLBRACK tLBRACK2 tRBRACK tLBRACE tLBRACE_ARG tSTAR tSTAR2 tAMPER tAMPER2 tTILDE tPERCENT tDIVIDE tPLUS tMINUS tLT tGT tPIPE tBANG tCARET tLCURLY tRCURLY tBACK_REF2 tSYMBEG tSTRING_BEG tXSTRING_BEG tREGEXP_BEG tREGEXP_OPT tWORDS_BEG tQWORDS_BEG tSTRING_DBEG tSTRING_DVAR tSTRING_END tSTRING tSYMBOL tNL tEH tCOLON tCOMMA tSPACE tSEMI tLAMBDA tLAMBEG tCHARACTER tANDDOT prechigh right tBANG tTILDE tUPLUS right tPOW right tUNARY_NUM tUMINUS left tSTAR2 tDIVIDE tPERCENT left tPLUS tMINUS left tLSHFT tRSHFT left tAMPER2 left tPIPE tCARET left tGT tGEQ tLT tLEQ nonassoc tCMP tEQ tEQQ tNEQ tMATCH tNMATCH left tANDOP left tOROP nonassoc tDOT2 tDOT3 right tEH tCOLON left kRESCUE_MOD right tEQL tOP_ASGN nonassoc kDEFINED right kNOT left kOR kAND nonassoc kIF_MOD kUNLESS_MOD kWHILE_MOD kUNTIL_MOD nonassoc tLBRACE_ARG nonassoc tLOWEST preclow rule program: compstmt bodystmt: compstmt opt_rescue opt_else opt_ensure { rescue_bodies = val[1] else_t, else_ = val[2] ensure_t, ensure_ = val[3] if rescue_bodies.empty? && !else_t.nil? 
diagnostic :warning, :useless_else, nil, else_t end result = @builder.begin_body(val[0], rescue_bodies, else_t, else_, ensure_t, ensure_) } compstmt: stmts opt_terms { result = @builder.compstmt(val[0]) } stmts: # nothing { result = [] } | stmt { result = [ val[0] ] } | stmts terms stmt { result = val[0] << val[2] } | error stmt { result = [ val[1] ] } stmt: kALIAS fitem { @lexer.state = :expr_fname } fitem { result = @builder.alias(val[0], val[1], val[3]) } | kALIAS tGVAR tGVAR { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.gvar(val[2])) } | kALIAS tGVAR tBACK_REF { result = @builder.alias(val[0], @builder.gvar(val[1]), @builder.back_ref(val[2])) } | kALIAS tGVAR tNTH_REF { diagnostic :error, :nth_ref_alias, nil, val[2] } | kUNDEF undef_list { result = @builder.undef_method(val[0], val[1]) } | stmt kIF_MOD expr_value { result = @builder.condition_mod(val[0], nil, val[1], val[2]) } | stmt kUNLESS_MOD expr_value { result = @builder.condition_mod(nil, val[0], val[1], val[2]) } | stmt kWHILE_MOD expr_value { result = @builder.loop_mod(:while, val[0], val[1], val[2]) } | stmt kUNTIL_MOD expr_value { result = @builder.loop_mod(:until, val[0], val[1], val[2]) } | stmt kRESCUE_MOD stmt { rescue_body = @builder.rescue_body(val[1], nil, nil, nil, nil, val[2]) result = @builder.begin_body(val[0], [ rescue_body ]) } | klBEGIN tLCURLY compstmt tRCURLY { if @context.in_def diagnostic :error, :begin_in_method, nil, val[0] end result = @builder.preexe(val[0], val[1], val[2], val[3]) } | klEND tLCURLY compstmt tRCURLY { result = @builder.postexe(val[0], val[1], val[2], val[3]) } | lhs tEQL command_call { result = @builder.assign(val[0], val[1], val[2]) } | mlhs tEQL command_call { result = @builder.multi_assign(val[0], val[1], val[2]) } | var_lhs tOP_ASGN command_call { result = @builder.op_assign(val[0], val[1], val[2]) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN command_call { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN command_call { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | backref tOP_ASGN command_call { @builder.op_assign(val[0], val[1], val[2]) } | lhs tEQL mrhs { result = @builder.assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | mlhs tEQL arg_value { result = @builder.multi_assign(val[0], val[1], val[2]) } | mlhs tEQL mrhs { result = @builder.multi_assign(val[0], val[1], @builder.array(nil, val[2], nil)) } | expr expr: command_call | expr kAND expr { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | expr kOR expr { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kNOT opt_nl expr { result = @builder.not_op(val[0], nil, val[2], nil) } | tBANG command_call { result = @builder.not_op(val[0], nil, val[1], nil) } | arg expr_value: expr command_call: command | block_command | kRETURN call_args { result = @builder.keyword_cmd(:return, val[0], nil, val[1], nil) } | kBREAK call_args { result = @builder.keyword_cmd(:break, val[0], nil, val[1], nil) } | kNEXT call_args { result = @builder.keyword_cmd(:next, val[0], nil, val[1], nil) } block_command: block_call | block_call call_op 
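# Illustrative sketch, not part of this grammar: how the `useless_else`
# diagnostic produced by the bodystmt rule earlier in this grammar surfaces to
# API users. Assumes the gem is loadable as `parser/current`; in this 1.9-era
# grammar the diagnostic is a warning, while recent Ruby grammars report it as
# a hard error, so both paths are handled below.
require 'parser/current'

parser = Parser::CurrentRuby.default_parser
parser.diagnostics.ignore_warnings = false
parser.diagnostics.consumer = ->(diag) { puts diag.render }

buffer = Parser::Source::Buffer.new('(useless-else)',
                                    source: "begin\n  1\nelse\n  2\nend\n")
begin
  parser.parse(buffer)
rescue Parser::SyntaxError
  # newer grammars raise here; the consumer above has already printed the message
end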
operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], *val[3]) } | block_call tCOLON2 operation2 command_args { result = @builder.call_method(val[0], val[1], val[2], *val[3]) } cmd_brace_block: tLBRACE_ARG { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } command: operation command_args =tLOWEST { result = @builder.call_method(nil, nil, val[0], *val[1]) } | operation command_args cmd_brace_block { method_call = @builder.call_method(nil, nil, val[0], *val[1]) begin_t, args, body, end_t = val[2] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value call_op operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], *val[3]) } | primary_value call_op operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], *val[3]) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | primary_value tCOLON2 operation2 command_args =tLOWEST { result = @builder.call_method(val[0], val[1], val[2], *val[3]) } | primary_value tCOLON2 operation2 command_args cmd_brace_block { method_call = @builder.call_method(val[0], val[1], val[2], *val[3]) begin_t, args, body, end_t = val[4] result = @builder.block(method_call, begin_t, args, body, end_t) } | kSUPER command_args { result = @builder.keyword_cmd(:super, val[0], *val[1]) } | kYIELD command_args { result = @builder.keyword_cmd(:yield, val[0], *val[1]) } mlhs: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_inner: mlhs_basic { result = @builder.multi_lhs(nil, val[0], nil) } | tLPAREN mlhs_inner rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } mlhs_basic: mlhs_head | mlhs_head mlhs_item { result = val[0]. push(val[1]) } | mlhs_head tSTAR mlhs_node { result = val[0]. push(@builder.splat(val[1], val[2])) } | mlhs_head tSTAR mlhs_node tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1], val[2])). concat(val[4]) } | mlhs_head tSTAR { result = val[0]. push(@builder.splat(val[1])) } | mlhs_head tSTAR tCOMMA mlhs_post { result = val[0]. push(@builder.splat(val[1])). 
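# Illustrative sketch, not part of this grammar: the `command` rules above
# cover paren-less method calls. Output shown with Parser::CurrentRuby and the
# default (modern) builder, assuming the gem is loadable as `parser/current`;
# the rule structure is the same in this RubyMotion grammar.
require 'parser/current'

p Parser::CurrentRuby.parse('puts "hello"')
# => s(:send, nil, :puts, s(:str, "hello"))

p Parser::CurrentRuby.parse('foo.bar 1, 2')
# => s(:send, s(:send, nil, :foo), :bar, s(:int, 1), s(:int, 2))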
concat(val[3]) } | tSTAR mlhs_node { result = [ @builder.splat(val[0], val[1]) ] } | tSTAR mlhs_node tCOMMA mlhs_post { result = [ @builder.splat(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.splat(val[0]) ] } | tSTAR tCOMMA mlhs_post { result = [ @builder.splat(val[0]), *val[2] ] } mlhs_item: mlhs_node | tLPAREN mlhs_inner rparen { result = @builder.begin(val[0], val[1], val[2]) } mlhs_head: mlhs_item tCOMMA { result = [ val[0] ] } | mlhs_head mlhs_item tCOMMA { result = val[0] << val[1] } mlhs_post: mlhs_item { result = [ val[0] ] } | mlhs_post tCOMMA mlhs_item { result = val[0] << val[2] } mlhs_node: variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } lhs: variable { result = @builder.assignable(val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index_asgn(val[0], val[1], val[2], val[3]) } | primary_value call_op tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tIDENTIFIER { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value call_op tCONSTANT { result = @builder.attr_asgn(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.assignable( @builder.const_fetch(val[0], val[1], val[2])) } | tCOLON3 tCONSTANT { result = @builder.assignable( @builder.const_global(val[0], val[1])) } | backref { result = @builder.assignable(val[0]) } cname: tIDENTIFIER { diagnostic :error, :module_name_const, nil, val[0] } | tCONSTANT cpath: tCOLON3 cname { result = @builder.const_global(val[0], val[1]) } | cname { result = @builder.const(val[0]) } | primary_value tCOLON2 cname { result = @builder.const_fetch(val[0], val[1], val[2]) } fname: tIDENTIFIER | tCONSTANT | tFID | op | reswords fsym: fname { result = @builder.symbol_internal(val[0]) } | symbol fitem: fsym | dsym undef_list: fitem { result = [ val[0] ] } | undef_list tCOMMA { @lexer.state = :expr_fname } fitem { result = val[0] << val[3] } op: tPIPE | tCARET | tAMPER2 | tCMP | tEQ | tEQQ | tMATCH | tNMATCH | tGT | tGEQ | tLT | tLEQ | tNEQ | tLSHFT | tRSHFT | tPLUS | tMINUS | tSTAR2 | tSTAR | tDIVIDE | tPERCENT | tPOW | tBANG | tTILDE | tUPLUS | tUMINUS | tAREF | tASET | tBACK_REF2 reswords: k__LINE__ | k__FILE__ | k__ENCODING__ | klBEGIN | klEND | kALIAS | kAND | kBEGIN | kBREAK | kCASE | kCLASS | kDEF | kDEFINED | kDO | kELSE | kELSIF | kEND | kENSURE | kFALSE | kFOR | kIN | kMODULE | kNEXT | kNIL | kNOT | kOR | kREDO | kRESCUE | kRETRY | kRETURN | kSELF | kSUPER | kTHEN | kTRUE | kUNDEF | kWHEN | kYIELD | kIF | kUNLESS | kWHILE | kUNTIL arg: lhs tEQL arg { result = @builder.assign(val[0], val[1], val[2]) } | lhs tEQL arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.assign(val[0], val[1], rescue_) } | var_lhs tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | 
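# Illustrative sketch, not part of this grammar: the mlhs/lhs rules above drive
# multiple assignment, with splat targets wrapped in a splat node. Expected
# modern AST, assuming the gem is loadable as `parser/current`.
require 'parser/current'

p Parser::CurrentRuby.parse('a, *b = 1, 2, 3')
# => s(:masgn,
#      s(:mlhs, s(:lvasgn, :a), s(:splat, s(:lvasgn, :b))),
#      s(:array, s(:int, 1), s(:int, 2), s(:int, 3)))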
var_lhs tOP_ASGN arg kRESCUE_MOD arg { rescue_body = @builder.rescue_body(val[3], nil, nil, nil, nil, val[4]) rescue_ = @builder.begin_body(val[2], [ rescue_body ]) result = @builder.op_assign(val[0], val[1], rescue_) } | primary_value tLBRACK2 opt_call_args rbracket tOP_ASGN arg { result = @builder.op_assign( @builder.index( val[0], val[1], val[2], val[3]), val[4], val[5]) } | primary_value call_op tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value call_op tCONSTANT tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tIDENTIFIER tOP_ASGN arg { result = @builder.op_assign( @builder.call_method( val[0], val[1], val[2]), val[3], val[4]) } | primary_value tCOLON2 tCONSTANT tOP_ASGN arg { diagnostic :error, :dynamic_const, nil, val[2], [ val[3] ] } | tCOLON3 tCONSTANT tOP_ASGN arg { diagnostic :error, :dynamic_const, nil, val[1], [ val[2] ] } | backref tOP_ASGN arg { result = @builder.op_assign(val[0], val[1], val[2]) } | arg tDOT2 arg { result = @builder.range_inclusive(val[0], val[1], val[2]) } | arg tDOT3 arg { result = @builder.range_exclusive(val[0], val[1], val[2]) } | arg tPLUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMINUS arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tSTAR2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tDIVIDE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPERCENT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tPOW arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tUNARY_NUM tINTEGER tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( @builder.integer(val[1]), val[2], val[3])) } | tUNARY_NUM tFLOAT tPOW arg { result = @builder.unary_op(val[0], @builder.binary_op( @builder.float(val[1]), val[2], val[3])) } | tUPLUS arg { result = @builder.unary_op(val[0], val[1]) } | tUMINUS arg { result = @builder.unary_op(val[0], val[1]) } | arg tPIPE arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCARET arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tAMPER2 arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tCMP arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tGEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tLEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tEQQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tNEQ arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tMATCH arg { result = @builder.match_op(val[0], val[1], val[2]) } | arg tNMATCH arg { result = @builder.binary_op(val[0], val[1], val[2]) } | tBANG arg { result = @builder.not_op(val[0], nil, val[1], nil) } | tTILDE arg { result = @builder.unary_op(val[0], val[1]) } | arg tLSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tRSHFT arg { result = @builder.binary_op(val[0], val[1], val[2]) } | arg tANDOP arg { result = @builder.logical_op(:and, val[0], val[1], val[2]) } | arg tOROP arg { result = @builder.logical_op(:or, val[0], val[1], val[2]) } | kDEFINED opt_nl arg { result = @builder.keyword_cmd(:defined?, val[0], nil, [ val[2] ], nil) } | arg tEH arg opt_nl 
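# Illustrative sketch, not part of this grammar: the arg rules above, combined
# with the precedence table at the top of the grammar, yield nested send nodes
# and the op-asgn node family. Assuming the gem is loadable as `parser/current`;
# modern node format shown.
require 'parser/current'

p Parser::CurrentRuby.parse('1 + 2 * 3')
# => s(:send, s(:int, 1), :+, s(:send, s(:int, 2), :*, s(:int, 3)))

p Parser::CurrentRuby.parse('x ||= 1')
# => s(:or_asgn, s(:lvasgn, :x), s(:int, 1))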
tCOLON arg { result = @builder.ternary(val[0], val[1], val[2], val[4], val[5]) } | primary arg_value: arg aref_args: none | args trailer | args tCOMMA assocs trailer { result = val[0] << @builder.associate(nil, val[2], nil) } | assocs trailer { result = [ @builder.associate(nil, val[0], nil) ] } paren_args: tLPAREN2 opt_call_args rparen { result = val } opt_paren_args: # nothing { result = [ nil, [], nil ] } | paren_args opt_call_args: # nothing { result = [] } | call_args call_args: command { result = [ val[0] ] } | args opt_block_arg { result = val[0].concat(val[1]) } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil) ] result.concat(val[1]) } | args tCOMMA assocs opt_block_arg { assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[3]) } | args tCOMMA assocs tCOMMA args opt_block_arg { val[2][-1] = @builder.objc_varargs(val[2][-1], val[4]) assocs = @builder.associate(nil, val[2], nil) result = val[0] << assocs result.concat(val[5]) } | block_arg { result = [ val[0] ] } call_args2: arg_value tCOMMA args opt_block_arg { result = [ val[0], *val[2].concat(val[3]) ] } | arg_value tCOMMA block_arg { result = [ val[0], val[2] ] } | assocs opt_block_arg { result = [ @builder.associate(nil, val[0], nil), *val[1] ] } | arg_value tCOMMA assocs opt_block_arg { result = [ val[0], @builder.associate(nil, val[2], nil), *val[3] ] } | arg_value tCOMMA args tCOMMA assocs opt_block_arg { result = [ val[0], *val[2]. push(@builder.associate(nil, val[4], nil)). concat(val[5]) ] } | block_arg { result = [ val[0] ] } command_args: { result = @lexer.cmdarg.dup @lexer.cmdarg.push(true) } open_args { @lexer.cmdarg = val[0] result = val[1] } open_args: call_args { result = [ nil, val[0], nil ] } | tLPAREN_ARG { @lexer.state = :expr_endarg } rparen { result = [ val[0], [], val[2] ] } | tLPAREN_ARG call_args2 { @lexer.state = :expr_endarg } rparen { result = [ val[0], val[1], val[3] ] } block_arg: tAMPER arg_value { result = @builder.block_pass(val[0], val[1]) } opt_block_arg: tCOMMA block_arg { result = [ val[1] ] } | tCOMMA { result = [] } | # nothing { result = [] } args: arg_value { result = [ val[0] ] } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } | args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } mrhs: args tCOMMA arg_value { result = val[0] << val[2] } | args tCOMMA tSTAR arg_value { result = val[0] << @builder.splat(val[2], val[3]) } | tSTAR arg_value { result = [ @builder.splat(val[0], val[1]) ] } primary: literal | strings | xstring | regexp | words | qwords | var_ref | backref | tFID { result = @builder.call_method(nil, nil, val[0]) } | kBEGIN bodystmt kEND { result = @builder.begin_keyword(val[0], val[1], val[2]) } | tLPAREN_ARG expr { @lexer.state = :expr_endarg } rparen { result = @builder.begin(val[0], val[1], val[3]) } | tLPAREN compstmt tRPAREN { result = @builder.begin(val[0], val[1], val[2]) } | primary_value tCOLON2 tCONSTANT { result = @builder.const_fetch(val[0], val[1], val[2]) } | tCOLON3 tCONSTANT { result = @builder.const_global(val[0], val[1]) } | tLBRACK aref_args tRBRACK { result = @builder.array(val[0], val[1], val[2]) } | tLBRACE assoc_list tRCURLY { result = @builder.associate(val[0], val[1], val[2]) } | kRETURN { result = @builder.keyword_cmd(:return, val[0]) } | kYIELD tLPAREN2 call_args rparen { result = @builder.keyword_cmd(:yield, val[0], val[1], val[2], val[3]) } | kYIELD tLPAREN2 rparen { result = 
@builder.keyword_cmd(:yield, val[0], val[1], [], val[2]) } | kYIELD { result = @builder.keyword_cmd(:yield, val[0]) } | kDEFINED opt_nl tLPAREN2 expr rparen { result = @builder.keyword_cmd(:defined?, val[0], val[2], [ val[3] ], val[4]) } | kNOT tLPAREN2 expr rparen { result = @builder.not_op(val[0], val[1], val[2], val[3]) } | kNOT tLPAREN2 rparen { result = @builder.not_op(val[0], val[1], nil, val[2]) } | operation brace_block { method_call = @builder.call_method(nil, nil, val[0]) begin_t, args, body, end_t = val[1] result = @builder.block(method_call, begin_t, args, body, end_t) } | method_call | method_call brace_block { begin_t, args, body, end_t = val[1] result = @builder.block(val[0], begin_t, args, body, end_t) } | tLAMBDA { result = @context.dup @context.in_lambda = true } lambda { lambda_call = @builder.call_lambda(val[0]) args, (begin_t, body, end_t) = val[2] result = @builder.block(lambda_call, begin_t, args, body, end_t) @context.in_lambda = val[1].in_lambda } | kIF expr_value then compstmt if_tail kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, val[5]) } | kUNLESS expr_value then compstmt opt_else kEND { else_t, else_ = val[4] result = @builder.condition(val[0], val[1], val[2], else_, else_t, val[3], val[5]) } | kWHILE { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:while, val[0], val[2], val[3], val[5], val[6]) } | kUNTIL { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.loop(:until, val[0], val[2], val[3], val[5], val[6]) } | kCASE expr_value opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[3] result = @builder.case(val[0], val[1], when_bodies, else_t, else_body, val[4]) } | kCASE opt_terms case_body kEND { *when_bodies, (else_t, else_body) = *val[2] result = @builder.case(val[0], nil, when_bodies, else_t, else_body, val[3]) } | kFOR for_var kIN { @lexer.cond.push(true) } expr_value do { @lexer.cond.pop } compstmt kEND { result = @builder.for(val[0], val[1], val[2], val[4], val[5], val[7], val[8]) } | k_class cpath superclass { local_push @context.in_class = true } bodystmt kEND { k_class, ctx = val[0] if @context.in_def diagnostic :error, :class_in_def, nil, k_class end lt_t, superclass = val[2] result = @builder.def_class(k_class, val[1], lt_t, superclass, val[4], val[5]) local_pop @context.in_class = ctx.in_class } | k_class tLSHFT expr term { @context.in_def = false @context.in_class = false local_push } bodystmt kEND { k_class, ctx = val[0] result = @builder.def_sclass(k_class, val[1], val[2], val[5], val[6]) local_pop @context.in_def = ctx.in_def @context.in_class = ctx.in_class } | k_module cpath { @context.in_class = true local_push } bodystmt kEND { k_mod, ctx = val[0] if @context.in_def diagnostic :error, :module_in_def, nil, k_mod end result = @builder.def_module(k_mod, val[1], val[3], val[4]) local_pop @context.in_class = ctx.in_class } | kDEF fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_method(val[0], val[1], val[3], val[4], val[5]) local_pop @context.in_def = val[2].in_def } | kDEF singleton dot_or_colon { @lexer.state = :expr_fname } fname { local_push result = context.dup @context.in_def = true } f_arglist bodystmt kEND { result = @builder.def_singleton(val[0], val[1], val[2], val[4], val[6], val[7], val[8]) local_pop @context.in_def = val[5].in_def } | kBREAK { result = @builder.keyword_cmd(:break, val[0]) } | kNEXT { 
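# Illustrative sketch, not part of this grammar: the k_class branch above
# rejects a class body nested directly inside a method definition
# (`:class_in_def`). With the default parser configuration all error
# diagnostics are fatal, so this surfaces as Parser::SyntaxError. Assuming the
# gem is loadable as `parser/current`; the exact message wording may vary.
require 'parser/current'

begin
  Parser::CurrentRuby.parse("def m\n  class C; end\nend\n")
rescue Parser::SyntaxError => error
  puts error.diagnostic.render # e.g. "class definition in method body"
end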
result = @builder.keyword_cmd(:next, val[0]) } | kREDO { result = @builder.keyword_cmd(:redo, val[0]) } | kRETRY { result = @builder.keyword_cmd(:retry, val[0]) } primary_value: primary k_class: kCLASS { result = [ val[0], @context.dup ] } k_module: kMODULE { result = [ val[0], @context.dup ] } then: term | kTHEN | term kTHEN { result = val[1] } do: term | kDO_COND if_tail: opt_else | kELSIF expr_value then compstmt if_tail { else_t, else_ = val[4] result = [ val[0], @builder.condition(val[0], val[1], val[2], val[3], else_t, else_, nil), ] } opt_else: none | kELSE compstmt { result = val } for_var: lhs | mlhs f_marg: f_norm_arg | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_marg_list: f_marg { result = [ val[0] ] } | f_marg_list tCOMMA f_marg { result = val[0] << val[2] } f_margs: f_marg_list | f_marg_list tCOMMA tSTAR f_norm_arg { result = val[0]. push(@builder.objc_restarg(val[2], val[3])) } | f_marg_list tCOMMA tSTAR f_norm_arg tCOMMA f_marg_list { result = val[0]. push(@builder.objc_restarg(val[2], val[3])). concat(val[5]) } | f_marg_list tCOMMA tSTAR { result = val[0]. push(@builder.objc_restarg(val[2])) } | f_marg_list tCOMMA tSTAR tCOMMA f_marg_list { result = val[0]. push(@builder.objc_restarg(val[2])). concat(val[4]) } | tSTAR f_norm_arg { result = [ @builder.objc_restarg(val[0], val[1]) ] } | tSTAR f_norm_arg tCOMMA f_marg_list { result = [ @builder.objc_restarg(val[0], val[1]), *val[3] ] } | tSTAR { result = [ @builder.objc_restarg(val[0]) ] } | tSTAR tCOMMA f_marg_list { result = [ @builder.objc_restarg(val[0]), *val[2] ] } block_param: f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_block_optarg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_f_block_arg { if val[1].empty? && val[0].size == 1 result = [@builder.procarg0(val[0][0])] else result = val[0].concat(val[1]) end } | f_block_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_block_optarg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_block_optarg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). 
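# Illustrative sketch, not part of this grammar: the `f_arg opt_f_block_arg`
# branch above wraps a lone block parameter in a procarg0 node, while two or
# more parameters stay plain args. Assuming the gem is loadable as
# `parser/current`; with the default (non-legacy) builder settings the output
# looks roughly like this.
require 'parser/current'

p Parser::CurrentRuby.parse('each { |x| x }')
# => s(:block, s(:send, nil, :each),
#      s(:args, s(:procarg0, s(:arg, :x))), s(:lvar, :x))

p Parser::CurrentRuby.parse('each { |x, y| x }')
# => s(:block, s(:send, nil, :each),
#      s(:args, s(:arg, :x), s(:arg, :y)), s(:lvar, :x))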
concat(val[3]) } | f_block_arg { result = [ val[0] ] } opt_block_param: # nothing { result = @builder.args(nil, [], nil) } | block_param_def { @lexer.state = :expr_value } block_param_def: tPIPE opt_bv_decl tPIPE { result = @builder.args(val[0], val[1], val[2]) } | tOROP { result = @builder.args(val[0], [], val[0]) } | tPIPE block_param opt_bv_decl tPIPE { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } opt_bv_decl: # nothing { result = [] } | tSEMI bv_decls { result = val[1] } bv_decls: bvar { result = [ val[0] ] } | bv_decls tCOMMA bvar { result = val[0] << val[2] } bvar: tIDENTIFIER { @static_env.declare val[0][0] result = @builder.shadowarg(val[0]) } | f_bad_arg lambda: { @static_env.extend_dynamic } f_larglist lambda_body { result = [ val[1], val[2] ] @static_env.unextend } f_larglist: tLPAREN2 f_args opt_bv_decl rparen { result = @builder.args(val[0], val[1].concat(val[2]), val[3]) } | f_args opt_bv_decl { result = @builder.args(nil, val[0].concat(val[1]), nil) } lambda_body: tLAMBEG { result = @context.dup @context.in_lambda = true } compstmt tRCURLY { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } | kDO_LAMBDA { result = @context.dup @context.in_lambda = true } compstmt kEND { result = [ val[0], val[2], val[3] ] @context.in_lambda = val[1].in_lambda } do_block: kDO_BLOCK { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } block_call: command do_block { begin_t, block_args, body, end_t = val[1] result = @builder.block(val[0], begin_t, block_args, body, end_t) } | block_call call_op operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | block_call tCOLON2 operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } method_call: operation paren_args { lparen_t, args, rparen_t = val[1] result = @builder.call_method(nil, nil, val[0], lparen_t, args, rparen_t) } | primary_value call_op operation2 opt_paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation2 paren_args { lparen_t, args, rparen_t = val[3] result = @builder.call_method(val[0], val[1], val[2], lparen_t, args, rparen_t) } | primary_value tCOLON2 operation3 { result = @builder.call_method(val[0], val[1], val[2]) } | primary_value call_op paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | primary_value tCOLON2 paren_args { lparen_t, args, rparen_t = val[2] result = @builder.call_method(val[0], val[1], nil, lparen_t, args, rparen_t) } | kSUPER paren_args { lparen_t, args, rparen_t = val[1] result = @builder.keyword_cmd(:super, val[0], lparen_t, args, rparen_t) } | kSUPER { result = @builder.keyword_cmd(:zsuper, val[0]) } | primary_value tLBRACK2 opt_call_args rbracket { result = @builder.index(val[0], val[1], val[2], val[3]) } brace_block: tLCURLY { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt tRCURLY { result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } | kDO { @static_env.extend_dynamic result = @context.dup @context.in_block = true } opt_block_param compstmt kEND { 
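# Illustrative sketch, not part of this grammar: the lambda / lambda_body rules
# above produce a lambda node wrapped in a block node. Assuming the gem is
# loadable as `parser/current`; modern node format shown.
require 'parser/current'

p Parser::CurrentRuby.parse('->(x) { x * 2 }')
# => s(:block,
#      s(:lambda),
#      s(:args, s(:arg, :x)),
#      s(:send, s(:lvar, :x), :*, s(:int, 2)))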
result = [ val[0], val[2], val[3], val[4] ] @static_env.unextend @context.in_block = val[1].in_block } case_body: kWHEN args then compstmt cases { result = [ @builder.when(val[0], val[1], val[2], val[3]), *val[4] ] } cases: opt_else { result = [ val[0] ] } | case_body opt_rescue: kRESCUE exc_list exc_var then compstmt opt_rescue { assoc_t, exc_var = val[2] if val[1] exc_list = @builder.array(nil, val[1], nil) end result = [ @builder.rescue_body(val[0], exc_list, assoc_t, exc_var, val[3], val[4]), *val[5] ] } | { result = [] } exc_list: arg_value { result = [ val[0] ] } | mrhs | none exc_var: tASSOC lhs { result = [ val[0], val[1] ] } | none opt_ensure: kENSURE compstmt { result = [ val[0], val[1] ] } | none literal: numeric | symbol | dsym strings: string { result = @builder.string_compose(nil, val[0], nil) } string: string1 { result = [ val[0] ] } | string string1 { result = val[0] << val[1] } string1: tSTRING_BEG string_contents tSTRING_END { result = @builder.string_compose(val[0], val[1], val[2]) } | tSTRING { result = @builder.string(val[0]) } | tCHARACTER { result = @builder.character(val[0]) } xstring: tXSTRING_BEG xstring_contents tSTRING_END { result = @builder.xstring_compose(val[0], val[1], val[2]) } regexp: tREGEXP_BEG regexp_contents tSTRING_END tREGEXP_OPT { opts = @builder.regexp_options(val[3]) result = @builder.regexp_compose(val[0], val[1], val[2], opts) } words: tWORDS_BEG word_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } word_list: # nothing { result = [] } | word_list word tSPACE { result = val[0] << @builder.word(val[1]) } word: string_content { result = [ val[0] ] } | word string_content { result = val[0] << val[1] } qwords: tQWORDS_BEG qword_list tSTRING_END { result = @builder.words_compose(val[0], val[1], val[2]) } qword_list: # nothing { result = [] } | qword_list tSTRING_CONTENT tSPACE { result = val[0] << @builder.string_internal(val[1]) } string_contents: # nothing { result = [] } | string_contents string_content { result = val[0] << val[1] } xstring_contents: # nothing { result = [] } | xstring_contents string_content { result = val[0] << val[1] } regexp_contents: # nothing { result = [] } | regexp_contents string_content { result = val[0] << val[1] } string_content: tSTRING_CONTENT { result = @builder.string_internal(val[0]) } | tSTRING_DVAR string_dvar { result = val[1] } | tSTRING_DBEG { @lexer.cond.push(false) @lexer.cmdarg.push(false) } compstmt tRCURLY { @lexer.cond.lexpop @lexer.cmdarg.lexpop result = @builder.begin(val[0], val[2], val[3]) } string_dvar: tGVAR { result = @builder.gvar(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | backref symbol: tSYMBOL { result = @builder.symbol(val[0]) } dsym: tSYMBEG xstring_contents tSTRING_END { result = @builder.symbol_compose(val[0], val[1], val[2]) } numeric: tINTEGER { result = @builder.integer(val[0]) } | tFLOAT { result = @builder.float(val[0]) } | tUNARY_NUM tINTEGER =tLOWEST { num = @builder.integer(val[1]) if @builder.respond_to? :negate # AST builder interface compatibility result = @builder.negate(val[0], num) else result = @builder.unary_num(val[0], num) end } | tUNARY_NUM tFLOAT =tLOWEST { num = @builder.float(val[1]) if @builder.respond_to? 
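# Illustrative sketch, not part of this grammar: the string_contents /
# string_content rules above interleave literal segments with interpolations,
# which the builder composes into a dstr node. Assuming the gem is loadable as
# `parser/current`.
require 'parser/current'

p Parser::CurrentRuby.parse('"a#{b}c"')
# => s(:dstr, s(:str, "a"), s(:begin, s(:send, nil, :b)), s(:str, "c"))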
:negate # AST builder interface compatibility result = @builder.negate(val[0], num) else result = @builder.unary_num(val[0], num) end } variable: tIDENTIFIER { result = @builder.ident(val[0]) } | tIVAR { result = @builder.ivar(val[0]) } | tGVAR { result = @builder.gvar(val[0]) } | tCONSTANT { result = @builder.const(val[0]) } | tCVAR { result = @builder.cvar(val[0]) } | kNIL { result = @builder.nil(val[0]) } | kSELF { result = @builder.self(val[0]) } | kTRUE { result = @builder.true(val[0]) } | kFALSE { result = @builder.false(val[0]) } | k__FILE__ { result = @builder.__FILE__(val[0]) } | k__LINE__ { result = @builder.__LINE__(val[0]) } | k__ENCODING__ { result = @builder.__ENCODING__(val[0]) } var_ref: variable { result = @builder.accessible(val[0]) } var_lhs: variable { result = @builder.assignable(val[0]) } backref: tNTH_REF { result = @builder.nth_ref(val[0]) } | tBACK_REF { result = @builder.back_ref(val[0]) } superclass: term { result = nil } | tLT expr_value term { result = [ val[0], val[1] ] } | error term { yyerrok result = nil } f_arglist: tLPAREN2 f_args rparen { result = @builder.args(val[0], val[1], val[2]) @lexer.state = :expr_value } | f_args term { result = @builder.args(nil, val[0], nil) } f_args: f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[6]). concat(val[7]) } | f_arg tCOMMA f_optarg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_optarg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_arg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_rest_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[4]). concat(val[5]) } | f_optarg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_optarg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). concat(val[3]) } | f_rest_arg opt_f_block_arg { result = val[0]. concat(val[1]) } | f_rest_arg tCOMMA f_arg opt_f_block_arg { result = val[0]. concat(val[2]). 
concat(val[3]) } | f_block_arg { result = [ val[0] ] } | # nothing { result = [] } f_bad_arg: tCONSTANT { diagnostic :error, :argument_const, nil, val[0] } | tIVAR { diagnostic :error, :argument_ivar, nil, val[0] } | tGVAR { diagnostic :error, :argument_gvar, nil, val[0] } | tCVAR { diagnostic :error, :argument_cvar, nil, val[0] } f_norm_arg: f_bad_arg | tIDENTIFIER { @static_env.declare val[0][0] result = @builder.arg(val[0]) } | tIDENTIFIER tASSOC tIDENTIFIER { @static_env.declare val[2][0] result = @builder.objc_kwarg(val[0], val[1], val[2]) } | tLABEL tIDENTIFIER { @static_env.declare val[1][0] result = @builder.objc_kwarg(val[0], nil, val[1]) } f_arg_item: f_norm_arg | tLPAREN f_margs rparen { result = @builder.multi_lhs(val[0], val[1], val[2]) } f_arg: f_arg_item { result = [ val[0] ] } | f_arg tCOMMA f_arg_item { result = val[0] << val[2] } f_opt: tIDENTIFIER tEQL arg_value { @static_env.declare val[0][0] result = @builder.optarg(val[0], val[1], val[2]) } f_block_opt: tIDENTIFIER tEQL primary_value { @static_env.declare val[0][0] result = @builder.optarg(val[0], val[1], val[2]) } f_block_optarg: f_block_opt { result = [ val[0] ] } | f_block_optarg tCOMMA f_block_opt { result = val[0] << val[2] } f_optarg: f_opt { result = [ val[0] ] } | f_optarg tCOMMA f_opt { result = val[0] << val[2] } restarg_mark: tSTAR2 | tSTAR f_rest_arg: restarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = [ @builder.restarg(val[0], val[1]) ] } | restarg_mark { result = [ @builder.restarg(val[0]) ] } blkarg_mark: tAMPER2 | tAMPER f_block_arg: blkarg_mark tIDENTIFIER { @static_env.declare val[1][0] result = @builder.blockarg(val[0], val[1]) } opt_f_block_arg: tCOMMA f_block_arg { result = [ val[1] ] } | # nothing { result = [] } singleton: var_ref | tLPAREN2 expr rparen { result = val[1] } assoc_list: # nothing { result = [] } | assocs trailer assocs: assoc { result = [ val[0] ] } | assocs tCOMMA assoc { result = val[0] << val[2] } assoc: arg_value tASSOC arg_value { result = @builder.pair(val[0], val[1], val[2]) } | tLABEL arg_value { result = @builder.pair_keyword(val[0], val[1]) } operation: tIDENTIFIER | tCONSTANT | tFID operation2: tIDENTIFIER | tCONSTANT | tFID | op operation3: tIDENTIFIER | tFID | op dot_or_colon: call_op | tCOLON2 call_op: tDOT { result = [:dot, val[0][1]] } | tANDDOT { result = [:anddot, val[0][1]] } opt_terms: | terms opt_nl: | tNL rparen: opt_nl tRPAREN { result = val[1] } rbracket: opt_nl tRBRACK { result = val[1] } trailer: | tNL | tCOMMA term: tSEMI { yyerrok } | tNL terms: term | terms tSEMI none: # nothing { result = nil } end ---- header require_relative '../parser' ---- inner def version 19 # closest released match: v1_9_0_2 end def default_encoding Encoding::BINARY end def local_push @static_env.extend_static @lexer.cmdarg.push(false) @lexer.cond.push(false) end def local_pop @static_env.unextend @lexer.cmdarg.pop @lexer.cond.pop end parser-3.3.4.2/lib/parser/runner.rb000066400000000000000000000164121465510415600170700ustar00rootroot00000000000000# frozen_string_literal: true require 'benchmark' require 'find' require 'optparse' require_relative '../parser' module Parser class Runner def self.go(options) new.execute(options) end def initialize @option_parser = OptionParser.new { |opts| setup_option_parsing(opts) } @legacy = {} @parser_class = nil @parser = nil @files = [] @fragments = [] @warnings = false @benchmark = false @source_count = 0 @source_size = 0 end def execute(options) parse_options(options) setup_builder_default prepare_parser process_all_input 
end private LEGACY_MODES = %i[lambda procarg0 encoding index arg_inside_procarg0 forward_arg kwargs match_pattern].freeze def runner_name raise NotImplementedError, "implement #{self.class}##{__callee__}" end def setup_option_parsing(opts) opts.banner = "Usage: #{runner_name} [options] FILE|DIRECTORY..." opts.on_tail '-h', '--help', 'Display this help message and exit' do puts opts.help puts <<-HELP If you specify a DIRECTORY, then all *.rb files are fetched from it recursively and appended to the file list. The default parsing mode is for current Ruby (#{RUBY_VERSION}). HELP exit end opts.on_tail '-V', '--version', 'Output version information and exit' do puts "#{runner_name} based on parser version #{Parser::VERSION}" exit end opts.on '--18', 'Parse as Ruby 1.8.7 would' do require_relative 'ruby18' @parser_class = Parser::Ruby18 end opts.on '--19', 'Parse as Ruby 1.9.3 would' do require_relative 'ruby19' @parser_class = Parser::Ruby19 end opts.on '--20', 'Parse as Ruby 2.0 would' do require_relative 'ruby20' @parser_class = Parser::Ruby20 end opts.on '--21', 'Parse as Ruby 2.1 would' do require_relative 'ruby21' @parser_class = Parser::Ruby21 end opts.on '--22', 'Parse as Ruby 2.2 would' do require_relative 'ruby22' @parser_class = Parser::Ruby22 end opts.on '--23', 'Parse as Ruby 2.3 would' do require_relative 'ruby23' @parser_class = Parser::Ruby23 end opts.on '--24', 'Parse as Ruby 2.4 would' do require_relative 'ruby24' @parser_class = Parser::Ruby24 end opts.on '--25', 'Parse as Ruby 2.5 would' do require_relative 'ruby25' @parser_class = Parser::Ruby25 end opts.on '--26', 'Parse as Ruby 2.6 would' do require_relative 'ruby26' @parser_class = Parser::Ruby26 end opts.on '--27', 'Parse as Ruby 2.7 would' do require_relative 'ruby27' @parser_class = Parser::Ruby27 end opts.on '--30', 'Parse as Ruby 3.0 would' do require_relative 'ruby30' @parser_class = Parser::Ruby30 end opts.on '--31', 'Parse as Ruby 3.1 would' do require_relative 'ruby31' @parser_class = Parser::Ruby31 end opts.on '--32', 'Parse as Ruby 3.2 would' do require_relative 'ruby32' @parser_class = Parser::Ruby32 end opts.on '--33', 'Parse as Ruby 3.3 would' do require_relative 'ruby33' @parser_class = Parser::Ruby33 end opts.on '--34', 'Parse as Ruby 3.4 would' do require_relative 'ruby34' @parser_class = Parser::Ruby34 end opts.on '--mac', 'Parse as MacRuby 0.12 would' do require_relative 'macruby' @parser_class = Parser::MacRuby end opts.on '--ios', 'Parse as mid-2015 RubyMotion would' do require_relative 'rubymotion' @parser_class = Parser::RubyMotion end opts.on '--legacy', "Parse with all legacy modes" do @legacy = Hash.new(true) end LEGACY_MODES.each do |mode| opt_name = "--legacy-#{mode.to_s.gsub('_', '-')}" opts.on opt_name, "Parse with legacy mode for emit_#{mode}" do @legacy[mode] = true end end opts.on '-w', '--warnings', 'Enable warnings' do |w| @warnings = w end opts.on '-B', '--benchmark', 'Benchmark the processor' do |b| @benchmark = b end opts.on '-e fragment', 'Process a fragment of Ruby code' do |fragment| @fragments << fragment end end def parse_options(options) @option_parser.parse!(options) # Slop has just removed recognized options from `options`. @fragments << $stdin.read if options.delete('-') options.each do |file_or_dir| if File.directory?(file_or_dir) Find.find(file_or_dir) do |path| @files << path if path.end_with? '.rb' end else @files << file_or_dir end end if @files.empty? && @fragments.empty? $stderr.puts 'Need something to parse!' exit 1 end if @parser_class.nil? 
require_relative 'current' @parser_class = Parser::CurrentRuby end end def setup_builder_default LEGACY_MODES.each do |mode| Parser::Builders::Default.send(:"emit_#{mode}=", !@legacy[mode]) end end def prepare_parser @parser = @parser_class.new @parser.diagnostics.all_errors_are_fatal = true @parser.diagnostics.ignore_warnings = !@warnings @parser.diagnostics.consumer = lambda do |diagnostic| puts(diagnostic.render) end end def input_size @files.size + @fragments.size end def process_all_input parsing_time = Benchmark.measure do process_fragments process_files end if @benchmark report_with_time(parsing_time) end end def process_fragments @fragments.each_with_index do |fragment, index| fragment = fragment.dup.force_encoding(@parser.default_encoding) buffer = Source::Buffer.new("(fragment:#{index})") buffer.source = fragment process_buffer(buffer) end end def process_files @files.each do |filename| source = File.read(filename).force_encoding(@parser.default_encoding) buffer = Parser::Source::Buffer.new(filename) if @parser.class.name == 'Parser::Ruby18' buffer.raw_source = source else buffer.source = source end process_buffer(buffer) end end def process_buffer(buffer) @parser.reset process(buffer) @source_count += 1 @source_size += buffer.source.size rescue Parser::SyntaxError # skip rescue StandardError $stderr.puts("Failed on: #{buffer.name}") raise end def process(buffer) raise NotImplementedError, "implement #{self.class}##{__callee__}" end def report_with_time(parsing_time) cpu_time = parsing_time.utime speed = '%.3f' % (@source_size / cpu_time / 1000) puts "Parsed #{@source_count} files (#{@source_size} characters)" \ " in #{'%.2f' % cpu_time} seconds (#{speed} kchars/s)." if defined?(RUBY_ENGINE) engine = RUBY_ENGINE else engine = 'ruby' end puts "Running on #{engine} #{RUBY_VERSION}." end end end parser-3.3.4.2/lib/parser/runner/000077500000000000000000000000001465510415600165375ustar00rootroot00000000000000parser-3.3.4.2/lib/parser/runner/ruby_parse.rb000066400000000000000000000073411465510415600212440ustar00rootroot00000000000000# frozen_string_literal: true require_relative '../runner' require_relative '../color' require_relative '../lexer/explanation' require 'json' module Parser class Runner::RubyParse < Parser::Runner class LocationProcessor < Parser::AST::Processor def process(node) if node p node source_line_no = nil source_line = '' hilight_line = '' print_line = lambda do unless hilight_line.empty? puts hilight_line. gsub(/[a-z_]+/) { |m| Color.yellow(m, bold: true) }. gsub(/[~.]+/) { |m| Color.magenta(m, bold: true) } hilight_line = '' end end print_source = lambda do |range| source_line = range.source_line puts Color.green(source_line) source_line end (node.loc || {}).to_hash. sort_by do |name, range| [(range ? range.line : 0), (name == :expression ? 1 : 0)] end. each do |name, range| next if range.nil? if source_line_no != range.line print_line.call() source_line = print_source.call(range) source_line_no = range.line end beg_col = range.begin.column if beg_col + range.length > source_line.length multiline = true range_length = source_line.length - beg_col + 3 else multiline = false range_length = range.length end length = range_length + 1 + name.length end_col = beg_col + length if beg_col > 0 col_range = (beg_col - 1)...end_col else col_range = beg_col...end_col end if hilight_line.length < end_col hilight_line = hilight_line.ljust(end_col) end if hilight_line[col_range] =~ /^\s*$/ if multiline tail = ('~' * (source_line.length - beg_col)) + '...' 
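# Illustrative sketch, not part of the library code above: the Runner
# subclasses are normally driven by the ruby-parse / ruby-rewrite executables,
# but Runner.go simply feeds an argv-style array to OptionParser, so a minimal
# programmatic invocation, equivalent to `ruby-parse -e '2 + 2'` on the shell,
# looks like this. Assumes the gem is installed.
require 'parser/runner/ruby_parse'

Parser::Runner::RubyParse.go(['-e', '2 + 2'])
# prints the S-expression for the fragment, e.g. (send (int 2) :+ (int 2))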
else tail = '~' * range_length end tail = ' ' + tail if beg_col > 0 hilight_line[col_range] = tail + " #{name}" else print_line.call redo end end print_line.call end super end end def initialize super @locate = false @emit_ruby = false @emit_json = false end private def runner_name 'ruby-parse' end def setup_option_parsing(opts) super(opts) opts.on '-L', '--locate', 'Explain how source maps for AST nodes are laid out' do |v| @locate = v end opts.on '-E', '--explain', 'Explain how the source is tokenized' do ENV['RACC_DEBUG'] = '1' Lexer.send :include, Lexer::Explanation end opts.on '--emit-ruby', 'Emit S-expressions as valid Ruby code' do @emit_ruby = true end opts.on '--emit-json', 'Emit S-expressions as valid JSON' do @emit_json = true end end def process_all_input if input_size > 1 puts "Using #{@parser_class} to parse #{input_size} files." end super end def process(buffer) ast = @parser.parse(buffer) if @locate LocationProcessor.new.process(ast) elsif !@benchmark if @emit_ruby puts ast.inspect elsif @emit_json puts(ast ? JSON.generate(ast.to_sexp_array) : nil) else puts ast.to_s end end end end end parser-3.3.4.2/lib/parser/runner/ruby_rewrite.rb000066400000000000000000000042521465510415600216110ustar00rootroot00000000000000# frozen_string_literal: true require_relative '../runner' require 'tempfile' module Parser class Runner::RubyRewrite < Runner def initialize super @rewriters = [] @modify = false end private def runner_name 'ruby-rewrite' end def setup_option_parsing(opts) super(opts) opts.on '-l file', '--load', 'Load a rewriter' do |file| load_and_discover(file) end opts.on '-m', '--modify', 'Assume rewriters normally modify AST' do @modify = true end end def load_and_discover(file) load file const_name = File.basename(file). sub(/\.rb$/, ''). gsub(/(^|_)([a-z])/) do |m| "#{$2.upcase}" end @rewriters << Object.const_get(const_name) end def process(initial_buffer) buffer = initial_buffer original_name = buffer.name @rewriters.each do |rewriter_class| @parser.reset ast = @parser.parse(buffer) rewriter = rewriter_class.new new_source = rewriter.rewrite(buffer, ast) new_buffer = Source::Buffer.new(initial_buffer.name + '|after ' + rewriter_class.name, source: new_source) @parser.reset new_ast = @parser.parse(new_buffer) if !@modify && ast != new_ast $stderr.puts 'ASTs do not match:' old = Tempfile.new('old') old.write ast.inspect + "\n"; old.flush new = Tempfile.new('new') new.write new_ast.inspect + "\n"; new.flush IO.popen("diff -u #{old.path} #{new.path}") do |io| diff = io.read. sub(/^---.*/, "--- #{buffer.name}"). sub(/^\+\+\+.*/, "+++ #{new_buffer.name}") $stderr.write diff end exit 1 end buffer = new_buffer end if File.exist?(original_name) File.open(original_name, 'w') do |file| file.write buffer.source end else if input_size > 1 puts "Rewritten content of #{buffer.name}:" end puts buffer.source end end end end parser-3.3.4.2/lib/parser/source/000077500000000000000000000000001465510415600165265ustar00rootroot00000000000000parser-3.3.4.2/lib/parser/source/buffer.rb000066400000000000000000000240041465510415600203240ustar00rootroot00000000000000# encoding: ascii-8bit # frozen_string_literal: true module Parser module Source ## # A buffer with source code. {Buffer} contains the source code itself, # associated location information (name and first line), and takes care # of encoding. # # A source buffer is immutable once populated. # # @!attribute [r] name # Buffer name. If the buffer was created from a file, the name corresponds # to relative path to the file. 
# @return [String] buffer name # # @!attribute [r] first_line # First line of the buffer, 1 by default. # @return [Integer] first line # # @api public # class Buffer attr_reader :name, :first_line ## # @api private # ENCODING_RE = /[\s#](en)?coding\s*[:=]\s* ( # Special-case: there's a UTF8-MAC encoding. (utf8-mac) | # Chew the suffix; it's there for emacs compat. ([A-Za-z0-9_-]+?)(-unix|-dos|-mac) | ([A-Za-z0-9_-]+) ) /x ## # Try to recognize encoding of `string` as Ruby would, i.e. by looking for # magic encoding comment or UTF-8 BOM. `string` can be in any encoding. # # @param [String] string # @raise [Parser::UnknownEncodingInMagicComment] if the encoding is not recognized # @return [String, nil] encoding name, if recognized # def self.recognize_encoding(string) return if string.empty? # extract the first two lines in an efficient way string =~ /\A(.*)\n?(.*\n)?/ first_line, second_line = $1, $2 if first_line.start_with?("\xef\xbb\xbf".freeze) # BOM return Encoding::UTF_8 elsif first_line[0, 2] == '#!'.freeze encoding_line = second_line else encoding_line = first_line end return nil if encoding_line.nil? || encoding_line[0] != '#' if (result = ENCODING_RE.match(encoding_line)) begin Encoding.find(result[3] || result[4] || result[6]) rescue ArgumentError => e raise Parser::UnknownEncodingInMagicComment, e.message end else nil end end ## # Recognize encoding of `input` and process it so it could be lexed. # # * If `input` does not contain BOM or magic encoding comment, it is # kept in the original encoding. # * If the detected encoding is binary, `input` is kept in binary. # * Otherwise, `input` is re-encoded into UTF-8 and returned as a # new string. # # This method mutates the encoding of `input`, but not its content. # # @param [String] input # @return [String] # @raise [EncodingError] # def self.reencode_string(input) original_encoding = input.encoding detected_encoding = recognize_encoding(input.force_encoding(Encoding::BINARY)) if detected_encoding.nil? input.force_encoding(original_encoding) elsif detected_encoding == Encoding::BINARY input else input. force_encoding(detected_encoding). encode(Encoding::UTF_8) end end def initialize(name, first_line = 1, source: nil) @name = name.to_s @source = nil @first_line = first_line @lines = nil @line_begins = nil # UTF-32-reencoded source for O(1) slicing @slice_source = nil # Cache for fast lookup @line_index_for_position = {} self.source = source if source end ## # Populate this buffer from correspondingly named file. # # @example # Parser::Source::Buffer.new('foo/bar.rb').read # # @return [Buffer] self # @raise [ArgumentError] if already populated # def read File.open(@name, 'rb') do |io| self.source = io.read end self end ## # Source code contained in this buffer. # # @return [String] source code # @raise [RuntimeError] if buffer is not populated yet # def source if @source.nil? raise RuntimeError, 'Cannot extract source from uninitialized Source::Buffer' end @source end ## # Populate this buffer from a string with encoding autodetection. # `input` is mutated if not frozen. # # @param [String] input # @raise [ArgumentError] if already populated # @raise [EncodingError] if `input` includes invalid byte sequence for the encoding # @return [String] # def source=(input) input = input.dup if input.frozen? input = self.class.reencode_string(input) unless input.valid_encoding? 
raise EncodingError, "invalid byte sequence in #{input.encoding.name}" end self.raw_source = input end ## # Populate this buffer from a string without encoding autodetection. # # @param [String] input # @raise [ArgumentError] if already populated # @return [String] # def raw_source=(input) if @source raise ArgumentError, 'Source::Buffer is immutable' end @source = input.gsub("\r\n".freeze, "\n".freeze).freeze if !@source.ascii_only? && @source.encoding != Encoding::UTF_32LE && @source.encoding != Encoding::BINARY @slice_source = @source.encode(Encoding::UTF_32LE) end end def slice(start, length = nil) if length.nil? if start.is_a?(::Range) length = start.size start = start.begin else length = 1 end end if @slice_source.nil? @source[start, length] else @slice_source[start, length].encode(@source.encoding) end end ## # Convert a character index into the source to a `[line, column]` tuple. # # @param [Integer] position # @return [[Integer, Integer]] `[line, column]` # def decompose_position(position) line_index = line_index_for_position(position) line_begin = line_begins[line_index] [ @first_line + line_index , position - line_begin ] end ## # Convert a character index into the source to a line number. # # @param [Integer] position # @return [Integer] line # @api private # def line_for_position(position) line_index_for_position(position) + @first_line end ## # Convert a character index into the source to a column number. # # @param [Integer] position # @return [Integer] column # @api private # def column_for_position(position) line_index = line_index_for_position(position) position - line_begins[line_index] end ## # Return an `Array` of source code lines. # # @return [Array] # def source_lines @lines ||= begin lines = @source.lines.to_a lines << ''.dup if @source.end_with?("\n".freeze) lines.each do |line| line.chomp!("\n".freeze) line.freeze end lines.freeze end end ## # Extract line `lineno` from source, taking `first_line` into account. # # @param [Integer] lineno # @return [String] # @raise [IndexError] if `lineno` is out of bounds # def source_line(lineno) source_lines.fetch(lineno - @first_line).dup end ## # Extract line `lineno` as a new `Range`, taking `first_line` into account. # # @param [Integer] lineno # @return [Range] # @raise [IndexError] if `lineno` is out of bounds # def line_range(lineno) index = lineno - @first_line if index < 0 || index + 1 >= line_begins.size raise IndexError, 'Parser::Source::Buffer: range for line ' \ "#{lineno} requested, valid line numbers are #{@first_line}.." \ "#{@first_line + line_begins.size - 2}" else Range.new(self, line_begins[index], line_begins[index + 1] - 1) end end ## # @return [Range] A range covering the whole source # def source_range @source_range ||= Range.new(self, 0, source.size) end ## # Number of last line in the buffer # # @return [Integer] # def last_line line_begins.size + @first_line - 2 end # :nodoc: def freeze source_lines; line_begins; source_range # build cache super end # :nodoc: def inspect "#<#{self.class} #{name}>" end private # @returns [0, line_begin_of_line_1, ..., source.size + 1] def line_begins @line_begins ||= begin begins = [0] index = 0 while index = @source.index("\n".freeze, index) index += 1 begins << index end begins << @source.size + 1 begins end end # @returns 0-based line index of position def line_index_for_position(position) @line_index_for_position[position] || begin index = bsearch(line_begins, position) - 1 @line_index_for_position[position] = index unless @line_index_for_position.frozen? 
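# Illustrative sketch, not part of the library code above: a quick check of the
# position helpers defined on Buffer, assuming the gem is loadable as
# `parser/current` (any entry point that loads Parser::Source works).
require 'parser/current'

buffer = Parser::Source::Buffer.new('(example)', source: "foo\nbar\n")
p buffer.decompose_position(4) # => [2, 0]  (start of "bar")
p buffer.line_for_position(4)  # => 2
p buffer.source_line(2)        # => "bar"
p buffer.last_line             # => 3       (the trailing newline yields an empty final line)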
index end end if Array.method_defined?(:bsearch_index) # RUBY_VERSION >= 2.3 def bsearch(line_begins, position) line_begins.bsearch_index do |line_begin| position < line_begin end || line_begins.size - 1 # || only for out of bound values end else def bsearch(line_begins, position) @line_range ||= 0...line_begins.size @line_range.bsearch do |i| position < line_begins[i] end || line_begins.size - 1 # || only for out of bound values end end end end end parser-3.3.4.2/lib/parser/source/comment.rb000066400000000000000000000063421465510415600205220ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source ## # A comment in the source code. # # @!attribute [r] text # @return [String] # # @!attribute [r] location # @return [Parser::Source::Range] # # @api public # class Comment attr_reader :text attr_reader :location alias_method :loc, :location ## # Associate `comments` with `ast` nodes by their corresponding node. # # @param [Parser::AST::Node] ast # @param [Array] comments # @return [Hash>] # @see Parser::Source::Comment::Associator#associate # @deprecated Use {associate_locations}. # def self.associate(ast, comments) associator = Associator.new(ast, comments) associator.associate end ## # Associate `comments` with `ast` nodes by their location in the # source. # # @param [Parser::AST::Node] ast # @param [Array] comments # @return [Hash>] # @see Parser::Source::Comment::Associator#associate_locations # def self.associate_locations(ast, comments) associator = Associator.new(ast, comments) associator.associate_locations end ## # Associate `comments` with `ast` nodes using identity. # # @param [Parser::AST::Node] ast # @param [Array] comments # @return [Hash>] # @see Parser::Source::Comment::Associator#associate_by_identity # def self.associate_by_identity(ast, comments) associator = Associator.new(ast, comments) associator.associate_by_identity end ## # @param [Parser::Source::Range] range # def initialize(range) @location = Parser::Source::Map.new(range) @text = range.source.freeze freeze end ## # Type of this comment. # # * Inline comments correspond to `:inline`: # # # whatever # # * Block comments correspond to `:document`: # # =begin # hi i am a document # =end # # @return [Symbol] # def type if text.start_with?("#".freeze) :inline elsif text.start_with?("=begin".freeze) :document end end ## # @see #type # @return [Boolean] true if this is an inline comment. # def inline? type == :inline end ## # @see #type # @return [Boolean] true if this is a block comment. # def document? type == :document end ## # Compares comments. Two comments are equal if they # correspond to the same source range. # # @param [Object] other # @return [Boolean] # def ==(other) other.is_a?(Source::Comment) && @location == other.location end ## # @return [String] a human-readable representation of this comment # def inspect "#" end end end end parser-3.3.4.2/lib/parser/source/comment/000077500000000000000000000000001465510415600201705ustar00rootroot00000000000000parser-3.3.4.2/lib/parser/source/comment/associator.rb000066400000000000000000000160721465510415600226720ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source ## # A processor which associates AST nodes with comments based on their # location in source code. It may be used, for example, to implement # rdoc-style processing. 
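# Illustrative sketch, not part of the library code above: typical use of the
# associator through the Comment.associate_locations shortcut, assuming the gem
# is loadable as `parser/current`. The hash keys are the nodes' Source::Map
# objects, so equal nodes at different locations stay distinct.
require 'parser/current'

code = "# adds one\ndef succ(x)\n  x + 1 # inline\nend\n"
ast, comments = Parser::CurrentRuby.parse_with_comments(code)
p Parser::Source::Comment.associate_locations(ast, comments)
# => a Hash mapping each node's source map to the comments attached to it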
# # @example # require 'parser/current' # # ast, comments = Parser::CurrentRuby.parse_with_comments(<<-CODE) # # Class stuff # class Foo # # Attr stuff # # @see bar # attr_accessor :foo # end # CODE # # p Parser::Source::Comment.associate(ast, comments) # # => { # # (class (const nil :Foo) ...) => # # [#], # # (send nil :attr_accessor (sym :foo)) => # # [#, # # #] # # } # # @see {associate} # # @!attribute skip_directives # Skip file processing directives disguised as comments. # Namely: # # * Shebang line, # * Magic encoding comment. # # @return [Boolean] # # @api public # class Comment::Associator attr_accessor :skip_directives ## # @param [Parser::AST::Node] ast # @param [Array] comments def initialize(ast, comments) @ast = ast @comments = comments @skip_directives = true end ## # Compute a mapping between AST nodes and comments. Comment is # associated with the node, if it is one of the following types: # # - preceding comment, it ends before the node start # - sparse comment, it is located inside the node, after all child nodes # - decorating comment, it starts at the same line, where the node ends # # This rule is unambiguous and produces the result # one could reasonably expect; for example, this code # # # foo # hoge # bar # + fuga # # will result in the following association: # # { # (send (lvar :hoge) :+ (lvar :fuga)) => # [#], # (lvar :fuga) => # [#] # } # # Note that comments after the end of the end of a passed tree range are # ignored (except root decorating comment). # # Note that {associate} produces unexpected result for nodes which are # equal but have distinct locations; comments for these nodes are merged. # You may prefer using {associate_by_identity} or {associate_locations}. # # @return [Hash>] # @deprecated Use {associate_locations}. # def associate @map_using = :eql do_associate end ## # Same as {associate}, but uses `node.loc` instead of `node` as # the hash key, thus producing an unambiguous result even in presence # of equal nodes. # # @return [Hash>] # def associate_locations @map_using = :location do_associate end ## # Same as {associate}, but compares by identity, thus producing an unambiguous # result even in presence of equal nodes. # # @return [Hash>] # def associate_by_identity @map_using = :identity do_associate end private POSTFIX_TYPES = Set[:if, :while, :while_post, :until, :until_post, :masgn].freeze def children_in_source_order(node) if POSTFIX_TYPES.include?(node.type) # All these types have either nodes with expressions, or `nil` # so a compact will do, but they need to be sorted. node.children.compact.sort_by { |child| child.loc.expression.begin_pos } else node.children.select do |child| child.is_a?(AST::Node) && child.loc && child.loc.expression end end end def do_associate @mapping = Hash.new { |h, k| h[k] = [] } @mapping.compare_by_identity if @map_using == :identity @comment_num = -1 advance_comment advance_through_directives if @skip_directives visit(@ast) if @ast @mapping end def visit(node) process_leading_comments(node) return unless @current_comment # If the next comment is beyond the last line of this node, we don't # need to iterate over its subnodes # (Unless this node is a heredoc... 
there could be a comment in its body, # inside an interpolation) node_loc = node.location if @current_comment.location.line <= node_loc.last_line || node_loc.is_a?(Map::Heredoc) children_in_source_order(node).each { |child| visit(child) } process_trailing_comments(node) end end def process_leading_comments(node) return if node.type == :begin while current_comment_before?(node) # preceding comment associate_and_advance_comment(node) end end def process_trailing_comments(node) while current_comment_before_end?(node) associate_and_advance_comment(node) # sparse comment end while current_comment_decorates?(node) associate_and_advance_comment(node) # decorating comment end end def advance_comment @comment_num += 1 @current_comment = @comments[@comment_num] end def current_comment_before?(node) return false if !@current_comment comment_loc = @current_comment.location.expression node_loc = node.location.expression comment_loc.end_pos <= node_loc.begin_pos end def current_comment_before_end?(node) return false if !@current_comment comment_loc = @current_comment.location.expression node_loc = node.location.expression comment_loc.end_pos <= node_loc.end_pos end def current_comment_decorates?(node) return false if !@current_comment @current_comment.location.line == node.location.last_line end def associate_and_advance_comment(node) key = @map_using == :location ? node.location : node @mapping[key] << @current_comment advance_comment end MAGIC_COMMENT_RE = /^#\s*(-\*-|)\s*(frozen_string_literal|warn_indent|warn_past_scope):.*\1$/ def advance_through_directives # Skip shebang. if @current_comment && @current_comment.text.start_with?('#!'.freeze) advance_comment end # Skip magic comments. if @current_comment && @current_comment.text =~ MAGIC_COMMENT_RE advance_comment end # Skip encoding line. if @current_comment && @current_comment.text =~ Buffer::ENCODING_RE advance_comment end end end end end parser-3.3.4.2/lib/parser/source/map.rb000066400000000000000000000123111465510415600176260ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source ## # {Map} relates AST nodes to the source code they were parsed from. # More specifically, a {Map} or its subclass contains a set of ranges: # # * `expression`: smallest range which includes all source corresponding # to the node and all `expression` ranges of its children. # * other ranges (`begin`, `end`, `operator`, ...): node-specific ranges # pointing to various interesting tokens corresponding to the node. # # Note that the {Map::Heredoc} map is the only one whose `expression` does # not include other ranges. It only covers the heredoc marker (`< 2]').children[0].loc # # => > # # The {file:doc/AST_FORMAT.md} document describes how ranges associated to source # code tokens. For example, the entry # # (array (int 1) (int 2)) # # "[1, 2]" # ^ begin # ^ end # ~~~~~~ expression # # means that if `node` is an {Parser::AST::Node} `(array (int 1) (int 2))`, # then `node.loc` responds to `begin`, `end` and `expression`, and # `node.loc.begin` returns a range pointing at the opening bracket, and so on. # # If you want to write code polymorphic by the source map (i.e. accepting # several subclasses of {Map}), use `respond_to?` instead of `is_a?` to # check whether the map features the range you need. Concrete {Map} # subclasses may not be preserved between versions, but their interfaces # will be kept compatible. # # You can visualize the source maps with `ruby-parse -E` command-line tool. 
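    #
    # A minimal sketch of reading source text back through a range:
    #
    #     require 'parser/current'
    #
    #     node = Parser::CurrentRuby.parse('[1, 2]')
    #     node.loc.expression.source # => "[1, 2]"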
# # @example # require 'parser/current' # # p Parser::CurrentRuby.parse('[1, 2]').loc # # => #, # # @begin=#, # # @expression=#> # # @!attribute [r] node # The node that is described by this map. Nodes and maps have 1:1 correspondence. # @return [Parser::AST::Node] # # @!attribute [r] expression # @return [Range] # # @api public # class Map attr_reader :node attr_reader :expression ## # @param [Range] expression def initialize(expression) @expression = expression end ## # @api private def initialize_copy(other) super @node = nil end ## # @api private def node=(node) @node = node freeze @node end ## # A shortcut for `self.expression.line`. # @return [Integer] # def line @expression.line end alias_method :first_line, :line ## # A shortcut for `self.expression.column`. # @return [Integer] # def column @expression.column end ## # A shortcut for `self.expression.last_line`. # @return [Integer] # def last_line @expression.last_line end ## # A shortcut for `self.expression.last_column`. # @return [Integer] # def last_column @expression.last_column end ## # @api private # def with_expression(expression_l) with { |map| map.update_expression(expression_l) } end ## # Compares source maps. # @return [Boolean] # def ==(other) other.class == self.class && instance_variables.map do |ivar| instance_variable_get(ivar) == other.send(:instance_variable_get, ivar) end.reduce(:&) end ## # Converts this source map to a hash with keys corresponding to # ranges. For example, if called on an instance of {Collection}, # which adds the `begin` and `end` ranges, the resulting hash # will contain keys `:expression`, `:begin` and `:end`. # # @example # require 'parser/current' # # p Parser::CurrentRuby.parse('[1, 2]').loc.to_hash # # => { # # :begin => #, # # :end => #, # # :expression => # # # } # # @return [Hash] # def to_hash instance_variables.inject({}) do |hash, ivar| next hash if ivar.to_sym == :@node hash[ivar[1..-1].to_sym] = instance_variable_get(ivar) hash end end protected def with(&block) dup.tap(&block) end def update_expression(expression_l) @expression = expression_l end end end end parser-3.3.4.2/lib/parser/source/map/000077500000000000000000000000001465510415600173035ustar00rootroot00000000000000parser-3.3.4.2/lib/parser/source/map/collection.rb000066400000000000000000000004421465510415600217630ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::Collection < Map attr_reader :begin attr_reader :end def initialize(begin_l, end_l, expression_l) @begin, @end = begin_l, end_l super(expression_l) end end end end parser-3.3.4.2/lib/parser/source/map/condition.rb000066400000000000000000000006231465510415600216170ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::Condition < Map attr_reader :keyword attr_reader :begin attr_reader :else attr_reader :end def initialize(keyword_l, begin_l, else_l, end_l, expression_l) @keyword = keyword_l @begin, @else, @end = begin_l, else_l, end_l super(expression_l) end end end end parser-3.3.4.2/lib/parser/source/map/constant.rb000066400000000000000000000010751465510415600214640ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::Constant < Map attr_reader :double_colon attr_reader :name attr_reader :operator def initialize(double_colon, name, expression) @double_colon, @name = double_colon, name super(expression) end ## # @api private # def with_operator(operator_l) with { |map| map.update_operator(operator_l) } end protected def 
update_operator(operator_l) @operator = operator_l end end end end parser-3.3.4.2/lib/parser/source/map/definition.rb000066400000000000000000000006631465510415600217650ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::Definition < Map attr_reader :keyword attr_reader :operator attr_reader :name attr_reader :end def initialize(keyword_l, operator_l, name_l, end_l) @keyword = keyword_l @operator = operator_l @name = name_l @end = end_l super(@keyword.join(@end)) end end end end parser-3.3.4.2/lib/parser/source/map/for.rb000066400000000000000000000005441465510415600204210ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::For < Map attr_reader :keyword, :in attr_reader :begin, :end def initialize(keyword_l, in_l, begin_l, end_l, expression_l) @keyword, @in = keyword_l, in_l @begin, @end = begin_l, end_l super(expression_l) end end end end parser-3.3.4.2/lib/parser/source/map/heredoc.rb000066400000000000000000000004721465510415600212440ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::Heredoc < Map attr_reader :heredoc_body attr_reader :heredoc_end def initialize(begin_l, body_l, end_l) @heredoc_body = body_l @heredoc_end = end_l super(begin_l) end end end end parser-3.3.4.2/lib/parser/source/map/index.rb000066400000000000000000000010761465510415600207430ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::Index < Map attr_reader :begin attr_reader :end attr_reader :operator def initialize(begin_l, end_l, expression_l) @begin, @end = begin_l, end_l @operator = nil super(expression_l) end ## # @api private # def with_operator(operator_l) with { |map| map.update_operator(operator_l) } end protected def update_operator(operator_l) @operator = operator_l end end end end parser-3.3.4.2/lib/parser/source/map/keyword.rb000066400000000000000000000005461465510415600213210ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::Keyword < Map attr_reader :keyword attr_reader :begin attr_reader :end def initialize(keyword_l, begin_l, end_l, expression_l) @keyword = keyword_l @begin, @end = begin_l, end_l super(expression_l) end end end end parser-3.3.4.2/lib/parser/source/map/method_definition.rb000066400000000000000000000010331465510415600233150ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::MethodDefinition < Map attr_reader :keyword attr_reader :operator attr_reader :name attr_reader :end attr_reader :assignment def initialize(keyword_l, operator_l, name_l, end_l, assignment_l, body_l) @keyword = keyword_l @operator = operator_l @name = name_l @end = end_l @assignment = assignment_l super(@keyword.join(end_l || body_l)) end end end end parser-3.3.4.2/lib/parser/source/map/objc_kwarg.rb000066400000000000000000000005741465510415600217460ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::ObjcKwarg < Map attr_reader :keyword attr_reader :operator attr_reader :argument def initialize(keyword_l, operator_l, argument_l, expression_l) @keyword, @operator, @argument = keyword_l, operator_l, argument_l super(expression_l) end end end end parser-3.3.4.2/lib/parser/source/map/operator.rb000066400000000000000000000003711465510415600214640ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::Operator < Map attr_reader :operator def initialize(operator, 
expression) @operator = operator super(expression) end end end end parser-3.3.4.2/lib/parser/source/map/rescue_body.rb000066400000000000000000000005711465510415600221360ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::RescueBody < Map attr_reader :keyword attr_reader :assoc attr_reader :begin def initialize(keyword_l, assoc_l, begin_l, expression_l) @keyword = keyword_l @assoc = assoc_l @begin = begin_l super(expression_l) end end end end parser-3.3.4.2/lib/parser/source/map/send.rb000066400000000000000000000012521465510415600205610ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::Send < Map attr_reader :dot attr_reader :selector attr_reader :operator attr_reader :begin attr_reader :end def initialize(dot_l, selector_l, begin_l, end_l, expression_l) @dot = dot_l @selector = selector_l @begin, @end = begin_l, end_l super(expression_l) end ## # @api private # def with_operator(operator_l) with { |map| map.update_operator(operator_l) } end protected def update_operator(operator_l) @operator = operator_l end end end end parser-3.3.4.2/lib/parser/source/map/ternary.rb000066400000000000000000000004631465510415600213170ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::Ternary < Map attr_reader :question attr_reader :colon def initialize(question_l, colon_l, expression_l) @question, @colon = question_l, colon_l super(expression_l) end end end end parser-3.3.4.2/lib/parser/source/map/variable.rb000066400000000000000000000010011465510415600214050ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source class Map::Variable < Map attr_reader :name attr_reader :operator def initialize(name_l, expression_l=name_l) @name = name_l super(expression_l) end ## # @api private # def with_operator(operator_l) with { |map| map.update_operator(operator_l) } end protected def update_operator(operator_l) @operator = operator_l end end end end parser-3.3.4.2/lib/parser/source/range.rb000066400000000000000000000212201465510415600201440ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source ## # A range of characters in a particular source buffer. # # The range is always exclusive, i.e. a range with `begin_pos` of 3 and # `end_pos` of 5 will contain the following characters: # # example # ^^ # # @!attribute [r] source_buffer # @return [Parser::Source::Buffer] # # @!attribute [r] begin_pos # @return [Integer] index of the first character in the range # # @!attribute [r] end_pos # @return [Integer] index of the character after the last character in the range # # @api public # class Range include Comparable attr_reader :source_buffer attr_reader :begin_pos, :end_pos ## # @param [Buffer] source_buffer # @param [Integer] begin_pos # @param [Integer] end_pos # def initialize(source_buffer, begin_pos, end_pos) if end_pos < begin_pos raise ArgumentError, 'Parser::Source::Range: end_pos must not be less than begin_pos' end if source_buffer.nil? raise ArgumentError, 'Parser::Source::Range: source_buffer must not be nil' end @source_buffer = source_buffer @begin_pos, @end_pos = begin_pos, end_pos freeze end ## # @return [Range] a zero-length range located just before the beginning # of this range. # def begin with(end_pos: @begin_pos) end ## # @return [Range] a zero-length range located just after the end # of this range. # def end with(begin_pos: @end_pos) end ## # @return [Integer] amount of characters included in this range. 
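      #
      # The count is in characters, consistent with `begin_pos` and
      # `end_pos` being character indices into the buffer.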
# def size @end_pos - @begin_pos end alias length size ## # Line number of the beginning of this range. By default, the first line # of a buffer is 1; as such, line numbers are most commonly one-based. # # @see Buffer # @return [Integer] line number of the beginning of this range. # def line @source_buffer.line_for_position(@begin_pos) end alias_method :first_line, :line ## # @return [Integer] zero-based column number of the beginning of this range. # def column @source_buffer.column_for_position(@begin_pos) end ## # @return [Integer] line number of the end of this range. # def last_line @source_buffer.line_for_position(@end_pos) end ## # @return [Integer] zero-based column number of the end of this range. # def last_column @source_buffer.column_for_position(@end_pos) end ## # @return [::Range] a range of columns spanned by this range. # @raise RangeError # def column_range if line != last_line raise RangeError, "#{self.inspect} spans more than one line" end column...last_column end ## # @return [String] a line of source code containing the beginning of this range. # def source_line @source_buffer.source_line(line) end ## # @return [String] all source code covered by this range. # def source @source_buffer.slice(@begin_pos, @end_pos - @begin_pos) end ## # `is?` provides a concise way to compare the source corresponding to this range. # For example, `r.source == '(' || r.source == 'begin'` is equivalent to # `r.is?('(', 'begin')`. # def is?(*what) what.include?(source) end ## # @return [Array] a set of character indexes contained in this range. # def to_a (@begin_pos...@end_pos).to_a end ## # @return [Range] a Ruby range with the same `begin_pos` and `end_pos` # def to_range self.begin_pos...self.end_pos end ## # Composes a GNU/Clang-style string representation of the beginning of this # range. # # For example, for the following range in file `foo.rb`, # # def foo # ^^^ # # `to_s` will return `foo.rb:1:5`. # Note that the column index is one-based. # # @return [String] # def to_s line, column = @source_buffer.decompose_position(@begin_pos) [@source_buffer.name, line, column + 1].join(':') end ## # @param [Hash] Endpoint(s) to change, any combination of :begin_pos or :end_pos # @return [Range] the same range as this range but with the given end point(s) changed # to the given value(s). # def with(begin_pos: @begin_pos, end_pos: @end_pos) Range.new(@source_buffer, begin_pos, end_pos) end ## # @param [Hash] Endpoint(s) to change, any combination of :begin_pos or :end_pos # @return [Range] the same range as this range but with the given end point(s) adjusted # by the given amount(s) # def adjust(begin_pos: 0, end_pos: 0) Range.new(@source_buffer, @begin_pos + begin_pos, @end_pos + end_pos) end ## # @param [Integer] new_size # @return [Range] a range beginning at the same point as this range and length `new_size`. # def resize(new_size) with(end_pos: @begin_pos + new_size) end ## # @param [Range] other # @return [Range] smallest possible range spanning both this range and `other`. # def join(other) Range.new(@source_buffer, [@begin_pos, other.begin_pos].min, [@end_pos, other.end_pos].max) end ## # @param [Range] other # @return [Range] overlapping region of this range and `other`, or `nil` # if they do not overlap # def intersect(other) unless disjoint?(other) Range.new(@source_buffer, [@begin_pos, other.begin_pos].max, [@end_pos, other.end_pos].min) end end ## # Return `true` iff this range and `other` are disjoint. # # Two ranges must be one and only one of ==, disjoint?, contains?, contained? 
or crossing? # # @param [Range] other # @return [Boolean] # def disjoint?(other) if empty? && other.empty? @begin_pos != other.begin_pos else @begin_pos >= other.end_pos || other.begin_pos >= @end_pos end end ## # Return `true` iff this range is not disjoint from `other`. # # @param [Range] other # @return [Boolean] `true` if this range and `other` overlap # def overlaps?(other) !disjoint?(other) end ## # Returns true iff this range contains (strictly) `other`. # # Two ranges must be one and only one of ==, disjoint?, contains?, contained? or crossing? # # @param [Range] other # @return [Boolean] # def contains?(other) (other.begin_pos <=> @begin_pos) + (@end_pos <=> other.end_pos) >= (other.empty? ? 2 : 1) end ## # Return `other.contains?(self)` # # Two ranges must be one and only one of ==, disjoint?, contains?, contained? or crossing? # # @param [Range] other # @return [Boolean] # def contained?(other) other.contains?(self) end ## # Returns true iff both ranges intersect and also have different elements from one another. # # Two ranges must be one and only one of ==, disjoint?, contains?, contained? or crossing? # # @param [Range] other # @return [Boolean] # def crossing?(other) return false unless overlaps?(other) (@begin_pos <=> other.begin_pos) * (@end_pos <=> other.end_pos) == 1 end ## # Checks if a range is empty; if it contains no characters # @return [Boolean] def empty? @begin_pos == @end_pos end ## # Compare ranges, first by begin_pos, then by end_pos. # def <=>(other) return nil unless other.is_a?(::Parser::Source::Range) && @source_buffer == other.source_buffer (@begin_pos <=> other.begin_pos).nonzero? || (@end_pos <=> other.end_pos) end alias_method :eql?, :== ## # Support for Ranges be used in as Hash indices and in Sets. # def hash [@source_buffer, @begin_pos, @end_pos].hash end ## # @return [String] a human-readable representation of this range. # def inspect "#" end end end end parser-3.3.4.2/lib/parser/source/rewriter.rb000066400000000000000000000410011465510415600207120ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source ## # {Rewriter} is deprecated. Use {TreeRewriter} instead. # # TreeRewriter has simplified semantics, and customizable policies # with regards to clobbering. Please read the documentation. # # Keep in mind: # - Rewriter was discarding the `end_pos` of the given range for `insert_before`, # and the `begin_pos` for `insert_after`. These are meaningful in TreeRewriter. # - TreeRewriter's wrap/insert_before/insert_after are multiple by default, while # Rewriter would raise clobbering errors if the non '_multi' version was called. 
# - The TreeRewriter policy closest to Rewriter's behavior is: # different_replacements: :raise, # swallowed_insertions: :raise, # crossing_deletions: :accept # # @!attribute [r] source_buffer # @return [Source::Buffer] # # @!attribute [r] diagnostics # @return [Diagnostic::Engine] # # @api public # @deprecated Use {TreeRewriter} # class Rewriter attr_reader :source_buffer attr_reader :diagnostics ## # @param [Source::Buffer] source_buffer # @deprecated Use {TreeRewriter} # def initialize(source_buffer) self.class.warn_of_deprecation @diagnostics = Diagnostic::Engine.new @diagnostics.consumer = lambda do |diag| $stderr.puts diag.render end @source_buffer = source_buffer @queue = [] @clobber = 0 @insertions = 0 # clobbered zero-length positions; index 0 is the far left @insert_before_multi_order = 0 @insert_after_multi_order = 0 @pending_queue = nil @pending_clobber = nil @pending_insertions = nil end ## # Removes the source range. # # @param [Range] range # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # @deprecated Use {TreeRewriter#remove} # def remove(range) append Rewriter::Action.new(range, ''.freeze) end ## # Inserts new code before the given source range. # # @param [Range] range # @param [String] content # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # @deprecated Use {TreeRewriter#insert_before} # def insert_before(range, content) append Rewriter::Action.new(range.begin, content) end ## # Inserts new code before and after the given source range. # # @param [Range] range # @param [String] before # @param [String] after # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # @deprecated Use {TreeRewriter#wrap} # def wrap(range, before, after) append Rewriter::Action.new(range.begin, before) append Rewriter::Action.new(range.end, after) end ## # Inserts new code before the given source range by allowing other # insertions at the same position. # Note that an insertion with latter invocation comes _before_ earlier # insertion at the same position in the rewritten source. # # @example Inserting '[(' # rewriter. # insert_before_multi(range, '('). # insert_before_multi(range, '['). # process # # @param [Range] range # @param [String] content # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # @deprecated Use {TreeRewriter#insert_before} # def insert_before_multi(range, content) @insert_before_multi_order -= 1 append Rewriter::Action.new(range.begin, content, true, @insert_before_multi_order) end ## # Inserts new code after the given source range. # # @param [Range] range # @param [String] content # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # @deprecated Use {TreeRewriter#insert_after} # def insert_after(range, content) append Rewriter::Action.new(range.end, content) end ## # Inserts new code after the given source range by allowing other # insertions at the same position. # Note that an insertion with latter invocation comes _after_ earlier # insertion at the same position in the rewritten source. # # @example Inserting ')]' # rewriter. # insert_after_multi(range, ')'). # insert_after_multi(range, ']'). 
# process # # @param [Range] range # @param [String] content # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # @deprecated Use {TreeRewriter#insert_after} # def insert_after_multi(range, content) @insert_after_multi_order += 1 append Rewriter::Action.new(range.end, content, true, @insert_after_multi_order) end ## # Replaces the code of the source range `range` with `content`. # # @param [Range] range # @param [String] content # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # @deprecated Use {TreeRewriter#replace} # def replace(range, content) append Rewriter::Action.new(range, content) end ## # Applies all scheduled changes to the `source_buffer` and returns # modified source as a new string. # # @return [String] # @deprecated Use {TreeRewriter#process} # def process if in_transaction? raise "Do not call #{self.class}##{__method__} inside a transaction" end adjustment = 0 source = @source_buffer.source.dup @queue.sort.each do |action| begin_pos = action.range.begin_pos + adjustment end_pos = begin_pos + action.range.length source[begin_pos...end_pos] = action.replacement adjustment += (action.replacement.length - action.range.length) end source end ## # Provides a protected block where a sequence of multiple rewrite actions # are handled atomically. If any of the actions failed by clobbering, # all the actions are rolled back. # # @example # begin # rewriter.transaction do # rewriter.insert_before(range_of_something, '(') # rewriter.insert_after(range_of_something, ')') # end # rescue Parser::ClobberingError # end # # @raise [RuntimeError] when no block is passed # @raise [RuntimeError] when already in a transaction # @deprecated Use {TreeRewriter#transaction} # def transaction unless block_given? raise "#{self.class}##{__method__} requires block" end if in_transaction? raise 'Nested transaction is not supported' end @pending_queue = @queue.dup @pending_clobber = @clobber @pending_insertions = @insertions yield @queue = @pending_queue @clobber = @pending_clobber @insertions = @pending_insertions self ensure @pending_queue = nil @pending_clobber = nil @pending_insertions = nil end private # Schedule a code update. If it overlaps with another update, check # whether they conflict, and raise a clobbering error if they do. # (As a special case, zero-length ranges at the same position are # considered to "overlap".) Otherwise, merge them. # # Updates which are adjacent to each other, but do not overlap, are also # merged. # # RULES: # # - Insertion ("replacing" a zero-length range): # - Two insertions at the same point conflict. This is true even # if the earlier insertion has already been merged with an adjacent # update, and even if they are both inserting the same text. # - An insertion never conflicts with a replace or remove operation # on its right or left side, which does not overlap it (in other # words, which does not update BOTH its right and left sides). # - An insertion always conflicts with a remove operation which spans # both its sides. # - An insertion conflicts with a replace operation which spans both its # sides, unless the replacement text is longer than the replaced text # by the size of the insertion (or more), and the portion of # replacement text immediately after the insertion position is # identical to the inserted text. # # - Removal operations never conflict with each other. 
# # - Replacement operations: # - Take the portion of each replacement text which falls within: # - The other operation's replaced region # - The other operation's replacement text, if it extends past the # end of its own replaced region (in other words, if the replacement # text is longer than the text it replaces) # - If and only if the taken texts are identical for both operations, # they do not conflict. # def append(action) range = action.range # Is this an insertion? if range.empty? # Replacing nothing with... nothing? return self if action.replacement.empty? if !action.allow_multiple_insertions? && (conflicting = clobbered_insertion?(range)) raise_clobber_error(action, [conflicting]) end record_insertion(range) if (adjacent = adjacent_updates?(range)) conflicting = adjacent.find do |a| a.range.overlaps?(range) && !replace_compatible_with_insertion?(a, action) end raise_clobber_error(action, [conflicting]) if conflicting merge_actions!(action, adjacent) else active_queue << action end else # It's a replace or remove operation. if (insertions = adjacent_insertions?(range)) insertions.each do |insertion| if range.overlaps?(insertion.range) && !replace_compatible_with_insertion?(action, insertion) raise_clobber_error(action, [insertion]) else action = merge_actions(action, [insertion]) active_queue.delete(insertion) end end end if (adjacent = adjacent_updates?(range)) if can_merge?(action, adjacent) record_replace(range) merge_actions!(action, adjacent) else raise_clobber_error(action, adjacent) end else record_replace(range) active_queue << action end end self end def record_insertion(range) self.active_insertions = active_insertions | (1 << range.begin_pos) end def record_replace(range) self.active_clobber = active_clobber | clobbered_position_mask(range) end def clobbered_position_mask(range) ((1 << range.size) - 1) << range.begin_pos end def adjacent_position_mask(range) ((1 << (range.size + 2)) - 1) << (range.begin_pos - 1) end def adjacent_insertion_mask(range) ((1 << (range.size + 1)) - 1) << range.begin_pos end def clobbered_insertion?(insertion) insertion_pos = insertion.begin_pos if active_insertions & (1 << insertion_pos) != 0 # The clobbered insertion may have already been merged with other # updates, so it won't necessarily have the same begin_pos. active_queue.find do |a| a.range.begin_pos <= insertion_pos && insertion_pos <= a.range.end_pos end end end def adjacent_insertions?(range) # Just retrieve insertions which have not been merged with an adjacent # remove or replace. if active_insertions & adjacent_insertion_mask(range) != 0 result = active_queue.select do |a| a.range.empty? && adjacent?(range, a.range) end result.empty? ? nil : result end end def adjacent_updates?(range) if active_clobber & adjacent_position_mask(range) != 0 active_queue.select { |a| adjacent?(range, a.range) } end end def replace_compatible_with_insertion?(replace, insertion) (replace.replacement.length - replace.range.size) >= insertion.range.size && (offset = insertion.range.begin_pos - replace.range.begin_pos) && replace.replacement[offset, insertion.replacement.length] == insertion.replacement end def can_merge?(action, existing) # Compare 2 replace/remove operations (neither is an insertion) range = action.range existing.all? do |other| overlap = range.intersect(other.range) next true if overlap.nil? 
# adjacent, not overlapping repl1_offset = overlap.begin_pos - range.begin_pos repl2_offset = overlap.begin_pos - other.range.begin_pos repl1_length = [other.range.length - repl2_offset, other.replacement.length - repl2_offset].max repl2_length = [range.length - repl1_offset, action.replacement.length - repl1_offset].max replacement1 = action.replacement[repl1_offset, repl1_length] || ''.freeze replacement2 = other.replacement[repl2_offset, repl2_length] || ''.freeze replacement1 == replacement2 end end def merge_actions(action, existing) actions = existing.push(action).sort_by do |a| [a.range.begin_pos, a.range.end_pos] end range = actions.first.range.join(actions.max_by { |a| a.range.end_pos }.range) Rewriter::Action.new(range, merge_replacements(actions)) end def merge_actions!(action, existing) new_action = merge_actions(action, existing) active_queue.delete(action) replace_actions(existing, new_action) end def merge_replacements(actions) result = ''.dup prev_act = nil actions.each do |act| if !prev_act || act.range.disjoint?(prev_act.range) result << act.replacement else prev_end = [prev_act.range.begin_pos + prev_act.replacement.length, prev_act.range.end_pos].max offset = prev_end - act.range.begin_pos result << act.replacement[offset..-1] if offset < act.replacement.size end prev_act = act end result end def replace_actions(old, updated) old.each { |act| active_queue.delete(act) } active_queue << updated end def raise_clobber_error(action, existing) # cannot replace 3 characters with "foobar" diagnostic = Diagnostic.new(:error, :invalid_action, { :action => action }, action.range) @diagnostics.process(diagnostic) # clobbered by: remove 3 characters diagnostic = Diagnostic.new(:note, :clobbered, { :action => existing[0] }, existing[0].range) @diagnostics.process(diagnostic) raise ClobberingError, "Parser::Source::Rewriter detected clobbering" end def in_transaction? !@pending_queue.nil? end def active_queue @pending_queue || @queue end def active_clobber @pending_clobber || @clobber end def active_insertions @pending_insertions || @insertions end def active_clobber=(value) if @pending_clobber @pending_clobber = value else @clobber = value end end def active_insertions=(value) if @pending_insertions @pending_insertions = value else @insertions = value end end def adjacent?(range1, range2) range1.begin_pos <= range2.end_pos && range2.begin_pos <= range1.end_pos end DEPRECATION_WARNING = [ 'Parser::Source::Rewriter is deprecated.', 'Please update your code to use Parser::Source::TreeRewriter instead' ].join("\n").freeze extend Deprecation end end end parser-3.3.4.2/lib/parser/source/rewriter/000077500000000000000000000000001465510415600203715ustar00rootroot00000000000000parser-3.3.4.2/lib/parser/source/rewriter/action.rb000066400000000000000000000021211465510415600221670ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source ## # @api private # class Rewriter::Action include Comparable attr_reader :range, :replacement, :allow_multiple_insertions, :order alias_method :allow_multiple_insertions?, :allow_multiple_insertions def initialize(range, replacement='', allow_multiple_insertions = false, order = 0) @range = range @replacement = replacement @allow_multiple_insertions = allow_multiple_insertions @order = order freeze end def <=>(other) result = range.begin_pos <=> other.range.begin_pos return result unless result.zero? order <=> other.order end def to_s if @range.length == 0 && @replacement.empty? 
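          # a zero-length range with an empty replacement is a no-op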
'do nothing' elsif @range.length == 0 "insert #{@replacement.inspect}" elsif @replacement.empty? "remove #{@range.length} character(s)" else "replace #{@range.length} character(s) with #{@replacement.inspect}" end end end end end parser-3.3.4.2/lib/parser/source/tree_rewriter.rb000066400000000000000000000335121465510415600217410ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source ## # {TreeRewriter} performs the heavy lifting in the source rewriting process. # It schedules code updates to be performed in the correct order. # # For simple cases, the resulting source will be obvious. # # Examples for more complex cases follow. Assume these examples are acting on # the source `'puts(:hello, :world)`. The methods #wrap, #remove, etc. # receive a Range as first argument; for clarity, examples below use english # sentences and a string of raw code instead. # # ## Overlapping ranges: # # Any two rewriting actions on overlapping ranges will fail and raise # a `ClobberingError`, unless they are both deletions (covered next). # # * wrap ':hello, ' with '(' and ')' # * wrap ', :world' with '(' and ')' # => CloberringError # # ## Overlapping deletions: # # * remove ':hello, ' # * remove ', :world' # # The overlapping ranges are merged and `':hello, :world'` will be removed. # This policy can be changed. `:crossing_deletions` defaults to `:accept` # but can be set to `:warn` or `:raise`. # # ## Multiple actions at the same end points: # # Results will always be independent on the order they were given. # Exception: rewriting actions done on exactly the same range (covered next). # # Example: # * replace ', ' by ' => ' # * wrap ':hello, :world' with '{' and '}' # * replace ':world' with ':everybody' # * wrap ':world' with '[', ']' # # The resulting string will be `'puts({:hello => [:everybody]})'` # and this result is independent on the order the instructions were given in. # # Note that if the two "replace" were given as a single replacement of ', :world' # for ' => :everybody', the result would be a `ClobberingError` because of the wrap # in square brackets. # # ## Multiple wraps on same range: # * wrap ':hello' with '(' and ')' # * wrap ':hello' with '[' and ']' # # The wraps are combined in order given and results would be `'puts([(:hello)], :world)'`. # # ## Multiple replacements on same range: # * replace ':hello' by ':hi', then # * replace ':hello' by ':hey' # # The replacements are made in the order given, so the latter replacement # supersedes the former and ':hello' will be replaced by ':hey'. # # This policy can be changed. `:different_replacements` defaults to `:accept` # but can be set to `:warn` or `:raise`. # # ## Swallowed insertions: # wrap 'world' by '__', '__' # replace ':hello, :world' with ':hi' # # A containing replacement will swallow the contained rewriting actions # and `':hello, :world'` will be replaced by `':hi'`. # # This policy can be changed for swallowed insertions. `:swallowed_insertions` # defaults to `:accept` but can be set to `:warn` or `:raise` # # ## Implementation # The updates are organized in a tree, according to the ranges they act on # (where children are strictly contained by their parent), hence the name. 
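    #
    # A minimal usage sketch (buffer name and offsets chosen for the
    # `'puts(:hello, :world)'` source used in the examples above):
    #
    #     buffer   = Parser::Source::Buffer.new('(example)', source: 'puts(:hello, :world)')
    #     rewriter = Parser::Source::TreeRewriter.new(buffer)
    #     range    = Parser::Source::Range.new(buffer, 5, 11) # covers ":hello"
    #
    #     rewriter.replace(range, ':hi')
    #     rewriter.process # => "puts(:hi, :world)"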
# # @!attribute [r] source_buffer # @return [Source::Buffer] # # @!attribute [r] diagnostics # @return [Diagnostic::Engine] # # @api public # class TreeRewriter attr_reader :source_buffer attr_reader :diagnostics ## # @param [Source::Buffer] source_buffer # def initialize(source_buffer, crossing_deletions: :accept, different_replacements: :accept, swallowed_insertions: :accept) @diagnostics = Diagnostic::Engine.new @diagnostics.consumer = -> diag { $stderr.puts diag.render } @source_buffer = source_buffer @in_transaction = false @policy = {crossing_deletions: crossing_deletions, different_replacements: different_replacements, swallowed_insertions: swallowed_insertions}.freeze check_policy_validity @enforcer = method(:enforce_policy) # We need a range that would be jugded as containing all other ranges, # including 0...0 and size...size: all_encompassing_range = @source_buffer.source_range.adjust(begin_pos: -1, end_pos: +1) @action_root = TreeRewriter::Action.new(all_encompassing_range, @enforcer) end ## # Returns true iff no (non trivial) update has been recorded # # @return [Boolean] # def empty? @action_root.empty? end ## # Merges the updates of argument with the receiver. # Policies of the receiver are used. # This action is atomic in that it won't change the receiver # unless it succeeds. # # @param [Rewriter] with # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # def merge!(with) raise 'TreeRewriter are not for the same source_buffer' unless source_buffer == with.source_buffer @action_root = @action_root.combine(with.action_root) self end ## # Returns a new rewriter that consists of the updates of the received # and the given argument. Policies of the receiver are used. # # @param [Rewriter] with # @return [Rewriter] merge of receiver and argument # @raise [ClobberingError] when clobbering is detected # def merge(with) dup.merge!(with) end ## # For special cases where one needs to merge a rewriter attached to a different source_buffer # or that needs to be offset. Policies of the receiver are used. # # @param [TreeRewriter] rewriter from different source_buffer # @param [Integer] offset # @return [Rewriter] self # @raise [IndexError] if action ranges (once offset) don't fit the current buffer # def import!(foreign_rewriter, offset: 0) return self if foreign_rewriter.empty? contracted = foreign_rewriter.action_root.contract merge_effective_range = ::Parser::Source::Range.new( @source_buffer, contracted.range.begin_pos + offset, contracted.range.end_pos + offset, ) check_range_validity(merge_effective_range) merge_with = contracted.moved(@source_buffer, offset) @action_root = @action_root.combine(merge_with) self end ## # Replaces the code of the source range `range` with `content`. # # @param [Range] range # @param [String] content # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # def replace(range, content) combine(range, replacement: content) end ## # Inserts the given strings before and after the given range. 
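      #
      # For example (a sketch, assuming `range` covers `:hello` in
      # `'puts(:hello, :world)'`), `wrap(range, '[', ']')` makes #process
      # eventually return `'puts([:hello], :world)'`.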
# # @param [Range] range # @param [String, nil] insert_before # @param [String, nil] insert_after # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # def wrap(range, insert_before, insert_after) combine(range, insert_before: insert_before.to_s, insert_after: insert_after.to_s) end ## # Shortcut for `replace(range, '')` # # @param [Range] range # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # def remove(range) replace(range, ''.freeze) end ## # Shortcut for `wrap(range, content, nil)` # # @param [Range] range # @param [String] content # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # def insert_before(range, content) wrap(range, content, nil) end ## # Shortcut for `wrap(range, nil, content)` # # @param [Range] range # @param [String] content # @return [Rewriter] self # @raise [ClobberingError] when clobbering is detected # def insert_after(range, content) wrap(range, nil, content) end ## # Applies all scheduled changes to the `source_buffer` and returns # modified source as a new string. # # @return [String] # def process source = @source_buffer.source chunks = [] last_end = 0 @action_root.ordered_replacements.each do |range, replacement| chunks << source[last_end...range.begin_pos] << replacement last_end = range.end_pos end chunks << source[last_end...source.length] chunks.join end ## # Returns a representation of the rewriter as an ordered list of replacements. # # rewriter.as_replacements # => [ [1...1, '('], # [2...4, 'foo'], # [5...6, ''], # [6...6, '!'], # [10...10, ')'], # ] # # This representation is sufficient to recreate the result of `process` but it is # not sufficient to recreate completely the rewriter for further merging/actions. # See `as_nested_actions` # # @return [Array] an ordered list of pairs of range & replacement # def as_replacements @action_root.ordered_replacements end ## # Returns a representation of the rewriter as nested insertions (:wrap) and replacements. # # rewriter.as_actions # =>[ [:wrap, 1...10, '(', ')'], # [:wrap, 2...6, '', '!'], # aka "insert_after" # [:replace, 2...4, 'foo'], # [:replace, 5...6, ''], # aka "removal" # ], # # Contrary to `as_replacements`, this representation is sufficient to recreate exactly # the rewriter. # # @return [Array<(Symbol, Range, String{, String})>] # def as_nested_actions @action_root.nested_actions end ## # Provides a protected block where a sequence of multiple rewrite actions # are handled atomically. If any of the actions failed by clobbering, # all the actions are rolled back. Transactions can be nested. # # @raise [RuntimeError] when no block is passed # def transaction unless block_given? raise "#{self.class}##{__method__} requires block" end previous = @in_transaction @in_transaction = true restore_root = @action_root yield restore_root = nil self ensure @action_root = restore_root if restore_root @in_transaction = previous end def in_transaction? 
@in_transaction end # :nodoc: def inspect "#<#{self.class} #{source_buffer.name}: #{action_summary}>" end ## # @api private # @deprecated Use insert_after or wrap # def insert_before_multi(range, text) self.class.warn_of_deprecation insert_before(range, text) end ## # @api private # @deprecated Use insert_after or wrap # def insert_after_multi(range, text) self.class.warn_of_deprecation insert_after(range, text) end DEPRECATION_WARNING = [ 'TreeRewriter#insert_before_multi and insert_before_multi exist only for legacy compatibility.', 'Please update your code to use `wrap`, `insert_before` or `insert_after` instead.' ].join("\n").freeze extend Deprecation protected attr_reader :action_root private def action_summary replacements = as_replacements case replacements.size when 0 then return 'empty' when 1..3 then #ok else replacements = replacements.first(3) suffix = '…' end parts = replacements.map do |(range, str)| if str.empty? # is this a deletion? "-#{range.to_range}" elsif range.size == 0 # is this an insertion? "+#{str.inspect}@#{range.begin_pos}" else # it is a replacement "^#{str.inspect}@#{range.to_range}" end end parts << suffix if suffix parts.join(', ') end ACTIONS = %i[accept warn raise].freeze def check_policy_validity invalid = @policy.values - ACTIONS raise ArgumentError, "Invalid policy: #{invalid.join(', ')}" unless invalid.empty? end def combine(range, attributes) range = check_range_validity(range) action = TreeRewriter::Action.new(range, @enforcer, **attributes) @action_root = @action_root.combine(action) self end def check_range_validity(range) if range.begin_pos < 0 || range.end_pos > @source_buffer.source.size raise IndexError, "The range #{range.to_range} is outside the bounds of the source" end range end def enforce_policy(event) return if @policy[event] == :accept return unless (values = yield) trigger_policy(event, **values) end POLICY_TO_LEVEL = {warn: :warning, raise: :error}.freeze def trigger_policy(event, range: raise, conflict: nil, **arguments) action = @policy[event] || :raise diag = Parser::Diagnostic.new(POLICY_TO_LEVEL[action], event, arguments, range) @diagnostics.process(diag) if conflict range, *highlights = conflict diag = Parser::Diagnostic.new(POLICY_TO_LEVEL[action], :"#{event}_conflict", arguments, range, highlights) @diagnostics.process(diag) end raise Parser::ClobberingError, "Parser::Source::TreeRewriter detected clobbering" if action == :raise end end end end parser-3.3.4.2/lib/parser/source/tree_rewriter/000077500000000000000000000000001465510415600214105ustar00rootroot00000000000000parser-3.3.4.2/lib/parser/source/tree_rewriter/action.rb000066400000000000000000000224061465510415600232160ustar00rootroot00000000000000# frozen_string_literal: true module Parser module Source ## # @api private # # Actions are arranged in a tree and get combined so that: # # * Children are strictly contained by their parent # * Siblings are all disjointed from one another and ordered # * Only actions with `replacement == nil` may have children # class TreeRewriter::Action attr_reader :range, :replacement, :insert_before, :insert_after def initialize(range, enforcer, insert_before: '', replacement: nil, insert_after: '', children: [] ) @range, @enforcer, @children, @insert_before, @replacement, @insert_after = range, enforcer, children.freeze, insert_before.freeze, replacement, insert_after.freeze freeze end def combine(action) return self if action.empty? # Ignore empty action do_combine(action) end def empty? @insert_before.empty? && @insert_after.empty? 
&& @children.empty? && (@replacement == nil || (@replacement.empty? && @range.empty?)) end def ordered_replacements reps = [] reps << [@range.begin, @insert_before] unless @insert_before.empty? reps << [@range, @replacement] if @replacement reps.concat(@children.flat_map(&:ordered_replacements)) reps << [@range.end, @insert_after] unless @insert_after.empty? reps end def nested_actions actions = [] actions << [:wrap, @range, @insert_before, @insert_after] if !@insert_before.empty? || !@insert_after.empty? actions << [:replace, @range, @replacement] if @replacement actions.concat(@children.flat_map(&:nested_actions)) end def insertion? !insert_before.empty? || !insert_after.empty? || (replacement && !replacement.empty?) end ## # A root action has its range set to the whole source range, even # though it typically does not act on that range. # This method returns the action as if it was a child action with # its range contracted. # @return [Action] def contract raise 'Empty actions can not be contracted' if empty? return self if insertion? range = @range.with( begin_pos: children.first.range.begin_pos, end_pos: children.last.range.end_pos, ) with(range: range) end ## # @return [Action] that has been moved to the given source_buffer and with the given offset # No check is done on validity of resulting range. def moved(source_buffer, offset) moved_range = ::Parser::Source::Range.new( source_buffer, @range.begin_pos + offset, @range.end_pos + offset ) with( range: moved_range, children: children.map { |child| child.moved(source_buffer, offset) } ) end protected attr_reader :children def with(range: @range, enforcer: @enforcer, children: @children, insert_before: @insert_before, replacement: @replacement, insert_after: @insert_after) children = swallow(children) if replacement self.class.new(range, enforcer, children: children, insert_before: insert_before, replacement: replacement, insert_after: insert_after) end # Assumes range.contains?(action.range) && action.children.empty? 
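      # An action whose range equals ours is merged in place; an action
      # strictly contained in our range is pushed down into the children
      # hierarchy instead.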
def do_combine(action) if action.range == @range merge(action) else place_in_hierarchy(action) end end def place_in_hierarchy(action) family = analyse_hierarchy(action) if family[:fusible] fuse_deletions(action, family[:fusible], [*family[:sibbling_left], *family[:child], *family[:sibbling_right]]) else extra_sibbling = if family[:parent] # action should be a descendant of one of the children family[:parent].do_combine(action) elsif family[:child] # or it should become the parent of some of the children, action.with(children: family[:child], enforcer: @enforcer) .combine_children(action.children) else # or else it should become an additional child action end with(children: [*family[:sibbling_left], extra_sibbling, *family[:sibbling_right]]) end end # Assumes `more_children` all contained within `@range` def combine_children(more_children) more_children.inject(self) do |parent, new_child| parent.place_in_hierarchy(new_child) end end def fuse_deletions(action, fusible, other_sibblings) without_fusible = with(children: other_sibblings) fused_range = [action, *fusible].map(&:range).inject(:join) fused_deletion = action.with(range: fused_range) without_fusible.do_combine(fused_deletion) end # Similar to @children.bsearch_index || size # except allows for a starting point # and `bsearch_index` is only Ruby 2.3+ def bsearch_child_index(from = 0) size = @children.size (from...size).bsearch { |i| yield @children[i] } || size end # Returns the children in a hierarchy with respect to `action`: # :sibbling_left, sibbling_right (for those that are disjoint from `action`) # :parent (in case one of our children contains `action`) # :child (in case `action` strictly contains some of our children) # :fusible (in case `action` overlaps some children but they can be fused in one deletion) # or raises a `CloberingError` # In case a child has equal range to `action`, it is returned as `:parent` # Reminder: an empty range 1...1 is considered disjoint from 1...10 def analyse_hierarchy(action) r = action.range # left_index is the index of the first child that isn't completely to the left of action left_index = bsearch_child_index { |child| child.range.end_pos > r.begin_pos } # right_index is the index of the first child that is completely on the right of action start = left_index == 0 ? 0 : left_index - 1 # See "corner case" below for reason of -1 right_index = bsearch_child_index(start) { |child| child.range.begin_pos >= r.end_pos } center = right_index - left_index case center when 0 # All children are disjoint from action, nothing else to do when -1 # Corner case: if a child has empty range == action's range # then it will appear to be both disjoint and to the left of action, # as well as disjoint and to the right of action. # Since ranges are equal, we return it as parent left_index -= 1 # Fix indices, as otherwise this child would be right_index += 1 # considered as a sibbling (both left and right!) parent = @children[left_index] else overlap_left = @children[left_index].range.begin_pos <=> r.begin_pos overlap_right = @children[right_index-1].range.end_pos <=> r.end_pos # For one child to be the parent of action, we must have: if center == 1 && overlap_left <= 0 && overlap_right >= 0 parent = @children[left_index] else # Otherwise consider all non disjoint elements (center) to be contained... contained = @children[left_index...right_index] fusible = check_fusible(action, (contained.shift if overlap_left < 0), # ... but check first and last one (contained.pop if overlap_right > 0) # ... 
for overlaps ) end end { parent: parent, sibbling_left: @children[0...left_index], sibbling_right: @children[right_index...@children.size], fusible: fusible, child: contained, } end # @param [Array(Action | nil)] fusible def check_fusible(action, *fusible) fusible.compact! return if fusible.empty? fusible.each do |child| kind = action.insertion? || child.insertion? ? :crossing_insertions : :crossing_deletions @enforcer.call(kind) { {range: action.range, conflict: child.range} } end fusible end # Assumes action.range == range && action.children.empty? def merge(action) call_enforcer_for_merge(action) with( insert_before: "#{action.insert_before}#{insert_before}", replacement: action.replacement || @replacement, insert_after: "#{insert_after}#{action.insert_after}", ).combine_children(action.children) end def call_enforcer_for_merge(action) @enforcer.call(:different_replacements) do if @replacement && action.replacement && @replacement != action.replacement {range: @range, replacement: action.replacement, other_replacement: @replacement} end end end def swallow(children) @enforcer.call(:swallowed_insertions) do insertions = children.select(&:insertion?) {range: @range, conflict: insertions.map(&:range)} unless insertions.empty? end [] end end end end parser-3.3.4.2/lib/parser/static_environment.rb000066400000000000000000000062161465510415600214730ustar00rootroot00000000000000# frozen_string_literal: true module Parser class StaticEnvironment FORWARD_ARGS = :FORWARD_ARGS ANONYMOUS_RESTARG_IN_CURRENT_SCOPE = :ANONYMOUS_RESTARG_IN_CURRENT_SCOPE ANONYMOUS_RESTARG_INHERITED = :ANONYMOUS_RESTARG_INHERITED ANONYMOUS_KWRESTARG_IN_CURRENT_SCOPE = :ANONYMOUS_KWRESTARG_IN_CURRENT_SCOPE ANONYMOUS_KWRESTARG_INHERITED = :ANONYMOUS_KWRESTARG_INHERITED ANONYMOUS_BLOCKARG_IN_CURRENT_SCOPE = :ANONYMOUS_BLOCKARG_IN_CURRENT_SCOPE ANONYMOUS_BLOCKARG_INHERITED = :ANONYMOUS_BLOCKARG_INHERITED def initialize reset end def reset @variables = Set[] @stack = [] end def extend_static @stack.push(@variables) @variables = Set[] self end def extend_dynamic @stack.push(@variables) @variables = @variables.dup if @variables.delete(ANONYMOUS_BLOCKARG_IN_CURRENT_SCOPE) @variables.add(ANONYMOUS_BLOCKARG_INHERITED) end if @variables.delete(ANONYMOUS_RESTARG_IN_CURRENT_SCOPE) @variables.add(ANONYMOUS_RESTARG_INHERITED) end if @variables.delete(ANONYMOUS_KWRESTARG_IN_CURRENT_SCOPE) @variables.add(ANONYMOUS_KWRESTARG_INHERITED) end self end def unextend @variables = @stack.pop self end def declare(name) @variables.add(name.to_sym) self end def declared?(name) @variables.include?(name.to_sym) end # Forward args def declare_forward_args declare(FORWARD_ARGS) end def declared_forward_args? declared?(FORWARD_ARGS) end # Anonymous blockarg def declare_anonymous_blockarg declare(ANONYMOUS_BLOCKARG_IN_CURRENT_SCOPE) end def declared_anonymous_blockarg? declared?(ANONYMOUS_BLOCKARG_IN_CURRENT_SCOPE) || declared?(ANONYMOUS_BLOCKARG_INHERITED) end def declared_anonymous_blockarg_in_current_scpe? declared?(ANONYMOUS_BLOCKARG_IN_CURRENT_SCOPE) end def parent_has_anonymous_blockarg? @stack.any? { |variables| variables.include?(ANONYMOUS_BLOCKARG_IN_CURRENT_SCOPE) } end # Anonymous restarg def declare_anonymous_restarg declare(ANONYMOUS_RESTARG_IN_CURRENT_SCOPE) end def declared_anonymous_restarg? declared?(ANONYMOUS_RESTARG_IN_CURRENT_SCOPE) || declared?(ANONYMOUS_RESTARG_INHERITED) end def declared_anonymous_restarg_in_current_scope? declared?(ANONYMOUS_RESTARG_IN_CURRENT_SCOPE) end def parent_has_anonymous_restarg? @stack.any? 
{ |variables| variables.include?(ANONYMOUS_RESTARG_IN_CURRENT_SCOPE) } end # Anonymous kwresarg def declare_anonymous_kwrestarg declare(ANONYMOUS_KWRESTARG_IN_CURRENT_SCOPE) end def declared_anonymous_kwrestarg? declared?(ANONYMOUS_KWRESTARG_IN_CURRENT_SCOPE) || declared?(ANONYMOUS_KWRESTARG_INHERITED) end def declared_anonymous_kwrestarg_in_current_scope? declared?(ANONYMOUS_KWRESTARG_IN_CURRENT_SCOPE) end def parent_has_anonymous_kwrestarg? @stack.any? { |variables| variables.include?(ANONYMOUS_KWRESTARG_IN_CURRENT_SCOPE) } end def empty? @stack.empty? end end end parser-3.3.4.2/lib/parser/syntax_error.rb000066400000000000000000000006731465510415600203200ustar00rootroot00000000000000# frozen_string_literal: true module Parser ## # {Parser::SyntaxError} is raised whenever parser detects a syntax error, # similar to the standard SyntaxError class. # # @api public # # @!attribute [r] diagnostic # @return [Parser::Diagnostic] # class SyntaxError < StandardError attr_reader :diagnostic def initialize(diagnostic) @diagnostic = diagnostic super(diagnostic.message) end end end parser-3.3.4.2/lib/parser/tree_rewriter.rb000066400000000000000000000067251465510415600204470ustar00rootroot00000000000000# frozen_string_literal: true module Parser ## # {Parser::TreeRewriter} offers a basic API that makes it easy to rewrite # existing ASTs. It's built on top of {Parser::AST::Processor} and # {Parser::Source::TreeRewriter} # # For example, assume you want to remove `do` tokens from a while statement. # You can do this as following: # # require 'parser/current' # # class RemoveDo < Parser::TreeRewriter # def on_while(node) # # Check if the statement starts with "do" # if node.location.begin.is?('do') # remove(node.location.begin) # end # end # end # # code = <<-EOF # while true do # puts 'hello' # end # EOF # # ast = Parser::CurrentRuby.parse code # buffer = Parser::Source::Buffer.new('(example)', source: code) # rewriter = RemoveDo.new # # # Rewrite the AST, returns a String with the new form. # puts rewriter.rewrite(buffer, ast) # # This would result in the following Ruby code: # # while true # puts 'hello' # end # # Keep in mind that {Parser::TreeRewriter} does not take care of indentation when # inserting/replacing code so you'll have to do this yourself. # # See also [a blog entry](http://whitequark.org/blog/2013/04/26/lets-play-with-ruby-code/) # describing rewriters in greater detail. # # @api public # class TreeRewriter < Parser::AST::Processor ## # Rewrites the AST/source buffer and returns a String containing the new # version. # # @param [Parser::Source::Buffer] source_buffer # @param [Parser::AST::Node] ast # @param [Symbol] crossing_deletions:, different_replacements:, swallowed_insertions: # policy arguments for TreeRewriter (optional) # @return [String] # def rewrite(source_buffer, ast, **policy) @source_rewriter = Parser::Source::TreeRewriter.new(source_buffer, **policy) process(ast) @source_rewriter.process end ## # Returns `true` if the specified node is an assignment node, returns false # otherwise. # # @param [Parser::AST::Node] node # @return [Boolean] # def assignment?(node) [:lvasgn, :ivasgn, :gvasgn, :cvasgn, :casgn].include?(node.type) end ## # Removes the source range. # # @param [Parser::Source::Range] range # def remove(range) @source_rewriter.remove(range) end ## # Wraps the given source range with the given values. 
# # @param [Parser::Source::Range] range # @param [String] content # def wrap(range, before, after) @source_rewriter.wrap(range, before, after) end ## # Inserts new code before the given source range. # # @param [Parser::Source::Range] range # @param [String] content # def insert_before(range, content) @source_rewriter.insert_before(range, content) end ## # Inserts new code after the given source range. # # @param [Parser::Source::Range] range # @param [String] content # def insert_after(range, content) @source_rewriter.insert_after(range, content) end ## # Replaces the code of the source range `range` with `content`. # # @param [Parser::Source::Range] range # @param [String] content # def replace(range, content) @source_rewriter.replace(range, content) end end end parser-3.3.4.2/lib/parser/unknown_encoding_in_magic_comment_error.rb000066400000000000000000000007611465510415600257050ustar00rootroot00000000000000# frozen_string_literal: true module Parser ## # {Parser::UnknownEncodingInMagicComment} is raised when a magic encoding # comment is encountered that the currently running Ruby version doesn't # recognize. It inherits from {ArgumentError} since that is the exception # Ruby itself raises when trying to execute a file with an unknown encoding. # As such, it is also not a {Parser::SyntaxError}. # # @api public # class UnknownEncodingInMagicComment < ArgumentError end end parser-3.3.4.2/lib/parser/variables_stack.rb000066400000000000000000000006731465510415600207160ustar00rootroot00000000000000# frozen_string_literal: true module Parser class VariablesStack def initialize @stack = [] push end def empty? @stack.empty? end def push @stack << Set.new end def pop @stack.pop end def reset @stack.clear end def declare(name) @stack.last << name.to_sym end def declared?(name) @stack.last.include?(name.to_sym) end end end parser-3.3.4.2/lib/parser/version.rb000066400000000000000000000001071465510415600172360ustar00rootroot00000000000000# frozen_string_literal: true module Parser VERSION = '3.3.4.2' end parser-3.3.4.2/parser.gemspec000066400000000000000000000031431465510415600160260ustar00rootroot00000000000000# encoding: utf-8 # frozen_string_literal: true require File.expand_path('../lib/parser/version', __FILE__) Gem::Specification.new do |spec| spec.name = 'parser' spec.version = Parser::VERSION spec.authors = ['whitequark'] spec.email = ['whitequark@whitequark.org'] spec.description = 'A Ruby parser written in pure Ruby.' 
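  # Illustrative only: a project consuming this gem would typically declare it
  # in its Gemfile, e.g. `gem 'parser', '~> 3.3'` (the constraint shown is an
  # example, not something this gemspec prescribes).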
spec.summary = spec.description spec.homepage = 'https://github.com/whitequark/parser' spec.license = 'MIT' spec.metadata = { 'bug_tracker_uri' => 'https://github.com/whitequark/parser/issues', 'changelog_uri' => "https://github.com/whitequark/parser/blob/v#{spec.version}/CHANGELOG.md", 'documentation_uri' => "https://www.rubydoc.info/gems/parser/#{spec.version}", 'source_code_uri' => "https://github.com/whitequark/parser/tree/v#{spec.version}" } spec.files = Dir['bin/*', 'lib/**/*.rb', 'parser.gemspec', 'LICENSE.txt'] spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } spec.require_paths = ['lib'] spec.required_ruby_version = '>= 2.0.0' spec.add_dependency 'ast', '~> 2.4.1' spec.add_dependency 'racc' spec.add_development_dependency 'bundler', '>= 1.15', '< 3.0.0' spec.add_development_dependency 'rake', '~> 13.0.1' spec.add_development_dependency 'cliver', '~> 0.3.2' spec.add_development_dependency 'yard' spec.add_development_dependency 'kramdown' spec.add_development_dependency 'minitest', '~> 5.10' spec.add_development_dependency 'simplecov', '~> 0.15.1' spec.add_development_dependency 'gauntlet' end parser-3.3.4.2/test/000077500000000000000000000000001465510415600141435ustar00rootroot00000000000000parser-3.3.4.2/test/bug_163/000077500000000000000000000000001465510415600153115ustar00rootroot00000000000000parser-3.3.4.2/test/bug_163/fixtures/000077500000000000000000000000001465510415600171625ustar00rootroot00000000000000parser-3.3.4.2/test/bug_163/fixtures/input.rb000066400000000000000000000001031465510415600206400ustar00rootroot00000000000000# frozen_string_literal: true if(true) puts "Hello, world!" end parser-3.3.4.2/test/bug_163/fixtures/output.rb000066400000000000000000000001021465510415600210400ustar00rootroot00000000000000# frozen_string_literal: true if true puts "Hello, world!" end parser-3.3.4.2/test/bug_163/rewriter.rb000066400000000000000000000012501465510415600174770ustar00rootroot00000000000000# frozen_string_literal: true class Rewriter < Parser::Rewriter def on_if(node) # Crude, totally-not-usable-in-the-real-world code to remove optional # parens from control keywords. # # In a perfect test scenario we'd simply make this a no-op, to demonstrate # that the bug happens when any rewriter is loaded regardless of whether it # actually changes anything but that makes assertions much harder to get # right. It's much easier to just show that the file did, or did not # get changed. if node.children[0].type == :begin replace node.children[0].loc.begin, ' ' remove node.children[0].loc.end end super end end parser-3.3.4.2/test/helper.rb000066400000000000000000000041611465510415600157510ustar00rootroot00000000000000# frozen_string_literal: true require 'tempfile' require 'simplecov' if ENV.include?('COVERAGE') && SimpleCov.usable? require_relative 'racc_coverage_helper' RaccCoverage.start( %w( ruby18.y ruby19.y ruby20.y ruby21.y ruby22.y ruby23.y ruby24.y ruby25.y ruby26.y ruby27.y ruby30.y ), File.expand_path('../../lib/parser', __FILE__)) # Report results faster. at_exit { RaccCoverage.stop } SimpleCov.start do self.formatter = SimpleCov::Formatter::MultiFormatter.new( SimpleCov::Formatter::HTMLFormatter ) add_group 'Grammars' do |source_file| source_file.filename =~ %r{\.y$} end # Exclude the testsuite itself. add_filter '/test/' # Exclude generated files. add_filter do |source_file| source_file.filename =~ %r{/lib/parser/(lexer|ruby\d+|macruby|rubymotion)\.rb$} end end end # minitest/autorun must go after SimpleCov to preserve # correct order of at_exit hooks. 
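# (`at_exit` hooks run in reverse registration order; SimpleCov's reporting
# hook is already registered above, so registering Minitest's hook afterwards
# ensures the tests run before the coverage report is written.)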
require 'minitest/autorun' $LOAD_PATH.unshift(File.expand_path('../../lib', __FILE__)) require 'parser' module NodeCollector extend self attr_accessor :callbacks, :nodes self.callbacks = [] self.nodes = [] def check @callbacks.each do |callback| @nodes.each { |node| callback.call(node) } end puts "#{callbacks.size} additional tests on #{nodes.size} nodes ran successfully" end Minitest.after_run { check } end def for_each_node(&block) NodeCollector.callbacks << block end class Parser::AST::Node def initialize(type, *) NodeCollector.nodes << self super end end # Special test extension that records a context of the parser # for any node that is created module NodeContextExt module NodeExt attr_reader :context def assign_properties(properties) super if (context = properties[:context]) @context = context end end end Parser::AST::Node.prepend(NodeExt) module BuilderExt def n(type, children, source_map) super.updated(nil, nil, context: @parser.context.dup) end end Parser::Builders::Default.prepend(BuilderExt) end parser-3.3.4.2/test/parse_helper.rb000066400000000000000000000273061465510415600171510ustar00rootroot00000000000000# frozen_string_literal: true module ParseHelper include AST::Sexp require 'parser/all' require 'parser/macruby' require 'parser/rubymotion' ALL_VERSIONS = %w(1.8 1.9 2.0 2.1 2.2 2.3 2.4 2.5 2.6 2.7 3.0 3.1 3.2 3.3 3.4 mac ios) def setup @diagnostics = [] super if defined?(super) end def parser_for_ruby_version(version) case version when '1.8' then parser = Parser::Ruby18.new when '1.9' then parser = Parser::Ruby19.new when '2.0' then parser = Parser::Ruby20.new when '2.1' then parser = Parser::Ruby21.new when '2.2' then parser = Parser::Ruby22.new when '2.3' then parser = Parser::Ruby23.new when '2.4' then parser = Parser::Ruby24.new when '2.5' then parser = Parser::Ruby25.new when '2.6' then parser = Parser::Ruby26.new when '2.7' then parser = Parser::Ruby27.new when '3.0' then parser = Parser::Ruby30.new when '3.1' then parser = Parser::Ruby31.new when '3.2' then parser = Parser::Ruby32.new when '3.3' then parser = Parser::Ruby33.new when '3.4' then parser = Parser::Ruby34.new when 'mac' then parser = Parser::MacRuby.new when 'ios' then parser = Parser::RubyMotion.new else raise "Unrecognized Ruby version #{version}" end parser.diagnostics.consumer = lambda do |diagnostic| @diagnostics << diagnostic end parser end def with_versions(versions) (versions & ALL_VERSIONS).each do |version| @diagnostics.clear parser = parser_for_ruby_version(version) yield version, parser end end def assert_source_range(expect_range, range, version, what) if expect_range == nil # Avoid "Use assert_nil if expecting nil from .... 
This will fail in Minitest 6."" assert_nil range, "(#{version}) range of #{what}" else assert range.is_a?(Parser::Source::Range), "(#{version}) #{range.inspect}.is_a?(Source::Range) for #{what}" assert_equal expect_range, range.to_range, "(#{version}) range of #{what}" end end # Use like this: # ~~~ # assert_parses( # s(:send, s(:lit, 10), :+, s(:lit, 20)) # %q{10 + 20}, # %q{~~~~~~~ expression # | ^ operator # | ~~ expression (lit) # }, # %w(1.8 1.9) # optional # ) # ~~~ def assert_parses(ast, code, source_maps='', versions=ALL_VERSIONS) with_versions(versions) do |version, parser| try_parsing(ast, code, parser, source_maps, version) end # Also try parsing with lexer set to use UTF-32LE internally with_versions(versions) do |version, parser| parser.instance_eval { @lexer.force_utf32 = true } try_parsing(ast, code, parser, source_maps, version) end # Also check that it doesn't throw anything # except (possibly) Parser::SyntaxError on other versions of Ruby with_versions(ALL_VERSIONS - versions) do |version, parser| begin source_file = Parser::Source::Buffer.new('(assert_older_rubies)', source: code) parser.parse(source_file) rescue Parser::SyntaxError # ok rescue StandardError # unacceptable raise else # No error means that `code` is valid for `version`, but has a different meaning. # Sometimes Ruby has breaking changes (like numparams) # that re-use constructions from previous versions. end end end def try_parsing(ast, code, parser, source_maps, version) source_file = Parser::Source::Buffer.new('(assert_parses)', source: code) begin parsed_ast = parser.parse(source_file) rescue => exc backtrace = exc.backtrace Exception.instance_method(:initialize).bind(exc). call("(#{version}) #{exc.message}") exc.set_backtrace(backtrace) raise end if ast.nil? assert_nil parsed_ast, "(#{version}) AST equality" return end assert_equal ast, parsed_ast, "(#{version}) AST equality" parse_source_map_descriptions(source_maps) do |range, map_field, ast_path, line| astlet = traverse_ast(parsed_ast, ast_path) if astlet.nil? # This is a testsuite bug. raise "No entity with AST path #{ast_path} in #{parsed_ast.inspect}" end assert astlet.frozen? 
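      # For example (hypothetical path), an ast_path of %w(dstr str/2) is
      # resolved by traverse_ast below to the second :str child of the first
      # :dstr child of the parsed AST.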
assert astlet.location.respond_to?(map_field), "(#{version}) #{astlet.location.inspect}.respond_to?(#{map_field.inspect}) for:\n#{parsed_ast.inspect}" found_range = astlet.location.send(map_field) assert_source_range(range, found_range, version, line.inspect) end assert_state_is_final(parser, version) end # Use like this: # ~~~ # assert_diagnoses( # [:warning, :ambiguous_prefix, { prefix: '*' }], # %q{foo *bar}, # %q{ ^ location # | ~~~ highlights (0)}) # ~~~ def assert_diagnoses(diagnostic, code, source_maps='', versions=ALL_VERSIONS) with_versions(versions) do |version, parser| source_file = Parser::Source::Buffer.new('(assert_diagnoses)', source: code) begin parser = parser.parse(source_file) rescue Parser::SyntaxError # do nothing; the diagnostic was reported end assert_equal 1, @diagnostics.count, "(#{version}) emits a single diagnostic, not\n" \ "#{@diagnostics.map(&:render).join("\n")}" emitted_diagnostic = @diagnostics.first level, reason, arguments = diagnostic arguments ||= {} message = Parser::Messages.compile(reason, arguments) assert_equal level, emitted_diagnostic.level assert_equal reason, emitted_diagnostic.reason assert_equal arguments, emitted_diagnostic.arguments assert_equal message, emitted_diagnostic.message parse_source_map_descriptions(source_maps) do |range, map_field, ast_path, line| case map_field when 'location' assert_source_range range, emitted_diagnostic.location, version, 'location' when 'highlights' index = ast_path.first.to_i assert_source_range range, emitted_diagnostic.highlights[index], version, "#{index}th highlight" else raise "Unknown diagnostic range #{map_field}" end end end end # Use like this: # ~~~ # assert_diagnoses_many( # [ # [:warning, :ambiguous_literal], # [:error, :unexpected_token, { :token => :tLCURLY }] # ], # %q{m /foo/ {}}, # SINCE_2_4) # ~~~ def assert_diagnoses_many(diagnostics, code, versions=ALL_VERSIONS) with_versions(versions) do |version, parser| source_file = Parser::Source::Buffer.new('(assert_diagnoses_many)', source: code) begin parser = parser.parse(source_file) rescue Parser::SyntaxError # do nothing; the diagnostic was reported end assert_equal diagnostics.count, @diagnostics.count diagnostics.zip(@diagnostics) do |expected_diagnostic, actual_diagnostic| level, reason, arguments = expected_diagnostic arguments ||= {} message = Parser::Messages.compile(reason, arguments) assert_equal level, actual_diagnostic.level assert_equal reason, actual_diagnostic.reason assert_equal arguments, actual_diagnostic.arguments assert_equal message, actual_diagnostic.message end end end def refute_diagnoses(code, versions=ALL_VERSIONS) with_versions(versions) do |version, parser| source_file = Parser::Source::Buffer.new('(refute_diagnoses)', source: code) begin parser = parser.parse(source_file) rescue Parser::SyntaxError # do nothing; the diagnostic was reported end assert_empty @diagnostics, "(#{version}) emits no diagnostics, not\n" \ "#{@diagnostics.map(&:render).join("\n")}" end end def assert_context(context, code, versions=ALL_VERSIONS) with_versions(versions) do |version, parser| source_file = Parser::Source::Buffer.new('(assert_context)', source: code) parsed_ast = parser.parse(source_file) nodes = find_matching_nodes(parsed_ast) { |node| node.type == :send && node.children[1] == :get_context } assert_equal 1, nodes.count, "there must exactly 1 `get_context()` call" node = nodes.first actual_context = Parser::Context::FLAGS.each_with_object([]) { |flag, acc| acc << flag if node.context.public_send(flag) } assert_equal 
context.sort, actual_context.sort, "(#{version}) expect parsing context to match" end end SOURCE_MAP_DESCRIPTION_RE = /(?x) ^(?# $1 skip) ^(\s*) (?# $2 highlight) ([~\^]+|\!) \s+ (?# $3 source_map_field) ([a-z_]+) (?# $5 ast_path) (\s+\(([a-z_.\/0-9]+)\))? $/ def parse_source_map_descriptions(descriptions) unless block_given? return to_enum(:parse_source_map_descriptions, descriptions) end descriptions.each_line do |line| # Remove leading " |", if it exists. line = line.sub(/^\s*\|/, '').rstrip next if line.empty? if (match = SOURCE_MAP_DESCRIPTION_RE.match(line)) if match[2] != '!' begin_pos = match[1].length end_pos = begin_pos + match[2].length range = begin_pos...end_pos end source_map_field = match[3] if match[5] ast_path = match[5].split('.') else ast_path = [] end yield range, source_map_field, ast_path, line else raise "Cannot parse source map description line: #{line.inspect}." end end end def traverse_ast(ast, path) path.inject(ast) do |astlet, path_component| # Split "dstr/2" to :dstr and 1 type_str, index_str = path_component.split('/') type = type_str.to_sym if index_str.nil? index = 0 else index = index_str.to_i - 1 end matching_children = \ astlet.children.select do |child| AST::Node === child && child.type == type end matching_children[index] end end def find_matching_nodes(ast, &block) return [] unless ast.is_a?(AST::Node) result = [] result << ast if block.call(ast) ast.children.each { |child| result += find_matching_nodes(child, &block) } result end def assert_state_is_final(parser, version) lexer = parser.lexer assert lexer.cmdarg.empty?, "(#{version}) expected cmdarg to be empty after parsing" assert lexer.cond.empty?, "(#{version}) expected cond to be empty after parsing" assert lexer.cmdarg_stack.empty?, "(#{version}) expected cmdarg_stack to be empty after parsing" assert lexer.cond_stack.empty?, "(#{version}) expected cond_stack to be empty after parsing" assert_equal 0, lexer.paren_nest, "(#{version}) expected paren_nest to be 0 after parsing" assert lexer.lambda_stack.empty?, "(#{version}) expected lambda_stack to be empty after parsing" assert parser.static_env.empty?, "(#{version}) expected static_env to be empty after parsing" Parser::Context::FLAGS.each do |ctx_flag| refute parser.context.public_send(ctx_flag), "(#{version}) expected context.#{ctx_flag} to be `false` after parsing" end assert parser.max_numparam_stack.empty?, "(#{version}) expected max_numparam_stack to be empty after parsing" assert parser.current_arg_stack.empty?, "(#{version}) expected current_arg_stack to be empty after parsing" assert parser.pattern_variables.empty?, "(#{version}) expected pattern_variables to be empty after parsing" assert parser.pattern_hash_keys.empty?, "(#{version}) expected pattern_hash_keys to be empty after parsing" end end parser-3.3.4.2/test/racc_coverage_helper.rb000066400000000000000000000064061465510415600206200ustar00rootroot00000000000000# frozen_string_literal: true require 'racc/grammarfileparser' # Unfortunately, Ruby's Coverage module ignores module_eval statements, # which Racc uses to map `parser.y` locations in the generated # `parser.rb`. 
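# A minimal usage sketch (mirroring test/helper.rb; grammar list shortened
# here for illustration):
#
#   RaccCoverage.start(
#     %w(ruby30.y),
#     File.expand_path('../../lib/parser', __FILE__))
#   at_exit { RaccCoverage.stop }
#
# SimpleCov.result is then merged with RaccCoverage.result via the patch at
# the bottom of this file.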
module RaccCoverage @coverage = {} @base_path = nil @trace = nil def self.start(parsers, base_path) @base_path = base_path parsers.each do |parser| @coverage[parser] = extract_interesting_lines(parser, base_path) end @trace = TracePoint.new(:line) do |trace| lineno = trace.lineno - 1 if (line_coverage = @coverage[trace.path]) if line_coverage[lineno] line_coverage[lineno] += 1 end end end @trace.enable end def self.stop @trace.disable end # Ruby's TracePoint#lineno will point only on "interesting" lines, # i.e.: only code (no comments or empty lines), no `end` keywords, # and for multi-line statements, only the first line of the statement. # # This method implements a very dumb Ruby parser, which skips empty lines # or lines with just comments, `end` keywords, and correctly handles # multi-line statements of the following form: # # * All lines of the statement except the last must end with `,`, `.` or `(`. # # Coverage can be disabled for code regions with annotations :nocov: and :cov:. # # Also, for best results, all actions should be delimited by at least # one non-action line. # def self.extract_interesting_lines(parser, base_path) grammar_source = File.join(@base_path, parser) grammar_file = Racc::GrammarFileParser.parse_file(grammar_source) ruby_sources = [ # Header and footer aren't passed through module_eval # in Racc-generated file, so the location info is lost. *grammar_file.params.inner, ].compact grammar_file.grammar.each_rule do |rule| source = rule.action.source next if source.nil? ruby_sources << source end lines = [] ruby_sources.each do |source| first_line = source.lineno state = :first_line source.text.each_line.with_index do |line, index| line = line.strip continues = line.end_with?(',') || line.end_with?('(') || line.end_with?('.') case state when :first_line if line =~ /:nocov/ state = :nocov next elsif line.empty? 
|| line == 'end' || line.start_with?('#') next elsif continues state = :mid_line end lines[first_line + index - 1] = 0 when :mid_line unless continues state = :first_line end when :nocov if line =~ /:cov:/ state = :first_line end end end end lines end def self.result result = @coverage.map do |parser, coverage| [File.join(@base_path, parser), coverage] end Hash[result] end end class << SimpleCov def result_with_racc_coverage @result ||= SimpleCov::Result.new( Coverage.result.merge(RaccCoverage.result)) result_without_racc_coverage end alias result_without_racc_coverage result alias result result_with_racc_coverage end parser-3.3.4.2/test/test_ast_processor.rb000066400000000000000000000013231465510415600204140ustar00rootroot00000000000000# frozen_string_literal: true require 'helper' class TestASTProcessor < Minitest::Test LEAF_NODES = %i[ sym str int float complex rational true false nil self __FILE__ __LINE__ __ENCODING__ cbase regopt zsuper match_with_trailing_comma match_nil_pattern forward_args forwarded_args numargs kwnilarg objc_varargs objc_restarg objc_kwarg ident ].freeze def setup @traversible = Parser::AST::Processor .instance_methods(false) .map { |mid| mid.to_s.scan(/\Aon_(.*)/) } .flatten .map(&:to_sym) @traversible += LEAF_NODES end def test_nodes_are_traversible for_each_node do |node| assert_includes @traversible, node.type end end end parser-3.3.4.2/test/test_base.rb000066400000000000000000000016361465510415600164470ustar00rootroot00000000000000# frozen_string_literal: true require 'helper' require 'parser/current' class TestBase < Minitest::Test include AST::Sexp def test_parse ast = Parser::CurrentRuby.parse('1') assert_equal s(:int, 1), ast end def test_parse_with_comments ast, comments = Parser::CurrentRuby.parse_with_comments('1 # foo') assert_equal s(:int, 1), ast assert_equal 1, comments.size assert_equal '# foo', comments.first.text end def test_loc_to_node ast = Parser::CurrentRuby.parse('1') assert_equal ast.loc.node, ast end def test_loc_dup ast = Parser::CurrentRuby.parse('1') assert_nil ast.loc.dup.node Parser::AST::Node.new(:zsuper, [], :location => ast.loc) end def test_node_ractor ast = Parser::CurrentRuby.parse('1') ::Ractor.make_shareable(ast) assert ::Ractor.shareable?(ast) assert_equal '1', ast.loc.expression.source end if defined?(::Ractor) end parser-3.3.4.2/test/test_current.rb000066400000000000000000000023131465510415600172100ustar00rootroot00000000000000# frozen_string_literal: true require 'helper' require 'parser/current' class TestCurrent < Minitest::Test def test_current case RUBY_VERSION when '2.0.0' assert_equal Parser::Ruby20, Parser::CurrentRuby when /^2\.1\.\d+/ assert_equal Parser::Ruby21, Parser::CurrentRuby when /^2\.2\.\d+/ assert_equal Parser::Ruby22, Parser::CurrentRuby when /^2\.3\.\d+/ assert_equal Parser::Ruby23, Parser::CurrentRuby when /^2\.4\.\d+/ assert_equal Parser::Ruby24, Parser::CurrentRuby when /^2\.5\.\d+/ assert_equal Parser::Ruby25, Parser::CurrentRuby when /^2\.6\.\d+/ assert_equal Parser::Ruby26, Parser::CurrentRuby when /^2\.7\.\d+/ assert_equal Parser::Ruby27, Parser::CurrentRuby when /^3\.0\.\d+/ assert_equal Parser::Ruby30, Parser::CurrentRuby when /^3\.1\.\d+/ assert_equal Parser::Ruby31, Parser::CurrentRuby when /^3\.2\.\d+/ assert_equal Parser::Ruby32, Parser::CurrentRuby when /^3\.3\.\d+/ assert_equal Parser::Ruby33, Parser::CurrentRuby when /^3\.4\.\d+/ assert_equal Parser::Ruby34, Parser::CurrentRuby else flunk "Update test_current for #{RUBY_VERSION}" end end end 
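# In other words, requiring 'parser/current' on a 3.3.x interpreter resolves
# Parser::CurrentRuby to Parser::Ruby33, so Parser::CurrentRuby.parse('1')
# (as exercised in test_base.rb) goes through the 3.3 grammar.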
parser-3.3.4.2/test/test_diagnostic.rb000066400000000000000000000051601465510415600176550ustar00rootroot00000000000000# frozen_string_literal: true require 'helper' class TestDiagnostic < Minitest::Test def setup @buffer = Parser::Source::Buffer.new('(string)', source: 'if (this is some bad code + bugs)') @range1 = Parser::Source::Range.new(@buffer, 0, 2) # if @range2 = Parser::Source::Range.new(@buffer, 4, 8) # this end def test_verifies_levels error = assert_raises ArgumentError do Parser::Diagnostic.new(:foobar, :escape_eof, {}, @range1) end assert_match(/level/, error.message) end def test_freezes string = 'foo'.dup highlights = [@range2] diag = Parser::Diagnostic.new(:error, :escape_eof, @range1, highlights) assert diag.frozen? assert diag.arguments.frozen? assert diag.highlights.frozen? refute string.frozen? refute highlights.frozen? end def test_render location = Parser::Source::Range.new(@buffer, 26, 27) highlights = [ Parser::Source::Range.new(@buffer, 21, 25), Parser::Source::Range.new(@buffer, 28, 32) ] diag = Parser::Diagnostic.new(:error, :unexpected, { :character => '+' }, location, highlights) assert_equal([ "(string):1:27: error: unexpected `+'", '(string):1: if (this is some bad code + bugs)', '(string):1: ~~~~ ^ ~~~~ ' ], diag.render) end def test_multiline_render @buffer = Parser::Source::Buffer.new('(string)', source: "abc abc abc\ndef def def\nghi ghi ghi\n") location = Parser::Source::Range.new(@buffer, 4, 27) highlights = [ Parser::Source::Range.new(@buffer, 0, 3), Parser::Source::Range.new(@buffer, 28, 31) ] diag = Parser::Diagnostic.new(:error, :unexpected_token, { :token => 'ghi' }, location, highlights) assert_equal([ "(string):1:5-3:3: error: unexpected token ghi", '(string):1: abc abc abc', '(string):1: ~~~ ^~~~~~~...', '(string):3: ghi ghi ghi', '(string):3: ~~~ ~~~ ' ], diag.render) end def test_bug_error_on_newline # regression test; see GitHub issue 273 source = <<-CODE { foo: ->() # I forgot my brace } } CODE @buffer = Parser::Source::Buffer.new('(string)', source: source) location = Parser::Source::Range.new(@buffer, 33, 34) diag = Parser::Diagnostic.new(:error, :unexpected_token, { :token => 'tNL' }, location) assert_equal([ '(string):2:32: error: unexpected token tNL', '(string):2: foo: ->() # I forgot my brace', '(string):2: ^' ], diag.render) end end parser-3.3.4.2/test/test_diagnostic_engine.rb000066400000000000000000000024131465510415600212000ustar00rootroot00000000000000# frozen_string_literal: true require 'helper' class TestDiagnosticEngine < Minitest::Test def setup @engine = Parser::Diagnostic::Engine.new @queue = [] @engine.consumer = lambda { |diag| @queue << diag } end def test_process_warnings warn = Parser::Diagnostic.new(:warning, :invalid_escape, {}, 1..2) @engine.process(warn) assert_equal [warn], @queue end def test_ignore_warnings @engine.ignore_warnings = true warn = Parser::Diagnostic.new(:warning, :invalid_escape, {}, 1..2) @engine.process(warn) assert_equal [], @queue end def test_all_errors_are_fatal @engine.all_errors_are_fatal = true error = Parser::Diagnostic.new(:error, :invalid_escape, {}, 1..2) err = assert_raises Parser::SyntaxError do @engine.process(error) end assert_equal error, err.diagnostic assert_equal [error], @queue end def test_all_errors_are_collected error = Parser::Diagnostic.new(:error, :invalid_escape, {}, 1..2) @engine.process(error) assert_equal [error], @queue end def test_fatal_error fatal = Parser::Diagnostic.new(:fatal, :invalid_escape, {}, 1..2) assert_raises Parser::SyntaxError do 
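      # Unlike :error diagnostics, which only raise when all_errors_are_fatal
      # is enabled (see test_all_errors_are_fatal above), a :fatal diagnostic
      # raises Parser::SyntaxError unconditionally.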
@engine.process(fatal) end assert_equal [fatal], @queue end end parser-3.3.4.2/test/test_encoding.rb000066400000000000000000000047361465510415600173270ustar00rootroot00000000000000# encoding: binary # frozen_string_literal: true require 'helper' class TestEncoding < Minitest::Test include AST::Sexp def recognize(string) Parser::Source::Buffer.recognize_encoding(string) end require 'parser/all' def test_default assert_nil recognize('foobar') end def test_bom assert_equal Encoding::UTF_8, recognize("\xef\xbb\xbf\nfoobar") assert_equal Encoding::UTF_8, recognize("\xef\xbb\xbf# coding:koi8-r\nfoobar") end def test_magic_comment assert_equal Encoding::KOI8_R, recognize("# coding:koi8-r\nfoobar") end def test_shebang assert_equal Encoding::KOI8_R, recognize("#!/bin/foo\n# coding:koi8-r\nfoobar") assert_nil recognize("#!/bin/foo\n") end def test_case assert_equal Encoding::KOI8_R, recognize("# coding:KoI8-r\nfoobar") end def test_space assert_equal Encoding::KOI8_R, recognize("# coding : koi8-r\nfoobar") end def test_empty assert_nil recognize('') end def test_no_comment assert_nil recognize(%{require 'cane/encoding_aware_iterator'}) end def test_adjacent assert_nil recognize('# codingkoi8-r') assert_nil recognize('# coding koi8-r') end def test_utf8_mac assert_equal Encoding::UTF8_MAC, recognize('# coding: utf8-mac') end def test_suffix assert_equal Encoding::UTF_8, recognize('# coding: utf-8-dos') assert_equal Encoding::UTF_8, recognize('# coding: utf-8-unix') assert_equal Encoding::UTF_8, recognize('# coding: utf-8-mac') e = assert_raises(ArgumentError) do assert_nil recognize('# coding: utf-8-dicks') end assert(e.is_a?(Parser::UnknownEncodingInMagicComment)) end def test_parse_18_invalid_enc ast = Parser::Ruby18.parse("# encoding:feynman-diagram\n1") assert_equal ast, s(:int, 1) end def test_parse_19_invalid_enc assert_raises(ArgumentError) do Parser::Ruby19.parse("# encoding:feynman-diagram\n1") end end def test_ending_comment assert_nil recognize('foo # coding: koi8-r') end def test_wrong_prefix assert_nil recognize('# decoding: koi8-r') end def test_no_spaces assert_equal Encoding::KOI8_R, recognize('#encoding:koi8-r') assert_equal Encoding::KOI8_R, recognize('#coding:koi8-r') end def test_underscore_and_star_characters assert_equal Encoding::KOI8_R, recognize('# -*- encoding: koi8-r -*-') end def test_garbage_around_encoding_comment assert_equal Encoding::KOI8_R, recognize('# 1$# -*- &)* encoding: koi8-r 1$# -*- &)*') end end parser-3.3.4.2/test/test_lexer.rb000066400000000000000000003313401465510415600166520ustar00rootroot00000000000000# encoding: ascii-8bit # frozen_string_literal: true require 'helper' require 'complex' class TestLexer < Minitest::Test def setup_lexer(version) @lex = Parser::Lexer.new(version) @lex.comments = [] @lex.diagnostics = Parser::Diagnostic::Engine.new @lex.diagnostics.all_errors_are_fatal = true # @lex.diagnostics.consumer = lambda { |diag| $stderr.puts "", diag.render } end def setup setup_lexer 18 end # # Tools # def utf(str) str.dup.force_encoding(Encoding::UTF_8) end # # Additional matchers # def refute_scanned(s, *args) assert_raises Parser::SyntaxError do assert_scanned(s, *args) end end def assert_escape(expected, input) source_buffer = Parser::Source::Buffer.new('(assert_escape)', source: "\"\\#{input}\"".encode(input.encoding)) @lex.reset @lex.source_buffer = source_buffer lex_token, (lex_value, *) = @lex.advance lex_value.force_encoding(Encoding::BINARY) assert_equal [:tSTRING, expected], [lex_token, lex_value], source_buffer.source end def 
refute_escape(input) err = assert_raises Parser::SyntaxError do @lex.state = :expr_beg assert_scanned "%Q[\\#{input}]" end assert_equal :fatal, err.diagnostic.level end def assert_lex_fname(name, type, range) begin_pos, end_pos = range assert_scanned("def #{name} ", :kDEF, 'def', [0, 3], type, name, [begin_pos + 4, end_pos + 4]) assert_equal :expr_endfn, @lex.state end def assert_scanned(input, *args) source_buffer = Parser::Source::Buffer.new('(assert_scanned)', source: input) @lex.reset(false) @lex.source_buffer = source_buffer until args.empty? do token, value, (begin_pos, end_pos) = args.shift(3) lex_token, (lex_value, lex_range) = @lex.advance assert lex_token, 'no more tokens' assert_operator [lex_token, lex_value], :eql?, [token, value], input assert_equal begin_pos, lex_range.begin_pos assert_equal end_pos, lex_range.end_pos end lex_token, (lex_value, *) = @lex.advance refute lex_token, "must be empty, but had #{[lex_token, lex_value].inspect}" end # # Tests # def test_read_escape assert_escape "\\", "\\" assert_escape "\n", "n" assert_escape "\t", "t" assert_escape "\r", "r" assert_escape "\f", "f" assert_escape "\13", "v" assert_escape "\0", "0" assert_escape "\07", "a" assert_escape "\007", "a" assert_escape "\033", "e" assert_escape "\377", "377" assert_escape "\377", "xff" assert_escape "\010", "b" assert_escape " ", "s" assert_escape "q", "q" # plain vanilla escape end def test_read_escape_c assert_escape "\030", "C-x" assert_escape "\030", "cx" assert_escape "\230", 'C-\M-x' assert_escape "\230", 'c\M-x' assert_escape "\177", "C-?" assert_escape "\177", "c?" assert_escape "\r", "cM" end def test_read_escape_m assert_escape "\370", "M-x" assert_escape "\230", 'M-\C-x' assert_escape "\230", 'M-\cx' end def test_read_escape_errors refute_escape "" refute_escape "M" refute_escape "M-" refute_escape "Mx" refute_escape "Cx" refute_escape "C" refute_escape "C-" refute_escape "c" refute_escape "x" end def test_read_escape_unicode__19 assert_escape "\x09", 'u{9}' assert_escape "\x31", 'u{31}' assert_escape "\x09\x01", 'u{9 1}' assert_escape "\xc4\xa3", utf('u0123') assert_escape "\xc4\xa3\xc3\xb0\xeb\x84\xa3", utf('u{123 f0 B123}') end def test_read_escape_unicode_bad__19 refute_escape 'u123' refute_escape 'u{}' refute_escape 'u{123 f0h}' refute_escape 'u{123 f0' end def test_read_escape_whitespaces__27 setup_lexer 27 [ *(0..8), *(14..31) ].each do |code| @lex.reset refute_scanned "\"\\C-" + code.chr + "\"" @lex.reset refute_scanned "\"\\M-" + code.chr + "\"" @lex.reset refute_scanned "\"\\C-\\M-" + code.chr + "\"" @lex.reset refute_scanned "\"\\M-\\C-" + code.chr + "\"" end end def test_ambiguous_uminus assert_scanned("m -3", :tIDENTIFIER, "m", [0, 1], :tUNARY_NUM, "-", [2, 3], :tINTEGER, 3, [3, 4]) end def test_ambiguous_uplus assert_scanned("m +3", :tIDENTIFIER, "m", [0, 1], :tUNARY_NUM, "+", [2, 3], :tINTEGER, 3, [3, 4]) end def test_and assert_scanned "&", :tAMPER, "&", [0, 1] end def test_and2 @lex.state = :expr_end assert_scanned "&&", :tANDOP, "&&", [0, 2] end def test_and2_equals @lex.state = :expr_end assert_scanned "&&=", :tOP_ASGN, "&&", [0, 3] end def test_and_arg @lex.state = :expr_arg assert_scanned(" &y", :tAMPER, "&", [1, 2], :tIDENTIFIER, "y", [2, 3]) end def test_and_equals @lex.state = :expr_end assert_scanned "&=", :tOP_ASGN, "&", [0, 2] end def test_and_expr @lex.state = :expr_arg assert_scanned("x & y", :tIDENTIFIER, "x", [0, 1], :tAMPER2, "&", [2, 3], :tIDENTIFIER, "y", [4, 5]) end def test_and_meth assert_lex_fname "&", :tAMPER2, [0, 1] end def 
test_and_dot_arg @lex.state = :expr_arg assert_scanned "&.", :tANDDOT, "&.", [0, 2] end def test_and_dot_cmdarg @lex.state = :expr_cmdarg assert_scanned "&.", :tANDDOT, "&.", [0, 2] end def test_assoc assert_scanned "=>", :tASSOC, "=>", [0, 2] end def test_label__18 assert_scanned("{a:b", :tLBRACE, "{", [0, 1], :tIDENTIFIER, "a", [1, 2], :tSYMBOL, "b", [2, 4]) end def test_label_in_params__18 assert_scanned("foo(a:b", :tIDENTIFIER, "foo", [0, 3], :tLPAREN2, "(", [3, 4], :tIDENTIFIER, "a", [4, 5], :tSYMBOL, "b", [5, 7]) end def test_label__19 setup_lexer 19 assert_scanned("{a:b", :tLBRACE, "{", [0, 1], :tLABEL, "a", [1, 3], :tIDENTIFIER, "b", [3, 4]) end def test_label_in_params__19 setup_lexer 19 assert_scanned("foo(a:b", :tIDENTIFIER, "foo", [0, 3], :tLPAREN2, "(", [3, 4], :tLABEL, "a", [4, 6], :tIDENTIFIER, "b", [6, 7]) end def test_label_fid__19 setup_lexer 19 assert_scanned("{a?:true", :tLBRACE, '{', [0, 1], :tLABEL, 'a?', [1, 4], :kTRUE, 'true', [4, 8]) end def test_label__22 setup_lexer 22 assert_scanned("{'a':", :tLBRACE, '{', [0, 1], :tSTRING_BEG, "'", [1, 2], :tSTRING_CONTENT, 'a', [2, 3], :tLABEL_END, "'", [3, 5]) end def test_label_nested__22 setup_lexer 22 assert_scanned("{'a\":':", :tLBRACE, '{', [0, 1], :tSTRING_BEG, "'", [1, 2], :tSTRING_CONTENT, 'a":', [2, 5], :tLABEL_END, "'", [5, 7]) end def test_label_colon2__22 setup_lexer 22 assert_scanned("{'a'::", :tLBRACE, '{', [0, 1], :tSTRING, "a", [1, 4], :tCOLON2, '::', [4, 6]) end def test_pct_string_colon__22 setup_lexer 22 assert_scanned("{%'a':", :tLBRACE, '{', [0, 1], :tSTRING_BEG, "%'", [1, 3], :tSTRING_CONTENT, 'a', [3, 4], :tSTRING_END, "'", [4, 5], :tCOLON, ':', [5, 6]) end def test_command_start__19 setup_lexer 19 %w[case elsif for in until when while if unless and or].each do |keyword| token = "k#{keyword.upcase}".to_sym @lex.reset assert_scanned("#{keyword} a:b", token, keyword, [0, keyword.length], :tIDENTIFIER, "a", [keyword.length + 1, keyword.length + 2], :tSYMBOL, "b", [keyword.length + 2, keyword.length + 4]) end end def test_mod_not_command_start__19 setup_lexer 19 %w[if unless while until rescue].each do |keyword| token = "k#{keyword.upcase}_MOD".to_sym @lex.state = :expr_end assert_scanned("#{keyword} a:b", token, keyword, [0, keyword.length], :tLABEL, "a", [keyword.length + 1, keyword.length + 3], :tIDENTIFIER, "b", [keyword.length + 3, keyword.length + 4]) end end def test_back_ref assert_scanned("[$&, $`, $', $+]", :tLBRACK, "[", [0, 1], :tBACK_REF, "$&", [1, 3], :tCOMMA, ",", [3, 4], :tBACK_REF, "$`", [5, 7], :tCOMMA, ",", [7, 8], :tBACK_REF, "$'", [9, 11], :tCOMMA, ",", [11, 12], :tBACK_REF, "$+", [13, 15], :tRBRACK, "]", [15, 16]) end def test_backslash assert_scanned("1 \\\n+ 2", :tINTEGER, 1, [0, 1], :tPLUS, "+", [4, 5], :tINTEGER, 2, [6, 7]) end def test_backslash_bad refute_scanned("1 \\ + 2", :tINTEGER, 1, [0, 1]) end def test_backtick assert_scanned("`ls`", :tXSTRING_BEG, "`", [0, 1], :tSTRING_CONTENT, "ls", [1, 3], :tSTRING_END, "`", [3, 4]) end def test_backtick_cmdarg @lex.state = :expr_dot assert_scanned("\n`", :tBACK_REF2, "`", [1, 2]) # \n ensures expr_cmd assert_equal :expr_arg, @lex.state end def test_backtick_dot @lex.state = :expr_dot assert_scanned("a.`(3)", :tIDENTIFIER, "a", [0, 1], :tDOT, ".", [1, 2], :tBACK_REF2, "`", [2, 3], :tLPAREN2, "(", [3, 4], :tINTEGER, 3, [4, 5], :tRPAREN, ")", [5, 6]) end def test_backtick_method @lex.state = :expr_fname assert_scanned("`", :tBACK_REF2, "`", [0, 1]) assert_equal :expr_endfn, @lex.state end def test_bad_char refute_scanned(" \010 ") end def 
test_bang assert_scanned "!", :tBANG, "!", [0, 1] end def test_bang_equals assert_scanned "!=", :tNEQ, "!=", [0, 2] end def test_bang_tilde assert_scanned "!~", :tNMATCH, "!~", [0, 2] end def test_def_ubang setup_lexer(20) @lex.state = :expr_fname assert_scanned '!@', :tBANG, '!@', [0, 2] end def test_carat assert_scanned "^", :tCARET, "^", [0, 1] end def test_carat_equals assert_scanned "^=", :tOP_ASGN, "^", [0, 2] end def test_colon2 assert_scanned("A::B", :tCONSTANT, "A", [0, 1], :tCOLON2, "::", [1, 3], :tCONSTANT, "B", [3, 4]) @lex.state = :expr_arg assert_scanned("::Array", :tCOLON2, "::", [0, 2], :tCONSTANT, "Array", [2, 7]) end def test_colon3 assert_scanned("::Array", :tCOLON3, "::", [0, 2], :tCONSTANT, "Array", [2, 7]) @lex.state = :expr_arg assert_scanned(" ::Array", :tCOLON3, "::", [1, 3], :tCONSTANT, "Array", [3, 8]) end def test_comma assert_scanned ",", :tCOMMA, ",", [0, 1] end def test_comment [26, 27].each do |version| setup_lexer(version) assert_scanned("1 # one\n# two\n2", :tINTEGER, 1, [0, 1], :tNL, nil, [7, 8], :tINTEGER, 2, [14, 15]) assert_equal 2, @lex.comments.length assert_equal '# one', @lex.comments[0].text assert_equal '# two', @lex.comments[1].text end end def test_comment_expr_beg assert_scanned("{#1\n}", :tLBRACE, "{", [0, 1], :tRCURLY, "}", [4, 5]) end def test_comment_begin assert_scanned("=begin\nblah\nblah\n=end\n42", :tINTEGER, 42, [22, 24]) assert_equal 1, @lex.comments.length assert_equal "=begin\nblah\nblah\n=end\n", @lex.comments[0].text end def test_comment_begin_bad refute_scanned("=begin\nblah\nblah\n") end def test_comment_begin_not_comment assert_scanned("beginfoo = 5\np x \\\n=beginfoo", :tIDENTIFIER, "beginfoo", [0, 8], :tEQL, "=", [9, 10], :tINTEGER, 5, [11, 12], :tNL, nil, [12, 13], :tIDENTIFIER, "p", [13, 14], :tIDENTIFIER, "x", [15, 16], :tEQL, "=", [19, 20], :tIDENTIFIER, "beginfoo", [20, 28]) end def test_comment_begin_space assert_scanned("=begin blah\nblah\n=end\n") assert_equal 1, @lex.comments.length assert_equal "=begin blah\nblah\n=end\n", @lex.comments[0].text end def test_comment_end_space_and_text assert_scanned("=begin blah\nblah\n=end blab\n") assert_equal 1, @lex.comments.length assert_equal "=begin blah\nblah\n=end blab\n", @lex.comments[0].text end def test_comment_eos assert_scanned("# comment") end def test_constant assert_scanned("ArgumentError", :tCONSTANT, "ArgumentError", [0, 13]) end def test_constant_semi assert_scanned("ArgumentError;", :tCONSTANT, "ArgumentError", [0, 13], :tSEMI, ";", [13, 14]) end def test_cvar assert_scanned "@@blah", :tCVAR, "@@blah", [0, 6] end def test_cvar_bad refute_scanned "@@1" end def test_div assert_scanned("a / 2", :tIDENTIFIER, "a", [0, 1], :tDIVIDE, "/", [2, 3], :tINTEGER, 2, [4, 5]) end def test_div_equals assert_scanned("a /= 2", :tIDENTIFIER, "a", [0, 1], :tOP_ASGN, "/", [2, 4], :tINTEGER, 2, [5, 6]) end def test_do assert_scanned("x do 42 end", :tIDENTIFIER, "x", [0, 1], :kDO, "do", [2, 4], :tINTEGER, 42, [5, 7], :kEND, "end", [8, 11]) end def test_do_block @lex.state = :expr_endarg assert_scanned("do 42 end", :kDO_BLOCK, "do", [0, 2], :tINTEGER, 42, [3, 5], :kEND, "end", [6, 9]) end def test_do_cond @lex.cond.push true assert_scanned("x do 42 end", :tIDENTIFIER, "x", [0, 1], :kDO_COND, "do", [2, 4], :tINTEGER, 42, [5, 7], :kEND, "end", [8, 11]) end def test_dot assert_scanned ".", :tDOT, ".", [0, 1] end def test_dot2 assert_scanned "..", :tDOT2, "..", [0, 2] refute_scanned("foo\n..42", :tIDENTIFIER, "foo", [0, 3], :tNL, nil, [3, 4]) end def test_dot2_27 setup_lexer 27 
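    # On 2.7+ a leading `..` lexes as tBDOT2 (beginless range), so the
    # "foo\n..42" input rejected in test_dot2 above scans successfully here.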
assert_scanned "..", :tBDOT2, "..", [0, 2] assert_scanned("foo\n..42", :tIDENTIFIER, "foo", [0, 3], :tNL, nil, [3, 4], :tBDOT2, "..", [4, 6], :tINTEGER, 42, [6, 8]) end def test_dot3 assert_scanned "...", :tDOT3, "...", [0, 3] refute_scanned("foo\n...42", :tIDENTIFIER, "foo", [0, 3], :tNL, nil, [3, 4]) end def test_dot3_27 setup_lexer 27 assert_scanned "...", :tBDOT3, "...", [0, 3] assert_scanned("foo\n...42", :tIDENTIFIER, "foo", [0, 3], :tNL, nil, [3, 4], :tBDOT3, "...", [4, 7], :tINTEGER, 42, [7, 9]) end def test_equals assert_scanned "=", :tEQL, "=", [0, 1] end def test_equals2 assert_scanned "==", :tEQ, "==", [0, 2] end def test_equals3 assert_scanned "===", :tEQQ, "===", [0, 3] end def test_equals_tilde assert_scanned "=~", :tMATCH, "=~", [0, 2] end def test_float assert_scanned "1.0", :tFLOAT, 1.0, [0, 3] end def test_float_bad_no_underscores refute_scanned "1__0.0" end def test_float_bad_no_zero_leading refute_scanned ".0" end def test_float_bad_trailing_underscore refute_scanned "123_.0" end def test_float_call assert_scanned("1.0.to_s", :tFLOAT, 1.0, [0, 3], :tDOT, ".", [3, 4], :tIDENTIFIER, "to_s", [4, 8]) end def test_float_dot_E assert_scanned "1.0E10", :tFLOAT, 1.0e10, [0, 6] end def test_float_dot_E_neg assert_scanned("-1.0E10", :tUNARY_NUM, "-", [0, 1], :tFLOAT, 1.0e10, [1, 7]) end def test_float_dot_E_pos assert_scanned("+1.0E10", :tUNARY_NUM, "+", [0, 1], :tFLOAT, 1.0e10, [1, 7]) end def test_float_dot_e assert_scanned "1.0e10", :tFLOAT, 1.0e10, [0, 6] end def test_float_dot_e_neg assert_scanned("-1.0e10", :tUNARY_NUM, "-", [0, 1], :tFLOAT, 1.0e10, [1, 7]) end def test_float_dot_e_pos assert_scanned("+1.0e10", :tUNARY_NUM, "+", [0, 1], :tFLOAT, 1.0e10, [1, 7]) end def test_float_e assert_scanned "1e10", :tFLOAT, 1e10, [0, 4] end def test_float_e_bad_trailing_underscore refute_scanned "123_e10" end def test_float_e_minus assert_scanned "1e-10", :tFLOAT, 1e-10, [0, 5] end def test_float_e_neg assert_scanned("-1e10", :tUNARY_NUM, "-", [0, 1], :tFLOAT, 1e10, [1, 5]) end def test_float_e_neg_minus assert_scanned("-1e-10", :tUNARY_NUM, "-", [0, 1], :tFLOAT, 1e-10, [1, 6]) end def test_float_e_neg_plus assert_scanned("-1e+10", :tUNARY_NUM, "-", [0, 1], :tFLOAT, 1e10, [1, 6]) end def test_float_e_pos assert_scanned("+1e10", :tUNARY_NUM, "+", [0, 1], :tFLOAT, 1e10, [1, 5]) end def test_float_e_pos_minus assert_scanned("+1e-10", :tUNARY_NUM, "+", [0, 1], :tFLOAT, 1e-10, [1, 6]) end def test_float_e_pos_plus assert_scanned("+1e+10", :tUNARY_NUM, "+", [0, 1], :tFLOAT, 1e10, [1, 6]) end def test_float_e_plus assert_scanned "1e+10", :tFLOAT, 1e10, [0, 5] end def test_float_e_zero assert_scanned "0e0", :tFLOAT, 0e0, [0, 3] end def test_float_e_nothing [18, 19, 20].each do |version| setup_lexer version refute_scanned "1end" refute_scanned "1.1end" end setup_lexer 21 assert_scanned("1end", :tINTEGER, 1, [0, 1], :kEND, 'end', [1, 4]) assert_scanned("1.1end", :tFLOAT, 1.1, [0, 3], :kEND, 'end', [3, 6]) end def test_float_neg assert_scanned("-1.0", :tUNARY_NUM, "-", [0, 1], :tFLOAT, 1.0, [1, 4]) end def test_float_pos assert_scanned("+1.0", :tUNARY_NUM, "+", [0, 1], :tFLOAT, 1.0, [1, 4]) end def test_ge assert_scanned("a >= 2", :tIDENTIFIER, "a", [0, 1], :tGEQ, ">=", [2, 4], :tINTEGER, 2, [5, 6]) end def test_global assert_scanned("$blah", :tGVAR, "$blah", [0, 5]) end def test_global_backref assert_scanned("$`", :tBACK_REF, "$`", [0, 2]) end # This was removed in 2.1. 
# def test_global_dash_nothing # assert_scanned("$- ", :tGVAR, "$-") # end def test_global_dash_something assert_scanned("$-x", :tGVAR, "$-x", [0, 3]) end def test_global_number assert_scanned("$10", :tNTH_REF, 10, [0, 3]) end def test_global_other assert_scanned("[$~, $*, $$, $?, $!, $@, $/, $\\, $;, $,, $., $=, $:, $<, $>, $\"]", :tLBRACK, "[", [0, 1], :tGVAR, "$~", [1, 3], :tCOMMA, ",", [3, 4], :tGVAR, "$*", [5, 7], :tCOMMA, ",", [7, 8], :tGVAR, "$$", [9, 11], :tCOMMA, ",", [11, 12], :tGVAR, "$\?", [13, 15], :tCOMMA, ",", [15, 16], :tGVAR, "$!", [17, 19], :tCOMMA, ",", [19, 20], :tGVAR, "$@", [21, 23], :tCOMMA, ",", [23, 24], :tGVAR, "$/", [25, 27], :tCOMMA, ",", [27, 28], :tGVAR, "$\\", [29, 31], :tCOMMA, ",", [31, 32], :tGVAR, "$;", [33, 35], :tCOMMA, ",", [35, 36], :tGVAR, "$,", [37, 39], :tCOMMA, ",", [39, 40], :tGVAR, "$.", [41, 43], :tCOMMA, ",", [43, 44], :tGVAR, "$=", [45, 47], :tCOMMA, ",", [47, 48], :tGVAR, "$:", [49, 51], :tCOMMA, ",", [51, 52], :tGVAR, "$<", [53, 55], :tCOMMA, ",", [55, 56], :tGVAR, "$>", [57, 59], :tCOMMA, ",", [59, 60], :tGVAR, "$\"", [61, 63], :tRBRACK, "]", [63, 64]) end def test_global_underscore assert_scanned("$_", :tGVAR, "$_", [0, 2]) end def test_global_weird assert_scanned("$__blah", :tGVAR, "$__blah", [0, 7]) end def test_global_zero assert_scanned("$0", :tGVAR, "$0", [0, 2]) end def test_gt assert_scanned("a > 2", :tIDENTIFIER, "a", [0, 1], :tGT, ">", [2, 3], :tINTEGER, 2, [4, 5]) end def test_heredoc_backtick assert_scanned("a = <<`EOF`\n blah blah\nEOF\n", :tIDENTIFIER, "a", [0, 1], :tEQL, "=", [2, 3], :tXSTRING_BEG, "<<`", [4, 11], :tSTRING_CONTENT, " blah blah\n", [12, 24], :tSTRING_END, "EOF", [24, 27], :tNL, nil, [11, 12]) end def test_heredoc_double assert_scanned("a = <<\"EOF\"\n blah blah\nEOF\n", :tIDENTIFIER, "a", [0, 1], :tEQL, "=", [2, 3], :tSTRING_BEG, "<<\"", [4, 11], :tSTRING_CONTENT, " blah blah\n", [12, 24], :tSTRING_END, "EOF", [24, 27], :tNL, nil, [11, 12]) end def test_heredoc_double_dash assert_scanned("a = <<-\"EOF\"\n blah blah\n EOF\n", :tIDENTIFIER, "a", [0, 1], :tEQL, "=", [2, 3], :tSTRING_BEG, "<<\"", [4, 12], :tSTRING_CONTENT, " blah blah\n", [13, 25], :tSTRING_END, "EOF", [25, 30], :tNL, nil, [12, 13]) end def test_heredoc_double_eos refute_scanned("a = <<\"EOF\"\nblah", :tIDENTIFIER, "a", [0, 1], :tEQL, "=", [2, 3], :tSTRING_BEG, "<<\"", [4, 7]) end def test_heredoc_double_eos_nl refute_scanned("a = <<\"EOF\"\nblah\n", :tIDENTIFIER, "a", [0, 1], :tEQL, "=", [2, 3], :tSTRING_BEG, "<<\"", [4, 7]) end def test_heredoc_double_interp assert_scanned("a = <<\"EOF\"\n#x a \#@a b \#$b c \#{3} \nEOF\n", :tIDENTIFIER, "a", [0, 1], :tEQL, "=", [2, 3], :tSTRING_BEG, "<<\"", [4, 11], :tSTRING_CONTENT, "#x a ", [12, 17], :tSTRING_DVAR, nil, [17, 18], :tIVAR, "@a", [18, 20], :tSTRING_CONTENT, " b ", [20, 23], :tSTRING_DVAR, nil, [23, 24], :tGVAR, "$b", [24, 26], :tSTRING_CONTENT, " c ", [26, 29], :tSTRING_DBEG, '#{', [29, 31], :tINTEGER, 3, [31, 32], :tRCURLY, "}", [32, 33], :tSTRING_CONTENT, " \n", [33, 35], :tSTRING_END, "EOF", [35, 38], :tNL, nil, [11, 12]) end def test_heredoc_empty assert_scanned("<<\"\"\n\#{x}\nblah2\n\n", :tSTRING_BEG, "<<\"", [0, 4], :tSTRING_DBEG, "\#{", [5, 7], :tIDENTIFIER, "x", [7, 8], :tRCURLY, "}", [8, 9], :tSTRING_CONTENT, "\n", [9, 10], :tSTRING_CONTENT, "blah2\n", [10, 16], :tSTRING_END, "", [16, 16], :tNL, nil, [4, 5]) end def test_heredoc_none assert_scanned("a = <", :tCMP, [0, 3] end def test_identifier_def assert_lex_fname "identifier", :tIDENTIFIER, [0, 10] end def 
test_identifier_equals_arrow assert_scanned(":blah==>", :tSYMBOL, "blah=", [0, 6], :tASSOC, "=>", [6, 8]) end def test_identifier_equals3 assert_scanned(":a===b", :tSYMBOL, "a", [0, 2], :tEQQ, "===", [2, 5], :tIDENTIFIER, "b", [5, 6]) end def test_identifier_equals_equals_arrow assert_scanned(":a==>b", :tSYMBOL, "a=", [0, 3], :tASSOC, "=>", [3, 5], :tIDENTIFIER, "b", [5, 6]) end def test_identifier_equals_caret assert_lex_fname "^", :tCARET, [0, 1] end def test_identifier_equals_def assert_lex_fname "identifier=", :tIDENTIFIER, [0, 11] end def test_identifier_equals_def2 assert_lex_fname "==", :tEQ, [0, 2] end def test_identifier_equals_expr @lex.state = :expr_dot assert_scanned("y = arg", :tIDENTIFIER, "y", [0, 1], :tEQL, "=", [2, 3], :tIDENTIFIER, "arg", [4, 7]) assert_equal :expr_arg, @lex.state end def test_identifier_equals_or assert_lex_fname "|", :tPIPE, [0, 1] end def test_identifier_equals_slash assert_lex_fname "/", :tDIVIDE, [0, 1] end def test_identifier_equals_tilde @lex.state = :expr_fname assert_scanned("identifier=~", :tIDENTIFIER, "identifier=", [0, 11], :tTILDE, "~", [11, 12]) end def test_identifier_gt assert_lex_fname ">", :tGT, [0, 1] end def test_identifier_le assert_lex_fname "<=", :tLEQ, [0, 2] end def test_identifier_lt assert_lex_fname "<", :tLT, [0, 1] end def test_identifier_tilde assert_lex_fname "~", :tTILDE, [0, 1] end def test_identifier_defined? assert_lex_fname "defined?", :kDEFINED, [0, 8] end def test_index assert_lex_fname "[]", :tAREF, [0, 2] end def test_index_equals assert_lex_fname "[]=", :tASET, [0, 3] end def test_integer assert_scanned "42", :tINTEGER, 42, [0, 2] end def test_integer_bin assert_scanned "0b101010", :tINTEGER, 42, [0, 8] end def test_integer_bin_bad_none refute_scanned "0b " end def test_integer_bin_bad_underscores refute_scanned "0b10__01" end def test_integer_dec assert_scanned "42", :tINTEGER, 42, [0, 2] end def test_integer_dec_bad_underscores refute_scanned "42__24" end def test_integer_dec_d assert_scanned "0d42", :tINTEGER, 42, [0, 4] end def test_integer_dec_d_bad_none refute_scanned "0d" end def test_integer_dec_d_bad_underscores refute_scanned "0d42__24" end def test_question_eh_a__18 setup_lexer 18 assert_scanned "?a", :tINTEGER, 97, [0, 2] end def test_question_eh_a__19 setup_lexer 19 assert_scanned '?a', :tCHARACTER, "a", [0, 2] end def test_question_eh_escape_M_escape_C__18 setup_lexer 18 assert_scanned '?\M-\C-a', :tINTEGER, 129, [0, 8] end def test_question_eh_escape_M_escape_C__19 setup_lexer 19 assert_scanned '?\M-\C-a', :tCHARACTER, "\M-\C-a", [0, 8] end def test_question_eh_escape_u_1_digit setup_lexer 19 refute_scanned '?\\u1' end def test_question_eh_escape_u_2_digits setup_lexer 19 refute_scanned '?\\u12' end def test_question_eh_escape_u_3_digits setup_lexer 19 refute_scanned '?\\u123' end def test_question_eh_escape_u_4_digits setup_lexer 19 assert_scanned '?\\u0001', :tCHARACTER, "\u0001", [0, 7] end def test_question_eh_single_unicode_point setup_lexer 19 assert_scanned '?\\u{123}', :tCHARACTER, "\u0123", [0, 8] setup_lexer 19 assert_scanned '?\\u{a}', :tCHARACTER, "\n", [0, 6] end def test_question_eh_multiple_unicode_points setup_lexer 19 refute_scanned '?\\u{1 2 3}' setup_lexer 19 refute_scanned '?\\u{a b}' end def test_question_eh_escape_u_unclosed_bracket setup_lexer 19 refute_scanned '?\\u{123' end def test_question_eh_escape_space_around_unicode_point__19 setup_lexer 19 refute_scanned '"\\u{1 }"' setup_lexer 19 refute_scanned '"\\u{ 1}"' setup_lexer 19 refute_scanned '"\\u{ 1 }"' setup_lexer 19 
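    # In 1.9 mode, whitespace padding around code points inside \u{...} is
    # rejected; the __24 variant below accepts the same literals.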
refute_scanned '"\\u{1 2 }"' setup_lexer 19 refute_scanned '"\\u{ 1 2}"' setup_lexer 19 refute_scanned '"\\u{1 2}"' end def test_question_eh_escape_space_around_unicode_point__24 setup_lexer 24 assert_scanned '"\\u{ 1}"', :tSTRING, "\u0001", [0, 8] setup_lexer 24 assert_scanned '"\\u{1 }"', :tSTRING, "\u0001", [0, 8] setup_lexer 24 assert_scanned '"\\u{ 1 }"', :tSTRING, "\u0001", [0, 9] setup_lexer 24 assert_scanned '"\\u{1 2 }"', :tSTRING, "\u0001\u0002", [0, 10] setup_lexer 24 assert_scanned '"\\u{ 1 2}"', :tSTRING, "\u0001\u0002", [0, 10] setup_lexer 24 assert_scanned '"\\u{1 2}"', :tSTRING, "\u0001\u0002", [0, 10] end def test_integer_hex assert_scanned "0x2a", :tINTEGER, 42, [0, 4] end def test_integer_hex_bad_none refute_scanned "0x " end def test_integer_hex_bad_underscores refute_scanned "0xab__cd" end def test_integer_oct assert_scanned "052", :tINTEGER, 42, [0, 3] end def test_integer_oct_bad_range refute_scanned "08" end def test_integer_oct_bad_underscores refute_scanned "01__23" end def test_integer_oct_O assert_scanned "0O52", :tINTEGER, 42, [0, 4] end def test_integer_oct_O_bad_range refute_scanned "0O1238" end def test_integer_oct_O_bad_underscores refute_scanned "0O1__23" end def test_integer_oct_O_not_bad_none assert_scanned "0O ", :tINTEGER, 0, [0, 2] end def test_integer_oct_o assert_scanned "0o52", :tINTEGER, 42, [0, 4] end def test_integer_oct_o_bad_range refute_scanned "0o1283" end def test_integer_oct_o_bad_underscores refute_scanned "0o1__23" end def test_integer_oct_o_not_bad_none assert_scanned "0o ", :tINTEGER, 0, [0, 2] end def test_integer_trailing assert_scanned("1.to_s", :tINTEGER, 1, [0, 1], :tDOT, '.', [1, 2], :tIDENTIFIER, 'to_s', [2, 6]) end def test_integer_underscore assert_scanned "4_2", :tINTEGER, 42, [0, 3] end def test_integer_underscore_bad refute_scanned "4__2" end def test_integer_zero assert_scanned "0", :tINTEGER, 0, [0, 1] end def test_ivar assert_scanned "@blah", :tIVAR, "@blah", [0, 5] end def test_ivar_bad refute_scanned "@1" end def test_ivar_bad_0_length refute_scanned "1+@\n", :tINTEGER, 1, [0, 1], :tPLUS, "+", [1, 2] end def test_keyword_expr @lex.state = :expr_endarg assert_scanned "if", :kIF_MOD, "if", [0, 2] assert_equal :expr_beg, @lex.state end def test_lt assert_scanned "<", :tLT, "<", [0, 1] end def test_lt2 assert_scanned("a <\< b", :tIDENTIFIER, "a", [0, 1], :tLSHFT, "<\<", [2, 4], :tIDENTIFIER, "b", [5, 6]) end def test_lt2_equals assert_scanned("a <\<= b", :tIDENTIFIER, "a", [0, 1], :tOP_ASGN, "<\<", [2, 5], :tIDENTIFIER, "b", [6, 7]) end def test_lt_equals assert_scanned "<=", :tLEQ, "<=", [0, 2] end def test_minus assert_scanned("1 - 2", :tINTEGER, 1, [0, 1], :tMINUS, "-", [2, 3], :tINTEGER, 2, [4, 5]) end def test_minus_equals @lex.state = :expr_end assert_scanned "-=", :tOP_ASGN, "-", [0, 2] end def test_minus_method @lex.state = :expr_fname assert_scanned "-", :tMINUS, "-", [0, 1] end def test_minus_unary_method @lex.state = :expr_fname assert_scanned "-@", :tUMINUS, "-@", [0, 2] end def test_minus_unary_number assert_scanned("-42", :tUNARY_NUM, "-", [0, 1], :tINTEGER, 42, [1, 3]) end def test_minus_unary_whitespace_number assert_scanned("- 42", :tUNARY_NUM, "-", [0, 1], :tINTEGER, 42, [2, 4]) end def test_nth_ref assert_scanned('[$1, $2, $3]', :tLBRACK, "[", [0, 1], :tNTH_REF, 1, [1, 3], :tCOMMA, ",", [3, 4], :tNTH_REF, 2, [5, 7], :tCOMMA, ",", [7, 8], :tNTH_REF, 3, [9, 11], :tRBRACK, "]", [11, 12]) end def test_open_bracket assert_scanned("(", :tLPAREN, "(", [0, 1]) end def test_open_bracket_cmdarg assert_scanned("m 
(", :tIDENTIFIER, "m", [0, 1], :tLPAREN_ARG, "(", [2, 3]) end def test_open_bracket_exprarg assert_scanned("m(", :tIDENTIFIER, "m", [0, 1], :tLPAREN2, "(", [1, 2]) end def test_open_curly_bracket assert_scanned("{", :tLBRACE, "{", [0, 1]) end def test_open_curly_bracket_arg assert_scanned("m { 3 }", :tIDENTIFIER, "m", [0, 1], :tLCURLY, "{", [2, 3], :tINTEGER, 3, [4, 5], :tRCURLY, "}", [6, 7]) end def test_open_curly_bracket_block @lex.state = :expr_endarg # seen m(3) assert_scanned("{ 4 }", :tLBRACE_ARG, "{", [0, 1], :tINTEGER, 4, [2, 3], :tRCURLY, "}", [4, 5]) end def test_open_square_bracket_arg assert_scanned("m [ 3 ]", :tIDENTIFIER, "m", [0, 1], :tLBRACK, "[", [2, 3], :tINTEGER, 3, [4, 5], :tRBRACK, "]", [6, 7]) end def test_open_square_bracket_ary assert_scanned("[1, 2, 3]", :tLBRACK, "[", [0, 1], :tINTEGER, 1, [1, 2], :tCOMMA, ",", [2, 3], :tINTEGER, 2, [4, 5], :tCOMMA, ",", [5, 6], :tINTEGER, 3, [7, 8], :tRBRACK, "]", [8, 9]) end def test_open_square_bracket_meth assert_scanned("m[3]", :tIDENTIFIER, "m", [0, 1], :tLBRACK2, "[", [1, 2], :tINTEGER, 3, [2, 3], :tRBRACK, "]", [3, 4]) end def test_or assert_scanned "|", :tPIPE, "|", [0, 1] end def test_or2 assert_scanned "||", :tOROP, "||", [0, 2] end def test_or2__after_27 setup_lexer(27) assert_scanned("||", :tPIPE, "|", [0, 1], :tPIPE, "|", [1, 2]) end def test_or2_equals assert_scanned "||=", :tOP_ASGN, "||", [0, 3] end def test_or_equals assert_scanned "|=", :tOP_ASGN, "|", [0, 2] end def test_percent assert_scanned("a % 2", :tIDENTIFIER, "a", [0, 1], :tPERCENT, "%", [2, 3], :tINTEGER, 2, [4, 5]) end def test_percent_equals assert_scanned("a %= 2", :tIDENTIFIER, "a", [0, 1], :tOP_ASGN, "%", [2, 4], :tINTEGER, 2, [5, 6]) end def test_plus assert_scanned("1 + 1", :tINTEGER, 1, [0, 1], :tPLUS, "+", [2, 3], :tINTEGER, 1, [4, 5]) end def test_plus_equals @lex.state = :expr_end assert_scanned "+=", :tOP_ASGN, "+", [0, 2] end def test_plus_method @lex.state = :expr_fname assert_scanned "+", :tPLUS, "+", [0, 1] end def test_plus_unary_method @lex.state = :expr_fname assert_scanned "+@", :tUPLUS, "+@", [0, 2] end def test_plus_unary_number assert_scanned("+42", :tUNARY_NUM, "+", [0, 1], :tINTEGER, 42, [1, 3]) end def test_plus_unary_whitespace_number assert_scanned("+ 42", :tUNARY_NUM, "+", [0, 1], :tINTEGER, 42, [2, 4]) end def test_numbers assert_scanned "0b10", :tINTEGER, 2, [0, 4] assert_scanned "0B10", :tINTEGER, 2, [0, 4] assert_scanned "0d10", :tINTEGER, 10, [0, 4] assert_scanned "0D10", :tINTEGER, 10, [0, 4] assert_scanned "0x10", :tINTEGER, 16, [0, 4] assert_scanned "0X10", :tINTEGER, 16, [0, 4] assert_scanned "0o10", :tINTEGER, 8, [0, 4] assert_scanned "0O10", :tINTEGER, 8, [0, 4] assert_scanned "0o", :tINTEGER, 0, [0, 2] assert_scanned "0O", :tINTEGER, 0, [0, 2] assert_scanned "0o", :tINTEGER, 0, [0, 2] assert_scanned "0O", :tINTEGER, 0, [0, 2] assert_scanned "0777_333", :tINTEGER, 261851, [0, 8] assert_scanned "0", :tINTEGER, 0, [0, 1] refute_scanned "0x" refute_scanned "0X" refute_scanned "0b" refute_scanned "0B" refute_scanned "0d" refute_scanned "0D" refute_scanned "08" refute_scanned "09" refute_scanned "0o8" refute_scanned "0o9" refute_scanned "0O8" refute_scanned "0O9" refute_scanned "1_e1" refute_scanned "1_.1" refute_scanned "1__1" refute_scanned "1end" refute_scanned "1.1end" end def test_question__18 setup_lexer 18 assert_scanned "?*", :tINTEGER, 42, [0, 2] end def test_question__19 setup_lexer 19 assert_scanned "?*", :tCHARACTER, "*", [0, 2] end def test_question_bad_eos refute_scanned "?" 
end def test_question_bad_ws assert_scanned "? ", :tEH, "?", [0, 1] assert_scanned "?\n", :tEH, "?", [0, 1] assert_scanned "?\t", :tEH, "?", [0, 1] assert_scanned "?\v", :tEH, "?", [0, 1] assert_scanned "?\r", :tEH, "?", [0, 1] assert_scanned "?\f", :tEH, "?", [0, 1] end def test_question_ws_backslashed__18 setup_lexer 18 @lex.state = :expr_beg assert_scanned "?\\ ", :tINTEGER, 32, [0, 3] @lex.state = :expr_beg assert_scanned "?\\n", :tINTEGER, 10, [0, 3] @lex.state = :expr_beg assert_scanned "?\\t", :tINTEGER, 9, [0, 3] @lex.state = :expr_beg assert_scanned "?\\v", :tINTEGER, 11, [0, 3] @lex.state = :expr_beg assert_scanned "?\\r", :tINTEGER, 13, [0, 3] @lex.state = :expr_beg assert_scanned "?\\f", :tINTEGER, 12, [0, 3] end def test_question_ws_backslashed__19 setup_lexer 19 @lex.state = :expr_beg assert_scanned "?\\ ", :tCHARACTER, " ", [0, 3] @lex.state = :expr_beg assert_scanned "?\\n", :tCHARACTER, "\n", [0, 3] @lex.state = :expr_beg assert_scanned "?\\t", :tCHARACTER, "\t", [0, 3] @lex.state = :expr_beg assert_scanned "?\\v", :tCHARACTER, "\v", [0, 3] @lex.state = :expr_beg assert_scanned "?\\r", :tCHARACTER, "\r", [0, 3] @lex.state = :expr_beg assert_scanned "?\\f", :tCHARACTER, "\f", [0, 3] end def test_rbracket assert_scanned "]", :tRBRACK, "]", [0, 1] end def test_rcurly assert_scanned "}", :tRCURLY, "}", [0, 1] end def test_regexp assert_scanned("/regexp/", :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regexp", [1, 7], :tSTRING_END, "/", [7, 8], :tREGEXP_OPT, "", [8, 8]) end def test_regexp_ambiguous assert_scanned("method /regexp/", :tIDENTIFIER, "method", [0, 6], :tREGEXP_BEG, "/", [7, 8], :tSTRING_CONTENT, "regexp", [8, 14], :tSTRING_END, "/", [14, 15], :tREGEXP_OPT, "", [15, 15]) end def test_regexp_bad refute_scanned("/.*/xyz", :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, ".*", [1, 3], :tSTRING_END, "/", [3, 4]) end def test_regexp_escape_C assert_scanned('/regex\\C-x/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regex\\C-x", [1, 10], :tSTRING_END, "/", [10, 11], :tREGEXP_OPT, "", [11, 11]) end def test_regexp_escape_C_M assert_scanned('/regex\\C-\\M-x/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regex\\C-\\M-x", [1, 13], :tSTRING_END, "/", [13, 14], :tREGEXP_OPT, "", [14, 14]) end def test_regexp_escape_C_M_craaaazy assert_scanned("/regex\\C-\\\n\\M-x/", :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regex\\C-\\M-x", [1, 15], :tSTRING_END, "/", [15, 16], :tREGEXP_OPT, "", [16, 16]) end def test_regexp_escape_C_bad_dash refute_scanned '/regex\\Cx/', :tREGEXP_BEG, "/", [0, 1] end def test_regexp_escape_C_bad_dash_eos refute_scanned '/regex\\C-/', :tREGEXP_BEG, "/", [0, 1] end def test_regexp_escape_C_bad_dash_eos2 refute_scanned '/regex\\C-', :tREGEXP_BEG, "/", [0, 1] end def test_regexp_escape_C_bad_eos refute_scanned '/regex\\C/', :tREGEXP_BEG, "/", [0, 1] end def test_regexp_escape_C_bad_eos2 refute_scanned '/regex\\c', :tREGEXP_BEG, "/", [0, 1] end def test_regexp_escape_M assert_scanned('/regex\\M-x/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regex\\M-x", [1, 10], :tSTRING_END, "/", [10, 11], :tREGEXP_OPT, "", [11, 11]) end def test_regexp_escape_M_C assert_scanned('/regex\\M-\\C-x/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regex\\M-\\C-x", [1, 13], :tSTRING_END, "/", [13, 14], :tREGEXP_OPT, "", [14, 14]) end def test_regexp_escape_M_bad_dash refute_scanned '/regex\\Mx/', :tREGEXP_BEG, "/", [0, 1] end def test_regexp_escape_M_bad_dash_eos refute_scanned '/regex\\M-/', :tREGEXP_BEG, "/", [0, 1] end def test_regexp_escape_M_bad_dash_eos2 refute_scanned 
'/regex\\M-', :tREGEXP_BEG, "/", [0, 1] end def test_regexp_escape_M_bad_eos refute_scanned '/regex\\M/', :tREGEXP_BEG, "/", [0, 1] end def test_regexp_escape_backslash_slash assert_scanned('/\\//', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, '/', [1, 3], :tSTRING_END, "/", [3, 4], :tREGEXP_OPT, "", [4, 4]) end def test_regexp_escape_backslash_terminator assert_scanned('%r%blah\\%blah%', :tREGEXP_BEG, "%r%", [0, 3], :tSTRING_CONTENT, "blah%blah", [3, 13], :tSTRING_END, "%", [13, 14], :tREGEXP_OPT, "", [14, 14]) end def test_regexp_escape_backslash_terminator_meta1 assert_scanned('%r{blah\\}blah}', :tREGEXP_BEG, "%r{", [0, 3], :tSTRING_CONTENT, "blah\\}blah", [3, 13], :tSTRING_END, "}", [13, 14], :tREGEXP_OPT, "", [14, 14]) end def test_regexp_escape_backslash_terminator_meta2 assert_scanned('%r/blah\\/blah/', :tREGEXP_BEG, "%r/", [0, 3], :tSTRING_CONTENT, "blah/blah", [3, 13], :tSTRING_END, "/", [13, 14], :tREGEXP_OPT, "", [14, 14]) end def test_regexp_escape_backslash_terminator_meta3 assert_scanned('%r/blah\\%blah/', :tREGEXP_BEG, "%r/", [0, 3], :tSTRING_CONTENT, "blah\\%blah", [3, 13], :tSTRING_END, "/", [13, 14], :tREGEXP_OPT, "", [14, 14]) end def test_regexp_escape_bad_eos refute_scanned '/regex\\', :tREGEXP_BEG, "/", [0, 1] end def test_regexp_escape_bs assert_scanned('/regex\\\\regex/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regex\\\\regex", [1, 13], :tSTRING_END, "/", [13, 14], :tREGEXP_OPT, "", [14, 14]) end def test_regexp_escape_c assert_scanned('/regex\\cxxx/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regex\\cxxx", [1, 11], :tSTRING_END, "/", [11, 12], :tREGEXP_OPT, "", [12, 12]) end def test_regexp_escape_c_backslash assert_scanned('/regex\\c\\n/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regex\\c\\n", [1, 10], :tSTRING_END, "/", [10, 11], :tREGEXP_OPT, "", [11, 11]) end def test_regexp_escape_chars assert_scanned('/re\\tge\\nxp/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "re\\tge\\nxp", [1, 11], :tSTRING_END, "/", [11, 12], :tREGEXP_OPT, "", [12, 12]) end def test_regexp_escape_double_backslash assert_scanned('/[\\/\\\\]$/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT,'[/\\\\]$', [1, 8], :tSTRING_END, "/", [8, 9], :tREGEXP_OPT, "", [9, 9]) end def test_regexp_escape_hex assert_scanned('/regex\\x61xp/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regex\\x61xp", [1, 12], :tSTRING_END, "/", [12, 13], :tREGEXP_OPT, "", [13, 13]) end def test_regexp_escape_hex_bad refute_scanned '/regex\\xzxp/', :tREGEXP_BEG, "/", [0, 1] end def test_regexp_escape_hex_one assert_scanned('/^[\\xd\\xa]{2}/on', :tREGEXP_BEG, '/', [0, 1], :tSTRING_CONTENT, '^[\\xd\\xa]{2}', [1, 13], :tSTRING_END, "/", [13, 14], :tREGEXP_OPT, 'on', [14, 16]) end def test_regexp_escape_oct1 assert_scanned('/regex\\0xp/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regex\\0xp", [1, 10], :tSTRING_END, "/", [10, 11], :tREGEXP_OPT, "", [11, 11]) end def test_regexp_escape_oct2 assert_scanned('/regex\\07xp/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regex\\07xp", [1, 11], :tSTRING_END, "/", [11, 12], :tREGEXP_OPT, "", [12, 12]) end def test_regexp_escape_oct3 assert_scanned('/regex\\10142/', :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regex\\10142", [1, 12], :tSTRING_END, "/", [12, 13], :tREGEXP_OPT, "", [13, 13]) end def test_regexp_escape_return assert_scanned("/regex\\\nregex/", :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "regexregex", [1, 13], :tSTRING_END, "/", [13, 14], :tREGEXP_OPT, "", [14, 14]) end def test_regexp_escape_delimiter_meta assert_scanned("%r(\\))", :tREGEXP_BEG, "%r(", 
[0, 3], :tSTRING_CONTENT, "\\)", [3, 5], :tSTRING_END, ")", [5, 6], :tREGEXP_OPT, "", [6, 6]) end def test_regexp_escape_delimiter_nonmeta assert_scanned("%r'\\''", :tREGEXP_BEG, "%r'", [0, 3], :tSTRING_CONTENT, "'", [3, 5], :tSTRING_END, "'", [5, 6], :tREGEXP_OPT, "", [6, 6]) end def test_regexp_escape_other_meta assert_scanned("/\\.\\$\\*\\+\\.\\?\\|/", :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, "\\.\\$\\*\\+\\.\\?\\|", [1, 15], :tSTRING_END, "/", [15, 16], :tREGEXP_OPT, "", [16, 16]) end def test_regexp_nm assert_scanned("/.*/nm", :tREGEXP_BEG, "/", [0, 1], :tSTRING_CONTENT, ".*", [1, 3], :tSTRING_END, "/", [3, 4], :tREGEXP_OPT, "nm", [4, 6]) end def test_rparen assert_scanned ")", :tRPAREN, ")", [0, 1] end def test_rshft assert_scanned("a >> 2", :tIDENTIFIER, "a", [0, 1], :tRSHFT, ">>", [2, 4], :tINTEGER, 2, [5, 6]) end def test_rshft_equals assert_scanned("a >>= 2", :tIDENTIFIER, "a", [0, 1], :tOP_ASGN, ">>", [2, 5], :tINTEGER, 2, [6, 7]) end def test_star assert_scanned("a * ", :tIDENTIFIER, "a", [0, 1], :tSTAR2, "*", [2, 3]) assert_equal :expr_value, @lex.state end def test_star2 assert_scanned("a ** ", :tIDENTIFIER, "a", [0, 1], :tPOW, "**", [2, 4]) assert_equal :expr_value, @lex.state end def test_star2_equals assert_scanned("a **= ", :tIDENTIFIER, "a", [0, 1], :tOP_ASGN, "**", [2, 5]) assert_equal :expr_beg, @lex.state end def test_star2_beg assert_scanned("** ", :tDSTAR, "**", [0, 2]) assert_equal :expr_beg, @lex.state end def test_star_arg @lex.state = :expr_arg assert_scanned(" *a", :tSTAR, "*", [1, 2], :tIDENTIFIER, "a", [2, 3]) assert_equal :expr_arg, @lex.state end def test_star_arg_beg @lex.state = :expr_beg assert_scanned("*a", :tSTAR, "*", [0, 1], :tIDENTIFIER, "a", [1, 2]) assert_equal :expr_arg, @lex.state end def test_star_arg_beg_fname @lex.state = :expr_fname assert_scanned("*a", :tSTAR2, "*", [0, 1], :tIDENTIFIER, "a", [1, 2]) assert_equal :expr_arg, @lex.state end def test_star_equals assert_scanned("a *= ", :tIDENTIFIER, "a", [0, 1], :tOP_ASGN, "*", [2, 4]) assert_equal :expr_beg, @lex.state end def test_string_bad_eos refute_scanned('%', :tSTRING_BEG, '%', [0, 1]) end def test_string_bad_eos_quote refute_scanned('%{nest', :tSTRING_BEG, '%}', [0, 2]) end def test_string_double assert_scanned('"string"', :tSTRING, "string", [0, 8]) end def test_string_double_escape_C assert_scanned('"\\C-a"', :tSTRING, "\001", [0, 6]) end def test_string_double_escape_C_backslash assert_scanned('"\\C-\\\\"', :tSTRING, "\034", [0, 7]) end def test_string_double_escape_C_escape assert_scanned('"\\C-\\M-a"', :tSTRING, "\201", [0, 9]) end def test_string_double_escape_C_question assert_scanned('"\\C-?"', :tSTRING, "\177", [0, 6]) end def test_string_double_escape_M assert_scanned('"\\M-a"', :tSTRING, "\341", [0, 6]) end def test_string_double_escape_M_backslash assert_scanned('"\\M-\\\\"', :tSTRING, "\334", [0, 7]) end def test_string_double_escape_M_escape assert_scanned('"\\M-\\C-a"', :tSTRING, "\201", [0, 9]) end def test_string_double_escape_bs1 assert_scanned('"a\\a\\a"', :tSTRING, "a\a\a", [0, 7]) end def test_string_double_escape_bs2 assert_scanned('"a\\\\a"', :tSTRING, "a\\a", [0, 6]) end def test_string_double_escape_c assert_scanned('"\\ca"', :tSTRING, "\001", [0, 5]) end def test_string_double_escape_c_escape assert_scanned('"\\c\\M-a"', :tSTRING, "\201", [0, 8]) end def test_string_double_escape_c_question assert_scanned('"\\c?"', :tSTRING, "\177", [0, 5]) end def test_string_double_escape_chars assert_scanned('"s\\tri\\ng"', :tSTRING, "s\tri\ng", [0, 10]) end def 
test_string_double_escape_hex assert_scanned('"n = \\x61\\x62\\x63"', :tSTRING, "n = abc", [0, 18]) end def test_string_double_escape_octal assert_scanned('"n = \\101\\102\\103"', :tSTRING, "n = ABC", [0, 18]) end def test_string_double_escape_octal_wrap assert_scanned('"\\753"', :tSTRING, "\xEB", [0, 6]) end def test_string_double_interp assert_scanned("\"blah #x a \#@a b \#$b c \#{3} # \"", :tSTRING_BEG, "\"", [0, 1], :tSTRING_CONTENT, "blah #x a ", [1, 11], :tSTRING_DVAR, nil, [11, 12], :tIVAR, "@a", [12, 14], :tSTRING_CONTENT, " b ", [14, 17], :tSTRING_DVAR, nil, [17, 18], :tGVAR, "$b", [18, 20], :tSTRING_CONTENT, " c ", [20, 23], :tSTRING_DBEG, '#{', [23, 25], :tINTEGER, 3, [25, 26], :tRCURLY, "}", [26, 27], :tSTRING_CONTENT, " # ", [27, 30], :tSTRING_END, "\"", [30, 31]) end def test_string_double_interp_label assert_scanned('"#{foo:bar}"', :tSTRING_BEG, '"', [0, 1], :tSTRING_DBEG, '#{', [1, 3], :tIDENTIFIER, 'foo', [3, 6], :tSYMBOL, 'bar', [6, 10], :tRCURLY, '}', [10, 11], :tSTRING_END, '"', [11, 12]) end def test_string_double_nested_curlies assert_scanned('%{nest{one{two}one}nest}', :tSTRING_BEG, '%{', [0, 2], :tSTRING_CONTENT, "nest{one{two}one}nest", [2, 23], :tSTRING_END, '}', [23, 24]) end def test_string_double_no_interp assert_scanned("\"# blah\"", # pound first :tSTRING, "# blah", [0, 8]) assert_scanned("\"blah # blah\"", # pound not first :tSTRING, "blah # blah", [0, 13]) end def test_string_escape_x_single assert_scanned('"\\x0"', :tSTRING, "\000", [0, 5]) end def test_string_pct_Q assert_scanned("%Q[s1 s2]", :tSTRING_BEG, '%Q[', [0, 3], :tSTRING_CONTENT, "s1 s2", [3, 8], :tSTRING_END, ']', [8, 9]) end def test_string_pct_W assert_scanned("%W[s1 s2\ns3]", :tWORDS_BEG, "%W[", [0, 3], :tSTRING_CONTENT, "s1", [3, 5], :tSPACE, nil, [5, 6], :tSTRING_CONTENT, "s2", [6, 8], :tSPACE, nil, [8, 9], :tSTRING_CONTENT, "s3", [9, 11], :tSPACE, nil, [11, 11], :tSTRING_END, ']', [11, 12]) end def test_string_pct_W_bs_nl assert_scanned("%W[s1 \\\ns2]", :tWORDS_BEG, "%W[", [0, 3], :tSTRING_CONTENT, "s1", [3, 5], :tSPACE, nil, [5, 6], :tSTRING_CONTENT, "\ns2", [6, 10], :tSPACE, nil, [10, 10], :tSTRING_END, ']', [10, 11]) end def test_string_pct_W_interp assert_scanned('%W[#{1}#{2} #@a]', :tWORDS_BEG, '%W[', [0, 3], :tSTRING_DBEG, '#{', [3, 5], :tINTEGER, 1, [5, 6], :tRCURLY, '}', [6, 7], :tSTRING_DBEG, '#{', [7, 9], :tINTEGER, 2, [9, 10], :tRCURLY, '}', [10, 11], :tSPACE, nil, [11, 12], :tSTRING_DVAR, nil, [12, 13], :tIVAR, '@a', [13, 15], :tSPACE, nil, [15, 15], :tSTRING_END, ']', [15, 16]) end def test_string_pct_I assert_scanned("%I(s1 s2)", :tSYMBOLS_BEG, "%I(", [0, 3], :tSTRING_CONTENT, "s1", [3, 5], :tSPACE, nil, [5, 6], :tSTRING_CONTENT, "s2", [6, 8], :tSPACE, nil, [8, 8], :tSTRING_END, ')', [8, 9]) end def test_string_pct_angle assert_scanned("%<blah>", :tSTRING_BEG, '%<', [0, 2], :tSTRING_CONTENT, "blah", [2, 6], :tSTRING_END, '>', [6, 7]) end def test_string_pct_pct assert_scanned("%%blah%", :tSTRING_BEG, '%%', [0, 2], :tSTRING_CONTENT, "blah", [2, 6], :tSTRING_END, '%', [6, 7]) end def test_string_pct_null assert_scanned("%\0blah\0", :tSTRING_BEG, "%\0", [0, 2], :tSTRING_CONTENT, "blah", [2, 6], :tSTRING_END, "\0", [6, 7]) end def test_string_pct_non_ascii refute_scanned("%★foo★") end def test_string_pct_alphabet refute_scanned("%AfooA") end def test_string_pct_number refute_scanned("%1foo1") end def test_string_pct_w assert_scanned("%w[s1 s2 ]", :tQWORDS_BEG, "%w[", [0, 3], :tSTRING_CONTENT, "s1", [3, 5], :tSPACE, nil, [5, 6], :tSTRING_CONTENT, "s2", [6, 8], :tSPACE, nil, [8, 9],
:tSTRING_END, "]", [9, 10]) end def test_string_pct_w_incomplete refute_scanned("%w[s1 ", :tQWORDS_BEG, "%w[", [0, 3], :tSTRING_CONTENT, "s1", [3, 5], :tSPACE, nil, [5, 6]) end def test_string_pct_w_bs_nl assert_scanned("%w[s1 \\\ns2]", :tQWORDS_BEG, "%w[", [0, 3], :tSTRING_CONTENT, "s1", [3, 5], :tSPACE, nil, [5, 6], :tSTRING_CONTENT, "\ns2", [6, 10], :tSPACE, nil, [10, 10], :tSTRING_END, ']', [10, 11]) end def test_string_pct_w_bs_sp assert_scanned("%w[s\\ 1 s\\ 2]", :tQWORDS_BEG, "%w[", [0, 3], :tSTRING_CONTENT, "s 1", [3, 7], :tSPACE, nil, [7, 8], :tSTRING_CONTENT, "s 2", [8, 12], :tSPACE, nil, [12, 12], :tSTRING_END, ']', [12, 13]) end def test_string_pct_w_tab assert_scanned("%w[abc\tdef]", :tQWORDS_BEG, "%w[", [0, 3], :tSTRING_CONTENT, "abc", [3, 6], :tSPACE, nil, [6, 7], :tSTRING_CONTENT, "def", [7, 10], :tSPACE, nil, [10, 10], :tSTRING_END, ']', [10, 11]) end def test_string_pct_w_null assert_scanned("%w\0abc\0", :tQWORDS_BEG, "%w\0", [0, 3], :tSTRING_CONTENT, "abc", [3, 6], :tSPACE, nil, [6, 6], :tSTRING_END, "\0", [6, 7]) end def test_string_pct_w_non_ascii refute_scanned("%w★foo★") end def test_string_pct_w_alphabet refute_scanned("%wAfooA") end def test_string_pct_w_number refute_scanned("%w1foo1") end def test_string_pct_i assert_scanned("%i(s1 s2)", :tQSYMBOLS_BEG, "%i(", [0, 3], :tSTRING_CONTENT, "s1", [3, 5], :tSPACE, nil, [5, 6], :tSTRING_CONTENT, "s2", [6, 8], :tSPACE, nil, [8, 8], :tSTRING_END, ')', [8, 9]) end def test_string_pct_backslash assert_scanned("%\\a\\", :tSTRING_BEG, "%\\", [0, 2], :tSTRING_CONTENT, "a", [2, 3], :tSTRING_END, "\\", [3, 4]) end def test_string_pct_w_backslash assert_scanned("%w\\s1 s2 \\", :tQWORDS_BEG, "%w\\", [0, 3], :tSTRING_CONTENT, "s1", [3, 5], :tSPACE, nil, [5, 6], :tSTRING_CONTENT, "s2", [6, 8], :tSPACE, nil, [8, 9], :tSTRING_END, "\\", [9, 10]) end def test_string_pct_w_backslash_nl assert_scanned("%w\\s1 s2 \\\n", :tQWORDS_BEG, "%w\\", [0, 3], :tSTRING_CONTENT, "s1", [3, 5], :tSPACE, nil, [5, 6], :tSTRING_CONTENT, "s2", [6, 8], :tSPACE, nil, [8, 9], :tSTRING_END, "\\", [9, 10], :tNL, nil, [10, 11]) end def test_string_pct_w_backslash_interp_nl assert_scanned("%W\\blah #x a \#@a b \#$b c \#{3} # \\", :tWORDS_BEG, "%W\\", [0, 3], :tSTRING_CONTENT, "blah", [3, 7], :tSPACE, nil, [7, 8], :tSTRING_CONTENT, "#x", [8, 10], :tSPACE, nil, [10, 11], :tSTRING_CONTENT, "a", [11, 12], :tSPACE, nil, [12, 13], :tSTRING_DVAR, nil, [13, 14], :tIVAR, "@a", [14, 16], :tSPACE, nil, [16, 17], :tSTRING_CONTENT, "b", [17, 18], :tSPACE, nil, [18, 19], :tSTRING_DVAR, nil, [19, 20], :tGVAR, "$b", [20, 22], :tSPACE, nil, [22, 23], :tSTRING_CONTENT, "c", [23, 24], :tSPACE, nil, [24, 25], :tSTRING_DBEG, '#{', [25, 27], :tINTEGER, 3, [27, 28], :tRCURLY, "}", [28, 29], :tSPACE, nil, [29, 30], :tSTRING_CONTENT, "#", [30, 31], :tSPACE, nil, [31, 32], :tSTRING_END, "\\", [32, 33]) end def test_string_pct_backslash_with_bad_escape # No escapes are allowed in a backslash-delimited string refute_scanned("%\\a\\n\\", :tSTRING_BEG, "%\\", [0, 2], :tSTRING_CONTENT, "a", [2, 3], :tSTRING_END, "\\", [3, 4], :tIDENTIFIER, "n", [4, 5]) end def test_string_pct_intertwined_with_heredoc assert_scanned("<<-foo + %\\a\nbar\nfoo\nb\\", :tSTRING_BEG, "<<\"", [0, 6], :tSTRING_CONTENT, "bar\n", [13, 17], :tSTRING_END, "foo", [17, 20], :tPLUS, "+", [7, 8], :tSTRING_BEG, "%\\", [9, 11], :tSTRING_CONTENT, "a\n", [11, 13], :tSTRING_CONTENT, "b", [21, 22], :tSTRING_END, "\\", [22, 23]) end def test_string_pct_q_backslash assert_scanned("%q\\a\\", :tSTRING_BEG, "%q\\", [0, 3], 
:tSTRING_CONTENT, "a", [3, 4], :tSTRING_END, "\\", [4, 5]) end def test_string_pct_Q_backslash assert_scanned("%Q\\a\\", :tSTRING_BEG, "%Q\\", [0, 3], :tSTRING_CONTENT, "a", [3, 4], :tSTRING_END, "\\", [4, 5]) end def test_string_single assert_scanned("'string'", :tSTRING, "string", [0, 8]) end def test_string_single_escape_chars assert_scanned("'s\\tri\\ng'", :tSTRING, "s\\tri\\ng", [0, 10]) end def test_string_single_nl assert_scanned("'blah\\\nblah'", :tSTRING_BEG, "'", [0, 1], :tSTRING_CONTENT, "blah\\\n", [1, 7], :tSTRING_CONTENT, "blah", [7, 11], :tSTRING_END, "'", [11, 12]) end def test_symbol assert_scanned(":symbol", :tSYMBOL, "symbol", [0, 7]) end def test_symbol_double assert_scanned(":\"symbol\"", :tSYMBEG, ":\"", [0, 2], :tSTRING_CONTENT, "symbol", [2, 8], :tSTRING_END, "\"", [8, 9]) end def test_symbol_single assert_scanned(":'symbol'", :tSYMBEG, ":'", [0, 2], :tSTRING_CONTENT, "symbol", [2, 8], :tSTRING_END, "'", [8, 9]) end def test_ternary assert_scanned("a ? b : c", :tIDENTIFIER, "a", [0, 1], :tEH, "?", [2, 3], :tIDENTIFIER, "b", [4, 5], :tCOLON, ":", [6, 7], :tIDENTIFIER, "c", [8, 9]) assert_scanned("a ?b : c", :tIDENTIFIER, "a", [0, 1], :tINTEGER, 98, [2, 4], :tCOLON, ":", [5, 6], :tIDENTIFIER, "c", [7, 8]) assert_scanned("a ?bb : c", # GAH! MATZ!!! :tIDENTIFIER, "a", [0, 1], :tEH, "?", [2, 3], :tIDENTIFIER, "bb", [3, 5], :tCOLON, ":", [6, 7], :tIDENTIFIER, "c", [8, 9]) assert_scanned("42 ?", # 42 forces expr_end :tINTEGER, 42, [0, 2], :tEH, "?", [3, 4]) end def test_tilde assert_scanned "~", :tTILDE, "~", [0, 1] end def test_tilde_unary @lex.state = :expr_fname assert_scanned "~@", :tTILDE, "~@", [0, 2] end def test_uminus assert_scanned("-blah", :tUMINUS, "-", [0, 1], :tIDENTIFIER, "blah", [1, 5]) end def test_underscore assert_scanned("_var", :tIDENTIFIER, "_var", [0, 4]) end def test_underscore_end assert_scanned("__END__\n") assert_scanned("__END__") assert_scanned("__END__ foo", :tIDENTIFIER, '__END__', [0, 7], :tIDENTIFIER, 'foo', [8, 11]) assert_scanned("__END__\rfoo", :tIDENTIFIER, '__END__', [0, 7], :tIDENTIFIER, 'foo', [8, 11]) end def test_uplus assert_scanned("+blah", :tUPLUS, "+", [0, 1], :tIDENTIFIER, "blah", [1, 5]) end def test_if_unless_mod assert_scanned("return if true unless false", :kRETURN, "return", [0, 6], :kIF_MOD, "if", [7, 9], :kTRUE, "true", [10, 14], :kUNLESS_MOD, "unless", [15, 21], :kFALSE, "false", [22, 27]) end def test_if_stmt assert_scanned("if true\n return end", :kIF, "if", [0, 2], :kTRUE, "true", [3, 7], :tNL, nil, [7, 8], :kRETURN, "return", [9, 15], :kEND, "end", [16, 19]) end def test_sclass_label setup_lexer 20 assert_scanned("class << a:b", :kCLASS, 'class', [0, 5], :tLSHFT, '<<', [6, 8], :tIDENTIFIER, 'a', [9, 10], :tSYMBOL, 'b', [10, 12]) end def test_fname_pct_s__22 setup_lexer 22 @lex.state = :expr_fname assert_scanned("%s(a)", :tPERCENT, '%', [0, 1], :tIDENTIFIER, 's', [1, 2], :tLPAREN2, '(', [2, 3], :tIDENTIFIER, 'a', [3, 4], :tRPAREN, ')', [4, 5]) end def test_fname_pct_s__23 setup_lexer 23 @lex.state = :expr_fname assert_scanned("%s(a)", :tSYMBEG, '%s(', [0, 3], :tSTRING_CONTENT, 'a', [3, 4], :tSTRING_END, ')', [4, 5]) end def test_static_env env = Parser::StaticEnvironment.new env.declare "a" @lex.static_env = env assert_scanned("a [42]", :tIDENTIFIER, "a", [0, 1], :tLBRACK2, "[", [2, 3], :tINTEGER, 42, [3, 5], :tRBRACK, "]", [5, 6]) end def test_int_suffix [18, 19, 20].each do |version| setup_lexer version assert_scanned("42r", :tINTEGER, 42, [0, 2], :tIDENTIFIER, 'r', [2, 3]) assert_scanned("42if", :tINTEGER, 42, 
[0, 2], :kIF_MOD, 'if', [2, 4]) end setup_lexer 21 assert_scanned("42r", :tRATIONAL, Rational(42), [0, 3]) assert_scanned("42i", :tIMAGINARY, Complex(0, 42), [0, 3]) assert_scanned("42ri", :tIMAGINARY, Complex(0, Rational(42)), [0, 4]) end def test_float_suffix [18, 19, 20].each do |version| setup_lexer version assert_scanned("42.1r", :tFLOAT, 42.1, [0, 4], :tIDENTIFIER, 'r', [4, 5]) assert_scanned("42.1if", :tFLOAT, 42.1, [0, 4], :kIF_MOD, 'if', [4, 6]) assert_scanned("1e1r", :tFLOAT, 1e1, [0, 3], :tIDENTIFIER, 'r', [3, 4]) end begin # Feature-check. Rational("10") setup_lexer 21 assert_scanned("42.1r", :tRATIONAL, Rational(421, 10), [0, 5]) assert_scanned("42.1i", :tIMAGINARY, Complex(0, 42.1), [0, 5]) assert_scanned("42.1ri", :tIMAGINARY, Complex(0, Rational(421, 10)), [0, 6]) assert_scanned("42.1ir", :tIMAGINARY, Complex(0, 42.1), [0, 5], :tIDENTIFIER, 'r', [5, 6]) assert_scanned("1e1i", :tIMAGINARY, Complex(0, 1e1), [0, 4]) assert_scanned("1e1r", :tFLOAT, 1e1, [0, 3], :tIDENTIFIER, 'r', [3, 4]) assert_scanned("1e1ri", :tFLOAT, 1e1, [0, 3], :tIDENTIFIER, 'ri', [3, 5]) assert_scanned("1e1ir", :tIMAGINARY, Complex(0, 1e1), [0, 4], :tIDENTIFIER, 'r', [4, 5]) rescue NoMethodError # Ruby not modern enough end end def test_eof assert_scanned("self", :kSELF, "self", [0, 4]) assert_equal([false, ["$eof", Parser::Source::Range.new(@lex.source_buffer, 4, 4)]], @lex.advance) end # # Test for 'fluent interface' # def test_fluent_dot assert_scanned("x\n.y", :tIDENTIFIER, 'x', [0, 1], :tDOT, '.', [2, 3], :tIDENTIFIER, 'y', [3, 4]) assert_scanned("x\n .y", :tIDENTIFIER, 'x', [0, 1], :tDOT, '.', [4, 5], :tIDENTIFIER, 'y', [5, 6]) assert_scanned("x # comment\n .y", :tIDENTIFIER, 'x', [0, 1], :tDOT, '.', [14, 15], :tIDENTIFIER, 'y', [15, 16]) end def test_fluent_and_dot assert_scanned("x\n&.y", :tIDENTIFIER, 'x', [0, 1], :tANDDOT, '&.', [2, 4], :tIDENTIFIER, 'y', [4, 5]) end # # Tests for whitespace. 
# def test_whitespace_fname @lex.state = :expr_fname assert_scanned('class', :kCLASS, 'class', [0, 5]) @lex.state = :expr_fname assert_scanned(' class', :kCLASS, 'class', [1, 6]) @lex.state = :expr_fname assert_scanned("\nclass", :kCLASS, 'class', [1, 6]) @lex.state = :expr_fname assert_scanned("\\\nclass", :kCLASS, 'class', [2, 7]) @lex.state = :expr_fname assert_scanned("#foo\nclass", :kCLASS, 'class', [5, 10]) end def test_whitespace_endfn setup_lexer(21) @lex.state = :expr_endfn assert_scanned('foo:', :tLABEL, 'foo', [0, 4]) @lex.state = :expr_endfn assert_scanned(' foo:', :tLABEL, 'foo', [1, 5]) @lex.state = :expr_endfn assert_scanned("\nfoo:", :tNL, nil, [0, 1], :tIDENTIFIER, 'foo', [1, 4], :tCOLON, ':', [4, 5]) @lex.state = :expr_endfn assert_scanned("\nfoo: ", :tNL, nil, [0, 1], :tIDENTIFIER, 'foo', [1, 4], :tCOLON, ':', [4, 5]) @lex.state = :expr_endfn assert_scanned("\\\nfoo:", :tLABEL, 'foo', [2, 6]) @lex.state = :expr_endfn assert_scanned("#foo\nfoo:", :tNL, nil, [4, 5], :tIDENTIFIER, 'foo', [5, 8], :tCOLON, ':', [8, 9]) @lex.state = :expr_endfn assert_scanned("#foo\nfoo: ", :tNL, nil, [4, 5], :tIDENTIFIER, 'foo', [5, 8], :tCOLON, ':', [8, 9]) end def test_whitespace_dot @lex.state = :expr_dot assert_scanned('class', :tIDENTIFIER, 'class', [0, 5]) @lex.state = :expr_dot assert_scanned(' class', :tIDENTIFIER, 'class', [1, 6]) @lex.state = :expr_dot assert_scanned("\nclass", :tIDENTIFIER, 'class', [1, 6]) @lex.state = :expr_dot assert_scanned("\\\nclass", :tIDENTIFIER, 'class', [2, 7]) @lex.state = :expr_dot assert_scanned("#foo\nclass", :tIDENTIFIER, 'class', [5, 10]) end def test_whitespace_arg @lex.state = :expr_arg assert_scanned('+', :tPLUS, '+', [0, 1]) @lex.state = :expr_arg assert_scanned(' +', :tUPLUS, '+', [1, 2]) @lex.state = :expr_arg assert_scanned("\n+", :tNL, nil, [0, 1], :tUPLUS, '+', [1, 2]) @lex.state = :expr_arg assert_scanned("\\\n+", :tUPLUS, '+', [2, 3]) @lex.state = :expr_arg assert_scanned("\\\n +", :tUPLUS, '+', [3, 4]) @lex.state = :expr_arg assert_scanned("#foo\n+", :tNL, nil, [4, 5], :tUPLUS, '+', [5, 6]) end def test_whitespace_endarg @lex.state = :expr_endarg assert_scanned('{', :tLBRACE_ARG, '{', [0, 1]) @lex.state = :expr_endarg assert_scanned(' {', :tLBRACE_ARG, '{', [1, 2]) @lex.state = :expr_endarg assert_scanned("\n{", :tNL, nil, [0, 1], :tLBRACE, '{', [1, 2]) @lex.state = :expr_endarg assert_scanned("\\\n{", :tLBRACE_ARG, '{', [2, 3]) @lex.state = :expr_endarg assert_scanned("#foo\n{", :tNL, nil, [4, 5], :tLBRACE, '{', [5, 6]) end def test_whitespace_mid @lex.state = :expr_mid assert_scanned('+', :tUPLUS, '+', [0, 1]) @lex.state = :expr_mid assert_scanned(' +', :tUPLUS, '+', [1, 2]) @lex.state = :expr_mid assert_scanned("\n+", :tNL, nil, [0, 1], :tUPLUS, '+', [1, 2]) @lex.state = :expr_mid assert_scanned("\\\n+", :tUPLUS, '+', [2, 3]) @lex.state = :expr_mid assert_scanned("#foo\n+", :tNL, nil, [4, 5], :tUPLUS, '+', [5, 6]) end def test_whitespace_beg @lex.state = :expr_beg assert_scanned('+', :tUPLUS, '+', [0, 1]) @lex.state = :expr_beg assert_scanned(' +', :tUPLUS, '+', [1, 2]) @lex.state = :expr_beg assert_scanned("\n+", :tUPLUS, '+', [1, 2]) @lex.state = :expr_beg assert_scanned("\\\n+", :tUPLUS, '+', [2, 3]) @lex.state = :expr_beg assert_scanned("#foo\n+", :tUPLUS, '+', [5, 6]) end def test_whitespace_value setup_lexer(20) @lex.state = :expr_value assert_scanned('a:b', :tIDENTIFIER, 'a', [0, 1], :tSYMBOL, 'b', [1, 3]) @lex.state = :expr_value assert_scanned(' a:b', :tIDENTIFIER, 'a', [1, 2], :tSYMBOL, 'b', [2, 4]) @lex.state = :expr_value 
assert_scanned("\na:b", :tIDENTIFIER, 'a', [1, 2], :tSYMBOL, 'b', [2, 4]) @lex.state = :expr_value assert_scanned("\\\na:b", :tIDENTIFIER, 'a', [2, 3], :tSYMBOL, 'b', [3, 5]) @lex.state = :expr_value assert_scanned("#foo\na:b", :tIDENTIFIER, 'a', [5, 6], :tSYMBOL, 'b', [6, 8]) end def test_whitespace_end @lex.state = :expr_end assert_scanned('+ 1', :tPLUS, '+', [0, 1], :tINTEGER, 1, [2, 3]) @lex.state = :expr_end assert_scanned(' + 1', :tPLUS, '+', [1, 2], :tINTEGER, 1, [3, 4]) @lex.state = :expr_end assert_scanned("\n+ 1", :tNL, nil, [0, 1], :tUNARY_NUM, '+', [1, 2], :tINTEGER, 1, [3, 4]) @lex.state = :expr_end assert_scanned("\\\n+ 1", :tPLUS, '+', [2, 3], :tINTEGER, 1, [4, 5]) @lex.state = :expr_end assert_scanned("#foo\n+ 1", :tNL, nil, [4, 5], :tUNARY_NUM, '+', [5, 6], :tINTEGER, 1, [7, 8]) end def test_whitespace_cr setup_lexer(20) assert_scanned("<", :kRESCUE, 'rescue', [0, 6], :tASSOC, '=>', [6, 8]) end def test_bug_expr_arg_percent @lex.state = :expr_arg assert_scanned("%[", :tPERCENT, "%", [0, 1], :tLBRACK, "[", [1, 2]) @lex.state = :expr_arg assert_scanned("%=1", :tOP_ASGN, "%", [0, 2], :tINTEGER, 1, [2, 3]) @lex.state = :expr_arg assert_scanned(" %[1]", :tSTRING_BEG, "%[", [1, 3], :tSTRING_CONTENT, '1', [3, 4], :tSTRING_END, ']', [4, 5]) @lex.state = :expr_arg assert_scanned(" %=1=", :tOP_ASGN, "%", [1, 3], :tINTEGER, 1, [3, 4], :tEQL, "=", [4, 5]) @lex.state = :expr_arg assert_scanned(" %\n", :tPERCENT, '%', [1, 2]) end def test_bug_expr_arg_lt_lt @lex.state = :expr_arg assert_scanned("< e message = e.message end assert_diagnoses( [:error, :invalid_regexp, {:message => message}], %q[/?/], %q(~~~ location), SINCE_1_9) assert_diagnoses( [:error, :invalid_regexp, {:message => message}], %q[/#{""}?/], %q(~~~~~~~~ location), SINCE_1_9) end def test_regexp_error_invalid_encoding_conversion message = if defined?(JRUBY_VERSION) '"\\xE3\\x81\\x82" from UTF-8 to ASCII-8BIT' else 'U+3042 from UTF-8 to ASCII-8BIT' end assert_diagnoses( [:error, :invalid_regexp, { message: message }], %q[/あ/n], %q(~~~ location)) end # Arrays def test_array_plain assert_parses( s(:array, s(:int, 1), s(:int, 2)), %q{[1, 2]}, %q{^ begin | ^ end |~~~~~~ expression}) end def test_array_splat assert_parses( s(:array, s(:int, 1), s(:splat, s(:lvar, :foo)), s(:int, 2)), %q{[1, *foo, 2]}, %q{^ begin | ^ end | ^ operator (splat) | ~~~~ expression (splat) |~~~~~~~~~~~~ expression}, SINCE_1_9) assert_parses( s(:array, s(:int, 1), s(:splat, s(:lvar, :foo))), %q{[1, *foo]}, %q{^ begin | ^ end | ^ operator (splat) | ~~~~ expression (splat) |~~~~~~~~~ expression}) assert_parses( s(:array, s(:splat, s(:lvar, :foo))), %q{[*foo]}) end def test_array_assocs assert_parses( s(:array, s(:hash, s(:pair, s(:int, 1), s(:int, 2)))), %q{[ 1 => 2 ]}, %q{ ~~ operator (hash.pair) | ~~~~~~ expression (hash.pair) | ~~~~~~ expression (hash)}) assert_parses( s(:array, s(:int, 1), s(:hash, s(:pair, s(:int, 2), s(:int, 3)))), %q{[ 1, 2 => 3 ]}, %q{}, SINCE_1_9) end def test_array_words assert_parses( s(:array, s(:str, 'foo'), s(:str, 'bar')), %q{%w[foo bar]}, %q{^^^ begin | ^ end | ~~~ expression (str) |~~~~~~~~~~~ expression}) end def test_array_words_interp assert_parses( s(:array, s(:str, 'foo'), s(:dstr, s(:begin, s(:lvar, :bar)))), %q{%W[foo #{bar}]}, %q{^^^ begin | ^^ begin (dstr.begin) | ^ end (dstr.begin) | ~~~~~~ expression (dstr.begin) | ^ end | ~~~ expression (str) | ~~~ expression (dstr.begin.lvar) |~~~~~~~~~~~~~~ expression}) assert_parses( s(:array, s(:str, 'foo'), s(:dstr, s(:begin, s(:lvar, :bar)), s(:str, 'foo'), s(:ivar, 
:@baz))), %q{%W[foo #{bar}foo#@baz]}) end def test_array_words_empty assert_parses( s(:array), %q{%w[]}, %q{^^^ begin | ^ end |~~~~ expression}) assert_parses( s(:array), %q{%W()}) end def test_array_symbols assert_parses( s(:array, s(:sym, :foo), s(:sym, :bar)), %q{%i[foo bar]}, %q{^^^ begin | ^ end | ~~~ expression (sym) |~~~~~~~~~~~ expression}, SINCE_2_0) end def test_array_symbols_interp assert_parses( s(:array, s(:sym, :foo), s(:dsym, s(:begin, s(:lvar, :bar)))), %q{%I[foo #{bar}]}, %q{^^^ begin | ^ end | ~~~ expression (sym) | ^^ begin (dsym.begin) | ^ end (dsym.begin) | ~~~~~~ expression (dsym.begin) | ~~~ expression (dsym.begin.lvar) |~~~~~~~~~~~~~~ expression}, SINCE_2_0) assert_parses( s(:array, s(:dsym, s(:str, 'foo'), s(:begin, s(:lvar, :bar)))), %q{%I[foo#{bar}]}, %q{}, SINCE_2_0) end def test_array_symbols_empty assert_parses( s(:array), %q{%i[]}, %q{^^^ begin | ^ end |~~~~ expression}, SINCE_2_0) assert_parses( s(:array), %q{%I()}, %q{}, SINCE_2_0) end # Hashes def test_hash_empty assert_parses( s(:hash), %q[{ }], %q{^ begin | ^ end |~~~ expression}) end def test_hash_hashrocket assert_parses( s(:hash, s(:pair, s(:int, 1), s(:int, 2))), %q[{ 1 => 2 }], %q{^ begin | ^ end | ^^ operator (pair) | ~~~~~~ expression (pair) |~~~~~~~~~~ expression}) assert_parses( s(:hash, s(:pair, s(:int, 1), s(:int, 2)), s(:pair, s(:sym, :foo), s(:str, 'bar'))), %q[{ 1 => 2, :foo => "bar" }]) end def test_hash_label assert_parses( s(:hash, s(:pair, s(:sym, :foo), s(:int, 2))), %q[{ foo: 2 }], %q{^ begin | ^ end | ^ operator (pair) | ~~~ expression (pair.sym) | ~~~~~~ expression (pair) |~~~~~~~~~~ expression}, SINCE_1_9) end def test_hash_label_end assert_parses( s(:hash, s(:pair, s(:sym, :foo), s(:int, 2))), %q[{ 'foo': 2 }], %q{^ begin | ^ end | ^ operator (pair) | ^ begin (pair.sym) | ^ end (pair.sym) | ~~~~~ expression (pair.sym) | ~~~~~~~~ expression (pair) |~~~~~~~~~~~~ expression}, SINCE_2_2) assert_parses( s(:hash, s(:pair, s(:sym, :foo), s(:int, 2)), s(:pair, s(:sym, :bar), s(:hash))), %q[{ 'foo': 2, 'bar': {}}], %q{}, SINCE_2_2) assert_parses( s(:send, nil, :f, s(:if, s(:send, nil, :a), s(:str, "a"), s(:int, 1))), %q{f(a ? 
"a":1)}, %q{}, SINCE_2_2) end def test_hash_kwsplat assert_parses( s(:hash, s(:pair, s(:sym, :foo), s(:int, 2)), s(:kwsplat, s(:lvar, :bar))), %q[{ foo: 2, **bar }], %q{ ^^ operator (kwsplat) | ~~~~~ expression (kwsplat)}, SINCE_2_0) end def test_hash_no_hashrocket assert_parses( s(:hash, s(:pair, s(:int, 1), s(:int, 2))), %q[{ 1, 2 }], %q{^ begin | ^ end | ~~~~ expression (pair) |~~~~~~~~ expression}, %w(1.8)) end def test_hash_no_hashrocket_odd assert_diagnoses( [:error, :odd_hash], %q[{ 1, 2, 3 }], %q( ~ location), %w(1.8)) end # Range def test_range_inclusive assert_parses( s(:irange, s(:int, 1), s(:int, 2)), %q{1..2}, %q{ ~~ operator |~~~~ expression}) end def test_range_exclusive assert_parses( s(:erange, s(:int, 1), s(:int, 2)), %q{1...2}, %q{ ~~~ operator |~~~~~ expression}) end def test_range_endless assert_parses( s(:irange, s(:int, 1), nil), %q{1..}, %q{~~~ expression | ~~ operator}, SINCE_2_6) assert_parses( s(:erange, s(:int, 1), nil), %q{1...}, %q{~~~~ expression | ~~~ operator}, SINCE_2_6) end def test_beginless_range_before_27 assert_diagnoses( [:error, :unexpected_token, { :token => 'tDOT2' }], %q{..42}, %q{^^ location}, ALL_VERSIONS - SINCE_2_7 ) assert_diagnoses( [:error, :unexpected_token, { :token => 'tDOT3' }], %q{...42}, %q{^^^ location}, ALL_VERSIONS - SINCE_2_7 ) end def test_beginless_range assert_parses( s(:irange, nil, s(:int, 100)), %q{..100}, %q{~~~~~ expression |~~ operator}, SINCE_2_7 ) assert_parses( s(:erange, nil, s(:int, 100)), %q{...100}, %q{~~~~~~ expression |~~~ operator}, SINCE_2_7 ) end def test_beginless_irange_after_newline assert_parses( s(:begin, s(:lvar, :foo), s(:irange, nil, s(:int, 100))), %Q{foo\n..100}, %q{}, SINCE_2_7 ) end def test_beginless_erange_after_newline assert_parses( s(:begin, s(:lvar, :foo), s(:erange, nil, s(:int, 100))), %Q{foo\n...100}, %q{}, SINCE_2_7 ) end # # Access # # Variables and pseudovariables def test_self assert_parses( s(:self), %q{self}, %q{~~~~ expression}) end def test_lvar assert_parses( s(:lvar, :foo), %q{foo}, %q{~~~ expression}) end def test_ivar assert_parses( s(:ivar, :@foo), %q{@foo}, %q{~~~~ expression}) end def test_cvar assert_parses( s(:cvar, :@@foo), %q{@@foo}, %q{~~~~~ expression}) end def test_gvar assert_parses( s(:gvar, :$foo), %q{$foo}, %q{~~~~ expression}) end def test_gvar_dash_empty assert_diagnoses( [:fatal, :unexpected, { :character => '$' }], %q{$- }, %q{^ location}, %w(2.1)) end def test_back_ref assert_parses( s(:back_ref, :$+), %q{$+}, %q{~~ expression}) end def test_nth_ref assert_parses( s(:nth_ref, 10), %q{$10}, %q{~~~ expression}) end # Constants def test_const_toplevel assert_parses( s(:const, s(:cbase), :Foo), %q{::Foo}, %q{ ~~~ name |~~ double_colon |~~~~~ expression}) end def test_const_scoped assert_parses( s(:const, s(:const, nil, :Bar), :Foo), %q{Bar::Foo}, %q{ ~~~ name | ~~ double_colon |~~~~~~~~ expression}) end def test_const_unscoped assert_parses( s(:const, nil, :Foo), %q{Foo}, %q{~~~ name |~~~ expression}) end def test___ENCODING__ assert_parses( s(:__ENCODING__), %q{__ENCODING__}, %q{~~~~~~~~~~~~ expression}, SINCE_1_9) end def test___ENCODING___legacy_ Parser::Builders::Default.emit_encoding = false assert_parses( s(:const, s(:const, nil, :Encoding), :UTF_8), %q{__ENCODING__}, %q{~~~~~~~~~~~~ expression}, SINCE_1_9) ensure Parser::Builders::Default.emit_encoding = true end # defined? def test_defined assert_parses( s(:defined?, s(:lvar, :foo)), %q{defined? 
foo}, %q{~~~~~~~~ keyword |~~~~~~~~~~~~ expression}) assert_parses( s(:defined?, s(:lvar, :foo)), %q{defined?(foo)}, %q{~~~~~~~~ keyword | ^ begin | ^ end |~~~~~~~~~~~~~ expression}) assert_parses( s(:defined?, s(:ivar, :@foo)), %q{defined? @foo}) end # # Assignment # # Variables def test_lvasgn assert_parses( s(:begin, s(:lvasgn, :var, s(:int, 10)), s(:lvar, :var)), %q{var = 10; var}, %q{~~~ name (lvasgn) | ^ operator (lvasgn) |~~~~~~~~ expression (lvasgn) }) end def test_ivasgn assert_parses( s(:ivasgn, :@var, s(:int, 10)), %q{@var = 10}, %q{~~~~ name | ^ operator |~~~~~~~~~ expression }) end def test_cvasgn assert_parses( s(:cvasgn, :@@var, s(:int, 10)), %q{@@var = 10}, %q{~~~~~ name | ^ operator |~~~~~~~~~~ expression }) end def test_gvasgn assert_parses( s(:gvasgn, :$var, s(:int, 10)), %q{$var = 10}, %q{~~~~ name | ^ operator |~~~~~~~~~ expression }) end def test_asgn_cmd assert_parses( s(:lvasgn, :foo, s(:send, nil, :m, s(:lvar, :foo))), %q{foo = m foo}) assert_parses( s(:lvasgn, :foo, s(:lvasgn, :bar, s(:send, nil, :m, s(:lvar, :foo)))), %q{foo = bar = m foo}, %q{}, ALL_VERSIONS - %w(1.8 mac ios)) end def test_asgn_keyword_invalid assert_diagnoses( [:error, :invalid_assignment], %q{nil = foo}, %q{~~~ location}) assert_diagnoses( [:error, :invalid_assignment], %q{self = foo}, %q{~~~~ location}) assert_diagnoses( [:error, :invalid_assignment], %q{true = foo}, %q{~~~~ location}) assert_diagnoses( [:error, :invalid_assignment], %q{false = foo}, %q{~~~~~ location}) assert_diagnoses( [:error, :invalid_assignment], %q{__FILE__ = foo}, %q{~~~~~~~~ location}) assert_diagnoses( [:error, :invalid_assignment], %q{__LINE__ = foo}, %q{~~~~~~~~ location}) end def test_asgn_backref_invalid assert_diagnoses( [:error, :backref_assignment], %q{$1 = foo}, %q{~~ location}) end # Constants def test_casgn_toplevel assert_parses( s(:casgn, s(:cbase), :Foo, s(:int, 10)), %q{::Foo = 10}, %q{ ~~~ name | ^ operator |~~ double_colon |~~~~~~~~~~ expression }) end def test_casgn_scoped assert_parses( s(:casgn, s(:const, nil, :Bar), :Foo, s(:int, 10)), %q{Bar::Foo = 10}, %q{ ~~~ name | ^ operator | ~~ double_colon |~~~~~~~~~~~~~ expression }) end def test_casgn_unscoped assert_parses( s(:casgn, nil, :Foo, s(:int, 10)), %q{Foo = 10}, %q{~~~ name | ^ operator |~~~~~~~~ expression }) end def test_casgn_invalid assert_diagnoses( [:error, :dynamic_const], %q{def f; Foo = 1; end}, %q{ ~~~ location}) assert_diagnoses( [:error, :dynamic_const], %q{def f; Foo::Bar = 1; end}, %q{ ~~~~~~~~ location}) assert_diagnoses( [:error, :dynamic_const], %q{def f; ::Bar = 1; end}, %q{ ~~~~~ location}) assert_diagnoses( [:error, :dynamic_const], %q{def self.f; Foo = 1; end}, %q{ ~~~ location}) assert_diagnoses( [:error, :dynamic_const], %q{def self.f; Foo::Bar = 1; end}, %q{ ~~~~~~~~ location}) assert_diagnoses( [:error, :dynamic_const], %q{def self.f; ::Bar = 1; end}, %q{ ~~~~~ location}) end # Multiple assignment def test_masgn assert_parses( s(:masgn, s(:mlhs, s(:lvasgn, :foo), s(:lvasgn, :bar)), s(:array, s(:int, 1), s(:int, 2))), %q{foo, bar = 1, 2}, %q{ ^ operator |~~~~~~~~ expression (mlhs) | ~~~~ expression (array) |~~~~~~~~~~~~~~~ expression }) assert_parses( s(:masgn, s(:mlhs, s(:lvasgn, :foo), s(:lvasgn, :bar)), s(:array, s(:int, 1), s(:int, 2))), %q{(foo, bar) = 1, 2}, %q{^ begin (mlhs) | ^ end (mlhs) |~~~~~~~~~~ expression (mlhs) |~~~~~~~~~~~~~~~~~ expression}) assert_parses( s(:masgn, s(:mlhs, s(:lvasgn, :foo), s(:lvasgn, :bar), s(:lvasgn, :baz)), s(:array, s(:int, 1), s(:int, 2))), %q{foo, bar, baz = 1, 2}) end def 
test_masgn_splat assert_parses( s(:masgn, s(:mlhs, s(:ivasgn, :@foo), s(:cvasgn, :@@bar)), s(:array, s(:splat, s(:lvar, :foo)))), %q{@foo, @@bar = *foo}, %q{ ^ operator (array.splat) | ~~~~ expression (array.splat) }) assert_parses( s(:masgn, s(:mlhs, s(:lvasgn, :a), s(:lvasgn, :b)), s(:array, s(:splat, s(:lvar, :foo)), s(:lvar, :bar))), %q{a, b = *foo, bar}, %q{}, SINCE_1_9) assert_parses( s(:masgn, s(:mlhs, s(:lvasgn, :a), s(:splat, s(:lvasgn, :b))), s(:lvar, :bar)), %q{a, *b = bar}) assert_parses( s(:masgn, s(:mlhs, s(:lvasgn, :a), s(:splat, s(:lvasgn, :b)), s(:lvasgn, :c)), s(:lvar, :bar)), %q{a, *b, c = bar}, %q{}, SINCE_1_9) assert_parses( s(:masgn, s(:mlhs, s(:lvasgn, :a), s(:splat)), s(:lvar, :bar)), %q{a, * = bar}) assert_parses( s(:masgn, s(:mlhs, s(:lvasgn, :a), s(:splat), s(:lvasgn, :c)), s(:lvar, :bar)), %q{a, *, c = bar}, %q{}, SINCE_1_9) assert_parses( s(:masgn, s(:mlhs, s(:splat, s(:lvasgn, :b))), s(:lvar, :bar)), %q{*b = bar}) assert_parses( s(:masgn, s(:mlhs, s(:splat, s(:lvasgn, :b)), s(:lvasgn, :c)), s(:lvar, :bar)), %q{*b, c = bar}, %q{}, SINCE_1_9) assert_parses( s(:masgn, s(:mlhs, s(:splat)), s(:lvar, :bar)), %q{* = bar}) assert_parses( s(:masgn, s(:mlhs, s(:splat), s(:lvasgn, :c), s(:lvasgn, :d)), s(:lvar, :bar)), %q{*, c, d = bar}, %q{}, SINCE_1_9) end def test_masgn_nested assert_parses( s(:masgn, s(:mlhs, s(:lvasgn, :a), s(:mlhs, s(:lvasgn, :b), s(:lvasgn, :c))), s(:lvar, :foo)), %q{a, (b, c) = foo}, %q{ ^ begin (mlhs.mlhs) | ^ end (mlhs.mlhs) | ~~~~~~ expression (mlhs.mlhs) }) assert_parses( s(:masgn, s(:mlhs, s(:lvasgn, :b)), s(:lvar, :foo)), %q{((b, )) = foo}, %q{^ begin (mlhs) | ^ end (mlhs)}) end def test_masgn_attr assert_parses( s(:masgn, s(:mlhs, s(:send, s(:self), :a=), s(:indexasgn, s(:self), s(:int, 1), s(:int, 2))), s(:lvar, :foo)), %q{self.a, self[1, 2] = foo}, %q{~~~~~~ expression (mlhs.send) | ~ selector (mlhs.send) | ^ begin (mlhs.indexasgn) | ^ end (mlhs.indexasgn) | ~~~~~~~~~~ expression (mlhs.indexasgn)}) assert_parses( s(:masgn, s(:mlhs, s(:send, s(:self), :a=), s(:lvasgn, :foo)), s(:lvar, :foo)), %q{self::a, foo = foo}) assert_parses( s(:masgn, s(:mlhs, s(:send, s(:self), :A=), s(:lvasgn, :foo)), s(:lvar, :foo)), %q{self.A, foo = foo}) end def test_masgn_const assert_parses( s(:masgn, s(:mlhs, s(:casgn, s(:self), :A), s(:lvasgn, :foo)), s(:lvar, :foo)), %q{self::A, foo = foo}) assert_parses( s(:masgn, s(:mlhs, s(:casgn, s(:cbase), :A), s(:lvasgn, :foo)), s(:lvar, :foo)), %q{::A, foo = foo}) end def test_masgn_cmd assert_parses( s(:masgn, s(:mlhs, s(:lvasgn, :foo), s(:lvasgn, :bar)), s(:send, nil, :m, s(:lvar, :foo))), %q{foo, bar = m foo}) end def test_asgn_mrhs assert_parses( s(:lvasgn, :foo, s(:array, s(:lvar, :bar), s(:int, 1))), %q{foo = bar, 1}, %q{ ~~~~~~ expression (array) |~~~~~~~~~~~~ expression}) assert_parses( s(:lvasgn, :foo, s(:array, s(:splat, s(:lvar, :bar)))), %q{foo = *bar}) assert_parses( s(:lvasgn, :foo, s(:array, s(:lvar, :baz), s(:splat, s(:lvar, :bar)))), %q{foo = baz, *bar}) end def test_masgn_keyword_invalid assert_diagnoses( [:error, :invalid_assignment], %q{nil, foo = bar}, %q{~~~ location}) end def test_masgn_backref_invalid assert_diagnoses( [:error, :backref_assignment], %q{$1, = foo}, %q{~~ location}) end def test_masgn_const_invalid assert_diagnoses( [:error, :dynamic_const], %q{def f; self::A, foo = foo; end}, %q{ ~~~~~~~ location}) assert_diagnoses( [:error, :dynamic_const], %q{def f; ::A, foo = foo; end}, %q{ ~~~ location}) end # Variable binary operator-assignment def test_var_op_asgn assert_parses( 
s(:op_asgn, s(:lvasgn, :a), :+, s(:int, 1)), %q{a += 1}, %q{ ^^ operator |~~~~~~ expression}) assert_parses( s(:op_asgn, s(:ivasgn, :@a), :|, s(:int, 1)), %q{@a |= 1}, %q{ ^^ operator |~~~~~~~ expression}) assert_parses( s(:op_asgn, s(:cvasgn, :@@var), :|, s(:int, 10)), %q{@@var |= 10}) assert_parses( s(:def, :a, s(:args), s(:op_asgn, s(:cvasgn, :@@var), :|, s(:int, 10))), %q{def a; @@var |= 10; end}) end def test_var_op_asgn_cmd assert_parses( s(:op_asgn, s(:lvasgn, :foo), :+, s(:send, nil, :m, s(:lvar, :foo))), %q{foo += m foo}) end def test_var_op_asgn_keyword_invalid assert_diagnoses( [:error, :invalid_assignment], %q{nil += foo}, %q{~~~ location}) end def test_const_op_asgn assert_parses( s(:op_asgn, s(:casgn, nil, :A), :+, s(:int, 1)), %q{A += 1}) assert_parses( s(:op_asgn, s(:casgn, s(:cbase), :A), :+, s(:int, 1)), %q{::A += 1}, %q{}, SINCE_2_0) assert_parses( s(:op_asgn, s(:casgn, s(:const, nil, :B), :A), :+, s(:int, 1)), %q{B::A += 1}, %q{}, SINCE_2_0) assert_parses( s(:def, :x, s(:args), s(:or_asgn, s(:casgn, s(:self), :A), s(:int, 1))), %q{def x; self::A ||= 1; end}, %q{}, SINCE_2_0) assert_parses( s(:def, :x, s(:args), s(:or_asgn, s(:casgn, s(:cbase), :A), s(:int, 1))), %q{def x; ::A ||= 1; end}, %q{}, SINCE_2_0) end def test_const_op_asgn_invalid assert_diagnoses( [:error, :dynamic_const], %q{Foo::Bar += 1}, %q{ ~~~ location}, %w(1.8 1.9 mac ios)) assert_diagnoses( [:error, :dynamic_const], %q{::Bar += 1}, %q{ ~~~ location}, %w(1.8 1.9 mac ios)) assert_diagnoses( [:error, :dynamic_const], %q{def foo; Foo::Bar += 1; end}, %q{ ~~~ location}, %w(1.8 1.9 mac ios)) assert_diagnoses( [:error, :dynamic_const], %q{def foo; ::Bar += 1; end}, %q{ ~~~ location}, %w(1.8 1.9 mac ios)) end # Method binary operator-assignment def test_op_asgn assert_parses( s(:op_asgn, s(:send, s(:lvar, :foo), :a), :+, s(:int, 1)), %q{foo.a += 1}, %q{ ^^ operator | ~ selector (send) |~~~~~ expression (send) |~~~~~~~~~~ expression}) assert_parses( s(:op_asgn, s(:send, s(:lvar, :foo), :a), :+, s(:int, 1)), %q{foo::a += 1}) assert_parses( s(:op_asgn, s(:send, s(:lvar, :foo), :A), :+, s(:int, 1)), %q{foo.A += 1}) end def test_op_asgn_cmd assert_parses( s(:op_asgn, s(:send, s(:lvar, :foo), :a), :+, s(:send, nil, :m, s(:lvar, :foo))), %q{foo.a += m foo}) assert_parses( s(:op_asgn, s(:send, s(:lvar, :foo), :a), :+, s(:send, nil, :m, s(:lvar, :foo))), %q{foo::a += m foo}) assert_parses( s(:op_asgn, s(:send, s(:lvar, :foo), :A), :+, s(:send, nil, :m, s(:lvar, :foo))), %q{foo.A += m foo}) assert_diagnoses( [:error, :const_reassignment], %q{foo::A += m foo}, %q{ ~~ location}, %w(1.9 mac)) assert_parses( s(:op_asgn, s(:casgn, s(:lvar, :foo), :A), :+, s(:send, nil, :m, s(:lvar, :foo))), %q{foo::A += m foo}, %q{}, SINCE_2_0) end def test_op_asgn_index assert_parses( s(:op_asgn, s(:indexasgn, s(:lvar, :foo), s(:int, 0), s(:int, 1)), :+, s(:int, 2)), %q{foo[0, 1] += 2}, %q{ ^^ operator | ^ begin (indexasgn) | ^ end (indexasgn) |~~~~~~~~~ expression (indexasgn) |~~~~~~~~~~~~~~ expression}) end def test_op_asgn_index_cmd assert_parses( s(:op_asgn, s(:indexasgn, s(:lvar, :foo), s(:int, 0), s(:int, 1)), :+, s(:send, nil, :m, s(:lvar, :foo))), %q{foo[0, 1] += m foo}) end def test_op_asgn_invalid assert_diagnoses( [:error, :backref_assignment], %q{$1 |= 1}, %q{~~ location}) assert_diagnoses( [:error, :backref_assignment], %q{$+ |= 1}, %q{~~ location}) assert_diagnoses( [:error, :backref_assignment], %q{$+ |= m foo}, %q{~~ location}) end # Variable logical operator-assignment def test_var_or_asgn assert_parses( s(:or_asgn, 
s(:lvasgn, :a), s(:int, 1)), %q{a ||= 1}, %q{ ^^^ operator |~~~~~~~ expression}) end def test_var_and_asgn assert_parses( s(:and_asgn, s(:lvasgn, :a), s(:int, 1)), %q{a &&= 1}, %q{ ^^^ operator |~~~~~~~ expression}) end # Method logical operator-assignment def test_or_asgn assert_parses( s(:or_asgn, s(:send, s(:lvar, :foo), :a), s(:int, 1)), %q{foo.a ||= 1}, %q{ ^^^ operator | ~ selector (send) |~~~~~ expression (send) |~~~~~~~~~~~ expression}) assert_parses( s(:or_asgn, s(:indexasgn, s(:lvar, :foo), s(:int, 0), s(:int, 1)), s(:int, 2)), %q{foo[0, 1] ||= 2}, %q{ ^^^ operator | ^ begin (indexasgn) | ^ end (indexasgn) |~~~~~~~~~ expression (indexasgn) |~~~~~~~~~~~~~~~ expression}) end def test_and_asgn assert_parses( s(:and_asgn, s(:send, s(:lvar, :foo), :a), s(:int, 1)), %q{foo.a &&= 1}, %q{ ^^^ operator | ~ selector (send) |~~~~~ expression (send) |~~~~~~~~~~~ expression}) assert_parses( s(:and_asgn, s(:indexasgn, s(:lvar, :foo), s(:int, 0), s(:int, 1)), s(:int, 2)), %q{foo[0, 1] &&= 2}, %q{ ^^^ operator | ^ begin (indexasgn) | ^ end (indexasgn) |~~~~~~~~~ expression (indexasgn) |~~~~~~~~~~~~~~~ expression}) end def test_log_asgn_invalid assert_diagnoses( [:error, :backref_assignment], %q{$1 &&= 1}, %q{~~ location}) assert_diagnoses( [:error, :backref_assignment], %q{$+ ||= 1}, %q{~~ location}) end # # Class and module definitions # def test_module assert_parses( s(:module, s(:const, nil, :Foo), nil), %q{module Foo; end}, %q{~~~~~~ keyword | ~~~ name | ~~~ end}) end def test_module_invalid assert_diagnoses( [:error, :module_in_def], %q{def a; module Foo; end; end}, %q{ ^^^^^^ location}) end def test_cpath assert_parses( s(:module, s(:const, s(:cbase), :Foo), nil), %q{module ::Foo; end}) assert_parses( s(:module, s(:const, s(:const, nil, :Bar), :Foo), nil), %q{module Bar::Foo; end}) end def test_cpath_invalid assert_diagnoses( [:error, :module_name_const], %q{module foo; end}) end def test_class assert_parses( s(:class, s(:const, nil, :Foo), nil, nil), %q{class Foo; end}, %q{~~~~~ keyword | ~~~ name | ~~~ end}) assert_parses( s(:class, s(:const, nil, :Foo), nil, nil), %q{class Foo end}, %q{}, SINCE_2_3) end def test_class_super assert_parses( s(:class, s(:const, nil, :Foo), s(:const, nil, :Bar), nil), %q{class Foo < Bar; end}, %q{~~~~~ keyword | ^ operator | ~~~ end}) end def test_class_super_label assert_parses( s(:class, s(:const, nil, :Foo), s(:send, nil, :a, s(:sym, :b)), nil), %q{class Foo < a:b; end}, %q{}, SINCE_2_0) end def test_class_invalid assert_diagnoses( [:error, :class_in_def], %q{def a; class Foo; end; end}, %q{ ^^^^^ location}) assert_diagnoses( [:error, :class_in_def], %q{def self.a; class Foo; end; end}, %q{ ^^^^^ location}) end def test_sclass assert_parses( s(:sclass, s(:lvar, :foo), s(:nil)), %q{class << foo; nil; end}, %q{~~~~~ keyword | ^^ operator | ~~~ end}) end # # Method (un)definition # def test_def assert_parses( s(:def, :foo, s(:args), nil), %q{def foo; end}, %q{~~~ keyword | ~~~ name |! 
assignment | ~~~ end}) assert_parses( s(:def, :String, s(:args), nil), %q{def String; end}) assert_parses( s(:def, :String=, s(:args), nil), %q{def String=; end}) assert_parses( s(:def, :until, s(:args), nil), %q{def until; end}) assert_parses( s(:def, :BEGIN, s(:args), nil), %q{def BEGIN; end}) assert_parses( s(:def, :END, s(:args), nil), %q{def END; end}) end def test_defs assert_parses( s(:defs, s(:self), :foo, s(:args), nil), %q{def self.foo; end}, %q{~~~ keyword | ^ operator | ~~~ name | ~~~ end}) assert_parses( s(:defs, s(:self), :foo, s(:args), nil), %q{def self::foo; end}, %q{~~~ keyword | ^^ operator | ~~~ name | ~~~ end}) assert_parses( s(:defs, s(:lvar, :foo), :foo, s(:args), nil), %q{def (foo).foo; end}) assert_parses( s(:defs, s(:const, nil, :String), :foo, s(:args), nil), %q{def String.foo; end}) assert_parses( s(:defs, s(:const, nil, :String), :foo, s(:args), nil), %q{def String::foo; end}) end def test_defs_invalid assert_diagnoses( [:error, :singleton_literal], %q{def (1).foo; end}, %q{ ~ location}) assert_diagnoses( [:error, :singleton_literal], %q{def ("foo").foo; end}, %q{ ~~~~~ location}) assert_diagnoses( [:error, :singleton_literal], %q{def ("foo#{bar}").foo; end}, %q{ ~~~~~~~~~~~ location}) assert_diagnoses( [:error, :singleton_literal], %q{def (:foo).foo; end}, %q{ ~~~~ location}) assert_diagnoses( [:error, :singleton_literal], %q{def (:"foo#{bar}").foo; end}, %q{ ~~~~~~~~~~~~ location}) assert_diagnoses( [:error, :singleton_literal], %q{def ([]).foo; end}, %q{ ~~ location}) assert_diagnoses( [:error, :singleton_literal], %q{def ({}).foo; end}, %q{ ~~ location}) assert_diagnoses( [:error, :singleton_literal], %q{def (/foo/).foo; end}, %q{ ~~~~~ location}) end def test_undef assert_parses( s(:undef, s(:sym, :foo), s(:sym, :bar), s(:dsym, s(:str, 'foo'), s(:begin, s(:int, 1)))), %q{undef foo, :bar, :"foo#{1}"}, %q{~~~~~ keyword | ~~~ expression (sym/1) | ~~~~ expression (sym/2) |~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}) end # # Aliasing # def test_alias assert_parses( s(:alias, s(:sym, :foo), s(:sym, :bar)), %q{alias :foo bar}, %q{~~~~~ keyword | ~~~~ expression (sym/1) | ^ begin (sym/1) | ~~~ expression (sym/2) | ! 
begin (sym/2) |~~~~~~~~~~~~~~ expression}) end def test_alias_gvar assert_parses( s(:alias, s(:gvar, :$a), s(:gvar, :$b)), %q{alias $a $b}, %q{ ~~ expression (gvar/1)}) assert_parses( s(:alias, s(:gvar, :$a), s(:back_ref, :$+)), %q{alias $a $+}, %q{ ~~ expression (back_ref)}) end def test_alias_nth_ref assert_diagnoses( [:error, :nth_ref_alias], %q{alias $a $1}, %q{ ~~ location}) end # # Formal arguments # def test_arg assert_parses( s(:def, :f, s(:args, s(:arg, :foo)), nil), %q{def f(foo); end}, %q{ ~~~ name (args.arg) | ~~~ expression (args.arg) | ^ begin (args) | ^ end (args) | ~~~~~ expression (args)}) assert_parses( s(:def, :f, s(:args, s(:arg, :foo), s(:arg, :bar)), nil), %q{def f(foo, bar); end}) end def test_optarg assert_parses( s(:def, :f, s(:args, s(:optarg, :foo, s(:int, 1))), nil), %q{def f foo = 1; end}, %q{ ~~~ name (args.optarg) | ^ operator (args.optarg) | ~~~~~~~ expression (args.optarg) | ~~~~~~~ expression (args)}) assert_parses( s(:def, :f, s(:args, s(:optarg, :foo, s(:int, 1)), s(:optarg, :bar, s(:int, 2))), nil), %q{def f(foo=1, bar=2); end}) end def test_restarg_named assert_parses( s(:def, :f, s(:args, s(:restarg, :foo)), nil), %q{def f(*foo); end}, %q{ ~~~ name (args.restarg) | ~~~~ expression (args.restarg)}) end def test_restarg_unnamed assert_parses( s(:def, :f, s(:args, s(:restarg)), nil), %q{def f(*); end}, %q{ ~ expression (args.restarg)}) end def test_kwarg assert_parses( s(:def, :f, s(:args, s(:kwarg, :foo)), nil), %q{def f(foo:); end}, %q{ ~~~ name (args.kwarg) | ~~~~ expression (args.kwarg)}, SINCE_2_1) end def test_kwoptarg assert_parses( s(:def, :f, s(:args, s(:kwoptarg, :foo, s(:int, 1))), nil), %q{def f(foo: 1); end}, %q{ ~~~ name (args.kwoptarg) | ~~~~~~ expression (args.kwoptarg)}, SINCE_2_0) end def test_kwrestarg_named assert_parses( s(:def, :f, s(:args, s(:kwrestarg, :foo)), nil), %q{def f(**foo); end}, %q{ ~~~ name (args.kwrestarg) | ~~~~~ expression (args.kwrestarg)}, SINCE_2_0) end def test_kwrestarg_unnamed assert_parses( s(:def, :f, s(:args, s(:kwrestarg)), nil), %q{def f(**); end}, %q{ ~~ expression (args.kwrestarg)}, SINCE_2_0) end def test_kwnilarg assert_parses( s(:def, :f, s(:args, s(:kwnilarg)), nil), %q{def f(**nil); end}, %q{ ~~~~~ expression (args.kwnilarg) | ~~~ name (args.kwnilarg)}, SINCE_2_7) assert_parses( s(:block, s(:send, nil, :m), s(:args, s(:kwnilarg)), nil), %q{m { |**nil| }}, %q{ ~~~~~ expression (args.kwnilarg) | ~~~ name (args.kwnilarg)}, SINCE_2_7) assert_parses( s(:block, s(:lambda), s(:args, s(:kwnilarg)), nil), %q{->(**nil) {}}, %q{ ~~~~~ expression (args.kwnilarg) | ~~~ name (args.kwnilarg)}, SINCE_2_7) end def test_blockarg assert_parses( s(:def, :f, s(:args, s(:blockarg, :block)), nil), %q{def f(&block); end}, %q{ ~~~~~ name (args.blockarg) | ~~~~~~ expression (args.blockarg)}) end def test_objc_arg assert_parses( s(:def, :f, s(:args, s(:arg, :a), s(:objc_kwarg, :b, :c)), nil), %q{def f(a, b: c); end}, %q{ ~ keyword (args.objc_kwarg) | ~ operator (args.objc_kwarg) | ~ argument (args.objc_kwarg) | ~~~~ expression (args.objc_kwarg)}, %w(mac)) assert_parses( s(:def, :f, s(:args, s(:arg, :a), s(:objc_kwarg, :b, :c)), nil), %q{def f(a, b => c); end}, %q{ ~ keyword (args.objc_kwarg) | ~~ operator (args.objc_kwarg) | ~ argument (args.objc_kwarg) | ~~~~~~ expression (args.objc_kwarg)}, %w(mac)) end def test_arg_scope # [ruby-core:61299] [Bug #9593] assert_parses( s(:def, :f, s(:args, s(:optarg, :var, s(:defined?, s(:lvar, :var)))), s(:lvar, :var)), %q{def f(var = defined?(var)) var end}, %q{}, SINCE_2_7 - SINCE_2_1) 
assert_parses( s(:def, :f, s(:args, s(:kwoptarg, :var, s(:defined?, s(:lvar, :var)))), s(:lvar, :var)), %q{def f(var: defined?(var)) var end}, %q{}, SINCE_2_7 - SINCE_2_1) assert_parses( s(:block, s(:send, nil, :lambda), s(:args, s(:shadowarg, :a)), s(:lvar, :a)), %q{lambda{|;a|a}}, %q{}, SINCE_1_9) end def assert_parses_args(ast, code, versions=ALL_VERSIONS) assert_parses( s(:def, :f, ast, nil), %Q{def f #{code}; end}, %q{}, versions) end def test_arg_combinations # f_arg tCOMMA f_optarg tCOMMA f_rest_arg opt_f_block_arg assert_parses_args( s(:args, s(:arg, :a), s(:optarg, :o, s(:int, 1)), s(:restarg, :r), s(:blockarg, :b)), %q{a, o=1, *r, &b}) # f_arg tCOMMA f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg assert_parses_args( s(:args, s(:arg, :a), s(:optarg, :o, s(:int, 1)), s(:restarg, :r), s(:arg, :p), s(:blockarg, :b)), %q{a, o=1, *r, p, &b}, SINCE_1_9) # f_arg tCOMMA f_optarg opt_f_block_arg assert_parses_args( s(:args, s(:arg, :a), s(:optarg, :o, s(:int, 1)), s(:blockarg, :b)), %q{a, o=1, &b}) # f_arg tCOMMA f_optarg tCOMMA f_arg opt_f_block_arg assert_parses_args( s(:args, s(:arg, :a), s(:optarg, :o, s(:int, 1)), s(:arg, :p), s(:blockarg, :b)), %q{a, o=1, p, &b}, SINCE_1_9) # f_arg tCOMMA f_rest_arg opt_f_block_arg assert_parses_args( s(:args, s(:arg, :a), s(:restarg, :r), s(:blockarg, :b)), %q{a, *r, &b}) # f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg assert_parses_args( s(:args, s(:arg, :a), s(:restarg, :r), s(:arg, :p), s(:blockarg, :b)), %q{a, *r, p, &b}, SINCE_1_9) # f_arg opt_f_block_arg assert_parses_args( s(:args, s(:arg, :a), s(:blockarg, :b)), %q{a, &b}) # f_optarg tCOMMA f_rest_arg opt_f_block_arg assert_parses_args( s(:args, s(:optarg, :o, s(:int, 1)), s(:restarg, :r), s(:blockarg, :b)), %q{o=1, *r, &b}) # f_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg assert_parses_args( s(:args, s(:optarg, :o, s(:int, 1)), s(:restarg, :r), s(:arg, :p), s(:blockarg, :b)), %q{o=1, *r, p, &b}, SINCE_1_9) # f_optarg opt_f_block_arg assert_parses_args( s(:args, s(:optarg, :o, s(:int, 1)), s(:blockarg, :b)), %q{o=1, &b}) # f_optarg tCOMMA f_arg opt_f_block_arg assert_parses_args( s(:args, s(:optarg, :o, s(:int, 1)), s(:arg, :p), s(:blockarg, :b)), %q{o=1, p, &b}, SINCE_1_9) # f_rest_arg opt_f_block_arg assert_parses_args( s(:args, s(:restarg, :r), s(:blockarg, :b)), %q{*r, &b}) # f_rest_arg tCOMMA f_arg opt_f_block_arg assert_parses_args( s(:args, s(:restarg, :r), s(:arg, :p), s(:blockarg, :b)), %q{*r, p, &b}, SINCE_1_9) # f_block_arg assert_parses_args( s(:args, s(:blockarg, :b)), %q{&b}) # (nothing) assert_parses_args( s(:args), %q{}) end def test_kwarg_combinations # f_kwarg tCOMMA f_kwrest opt_f_block_arg assert_parses_args( s(:args, s(:kwoptarg, :foo, s(:int, 1)), s(:kwoptarg, :bar, s(:int, 2)), s(:kwrestarg, :baz), s(:blockarg, :b)), %q{(foo: 1, bar: 2, **baz, &b)}, SINCE_2_0) # f_kwarg opt_f_block_arg assert_parses_args( s(:args, s(:kwoptarg, :foo, s(:int, 1)), s(:blockarg, :b)), %q{(foo: 1, &b)}, SINCE_2_0) # f_kwrest opt_f_block_arg assert_parses_args( s(:args, s(:kwrestarg, :baz), s(:blockarg, :b)), %q{**baz, &b}, SINCE_2_0) assert_parses_args( s(:args, s(:restarg), s(:kwrestarg)), %q{*, **}, SINCE_2_0) end def test_kwarg_no_paren assert_parses_args( s(:args, s(:kwarg, :foo)), %Q{foo:\n}, SINCE_2_1) assert_parses_args( s(:args, s(:kwoptarg, :foo, s(:int, -1))), %Q{foo: -1\n}, SINCE_2_1) end def assert_parses_margs(ast, code, versions=SINCE_1_9) assert_parses_args( s(:args, ast), %Q{(#{code})}, versions) end def test_marg_combinations # tLPAREN f_margs 
rparen assert_parses_margs( s(:mlhs, s(:mlhs, s(:arg, :a))), %q{((a))}) # f_marg_list assert_parses_margs( s(:mlhs, s(:arg, :a), s(:arg, :a1)), %q{(a, a1)}) # f_marg_list tCOMMA tSTAR f_norm_arg assert_parses_margs( s(:mlhs, s(:arg, :a), s(:restarg, :r)), %q{(a, *r)}) # f_marg_list tCOMMA tSTAR f_norm_arg tCOMMA f_marg_list assert_parses_margs( s(:mlhs, s(:arg, :a), s(:restarg, :r), s(:arg, :p)), %q{(a, *r, p)}) # f_marg_list tCOMMA tSTAR assert_parses_margs( s(:mlhs, s(:arg, :a), s(:restarg)), %q{(a, *)}) # f_marg_list tCOMMA tSTAR tCOMMA f_marg_list assert_parses_margs( s(:mlhs, s(:arg, :a), s(:restarg), s(:arg, :p)), %q{(a, *, p)}) # tSTAR f_norm_arg assert_parses_margs( s(:mlhs, s(:restarg, :r)), %q{(*r)}) # tSTAR f_norm_arg tCOMMA f_marg_list assert_parses_margs( s(:mlhs, s(:restarg, :r), s(:arg, :p)), %q{(*r, p)}) # tSTAR assert_parses_margs( s(:mlhs, s(:restarg)), %q{(*)}) # tSTAR tCOMMA f_marg_list assert_parses_margs( s(:mlhs, s(:restarg), s(:arg, :p)), %q{(*, p)}) end def test_marg_objc_restarg assert_parses( s(:def, :f, s(:args, s(:arg, :a), s(:mlhs, s(:objc_restarg, s(:objc_kwarg, :b, :c)))), nil), %Q{def f(a, (*b: c)); end}, %q{ ~ operator (args.mlhs.objc_restarg) | ~~~~~ expression (args.mlhs.objc_restarg)}, %w(mac)) end def assert_parses_blockargs(ast, code, versions=ALL_VERSIONS) assert_parses( s(:block, s(:send, nil, :f), ast, nil), %Q{f{ #{code} }}, %q{}, versions) end def test_block_arg_combinations # none assert_parses_blockargs( s(:args), %q{}) # tPIPE tPIPE # tPIPE opt_bv_decl tPIPE assert_parses_blockargs( s(:args), %q{| |}) assert_parses_blockargs( s(:args, s(:shadowarg, :a)), %q{|;a|}, SINCE_1_9) assert_parses_blockargs( s(:args, s(:shadowarg, :a)), %Q{|;\na\n|}, SINCE_2_0) # tOROP before 2.7 / tPIPE+tPIPE after assert_parses_blockargs( s(:args), %q{||}) # block_par # block_par tCOMMA # block_par tCOMMA tAMPER lhs # f_arg opt_f_block_arg # f_arg tCOMMA assert_parses_blockargs( s(:args, s(:procarg0, s(:arg, :a))), %q{|a|}, SINCE_1_9) assert_parses_blockargs( s(:args, s(:arg, :a)), %q{|a|}, %w(1.8)) assert_parses_blockargs( s(:args, s(:arg, :a), s(:arg, :c)), %q{|a, c|}) assert_parses_blockargs( s(:args, s(:arg_expr, s(:ivasgn, :@a))), %q{|@a|}, %w(1.8)) assert_parses_blockargs( s(:args, s(:arg, :a)), %q{|a,|} ) assert_parses_blockargs( s(:args, s(:arg, :a), s(:blockarg, :b)), %q{|a, &b|}) assert_parses_blockargs( s(:args, s(:arg, :a), s(:blockarg_expr, s(:ivasgn, :@b))), %q{|a, &@b|}, %w(1.8)) # block_par tCOMMA tSTAR lhs tCOMMA tAMPER lhs # block_par tCOMMA tSTAR tCOMMA tAMPER lhs # block_par tCOMMA tSTAR lhs # block_par tCOMMA tSTAR # f_arg tCOMMA f_rest_arg opt_f_block_arg assert_parses_blockargs( s(:args, s(:arg, :a), s(:restarg, :s), s(:blockarg, :b)), %q{|a, *s, &b|}) assert_parses_blockargs( s(:args, s(:arg, :a), s(:restarg_expr, s(:ivasgn, :@s)), s(:blockarg_expr, s(:ivasgn, :@b))), %q{|a, *@s, &@b|}, %w(1.8)) assert_parses_blockargs( s(:args, s(:arg, :a), s(:restarg), s(:blockarg, :b)), %q{|a, *, &b|}) assert_parses_blockargs( s(:args, s(:arg, :a), s(:restarg), s(:blockarg_expr, s(:ivasgn, :@b))), %q{|a, *, &@b|}, %w(1.8)) assert_parses_blockargs( s(:args, s(:arg, :a), s(:restarg, :s)), %q{|a, *s|}) assert_parses_blockargs( s(:args, s(:arg, :a), s(:restarg_expr, s(:ivasgn, :@s))), %q{|a, *@s|}, %w(1.8)) assert_parses_blockargs( s(:args, s(:arg, :a), s(:restarg)), %q{|a, *|}) # tSTAR lhs tCOMMA tAMPER lhs # tSTAR lhs # tSTAR # tSTAR tCOMMA tAMPER lhs # f_rest_arg opt_f_block_arg assert_parses_blockargs( s(:args, s(:restarg, :s), s(:blockarg, :b)), %q{|*s, 
&b|}) assert_parses_blockargs( s(:args, s(:restarg_expr, s(:ivasgn, :@s)), s(:blockarg_expr, s(:ivasgn, :@b))), %q{|*@s, &@b|}, %w(1.8)) assert_parses_blockargs( s(:args, s(:restarg), s(:blockarg, :b)), %q{|*, &b|}) assert_parses_blockargs( s(:args, s(:restarg), s(:blockarg_expr, s(:ivasgn, :@b))), %q{|*, &@b|}, %w(1.8)) assert_parses_blockargs( s(:args, s(:restarg, :s)), %q{|*s|}) assert_parses_blockargs( s(:args, s(:restarg_expr, s(:ivasgn, :@s))), %q{|*@s|}, %w(1.8)) assert_parses_blockargs( s(:args, s(:restarg)), %q{|*|}) # tAMPER lhs # f_block_arg assert_parses_blockargs( s(:args, s(:blockarg, :b)), %q{|&b|}) assert_parses_blockargs( s(:args, s(:blockarg_expr, s(:ivasgn, :@b))), %q{|&@b|}, %w(1.8)) # f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg opt_f_block_arg assert_parses_blockargs( s(:args, s(:arg, :a), s(:optarg, :o, s(:int, 1)), s(:optarg, :o1, s(:int, 2)), s(:restarg, :r), s(:blockarg, :b)), %q{|a, o=1, o1=2, *r, &b|}, SINCE_1_9) # f_arg tCOMMA f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg assert_parses_blockargs( s(:args, s(:arg, :a), s(:optarg, :o, s(:int, 1)), s(:restarg, :r), s(:arg, :p), s(:blockarg, :b)), %q{|a, o=1, *r, p, &b|}, SINCE_1_9) # f_arg tCOMMA f_block_optarg opt_f_block_arg assert_parses_blockargs( s(:args, s(:arg, :a), s(:optarg, :o, s(:int, 1)), s(:blockarg, :b)), %q{|a, o=1, &b|}, SINCE_1_9) # f_arg tCOMMA f_block_optarg tCOMMA f_arg opt_f_block_arg assert_parses_blockargs( s(:args, s(:arg, :a), s(:optarg, :o, s(:int, 1)), s(:arg, :p), s(:blockarg, :b)), %q{|a, o=1, p, &b|}, SINCE_1_9) # f_arg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg assert_parses_blockargs( s(:args, s(:arg, :a), s(:restarg, :r), s(:arg, :p), s(:blockarg, :b)), %q{|a, *r, p, &b|}, SINCE_1_9) # f_block_optarg tCOMMA f_rest_arg opt_f_block_arg assert_parses_blockargs( s(:args, s(:optarg, :o, s(:int, 1)), s(:restarg, :r), s(:blockarg, :b)), %q{|o=1, *r, &b|}, SINCE_1_9) # f_block_optarg tCOMMA f_rest_arg tCOMMA f_arg opt_f_block_arg assert_parses_blockargs( s(:args, s(:optarg, :o, s(:int, 1)), s(:restarg, :r), s(:arg, :p), s(:blockarg, :b)), %q{|o=1, *r, p, &b|}, SINCE_1_9) # f_block_optarg opt_f_block_arg assert_parses_blockargs( s(:args, s(:optarg, :o, s(:int, 1)), s(:blockarg, :b)), %q{|o=1, &b|}, SINCE_1_9) # f_block_optarg tCOMMA f_arg opt_f_block_arg assert_parses_blockargs( s(:args, s(:optarg, :o, s(:int, 1)), s(:arg, :p), s(:blockarg, :b)), %q{|o=1, p, &b|}, SINCE_1_9) # f_rest_arg tCOMMA f_arg opt_f_block_arg assert_parses_blockargs( s(:args, s(:restarg, :r), s(:arg, :p), s(:blockarg, :b)), %q{|*r, p, &b|}, SINCE_1_9) end def test_multiple_args_with_trailing_comma assert_parses_blockargs( s(:args, s(:arg, :a), s(:arg, :b)), %q(|a, b,|) ) end def test_procarg0_legacy Parser::Builders::Default.emit_procarg0 = false assert_parses_blockargs( s(:args, s(:arg, :a)), %q{|a|} ) ensure Parser::Builders::Default.emit_procarg0 = true end def test_emit_arg_inside_procarg0_legacy Parser::Builders::Default.emit_arg_inside_procarg0 = false assert_parses_blockargs( s(:args, s(:procarg0, :a)), %q{|a|}, SINCE_1_9) ensure Parser::Builders::Default.emit_arg_inside_procarg0 = true end def test_procarg0 assert_parses( s(:block, s(:send, nil, :m), s(:args, s(:procarg0, s(:arg, :foo))), nil), %q{m { |foo| } }, %q{ ^^^ expression (args.procarg0)}, SINCE_1_9) assert_parses( s(:block, s(:send, nil, :m), s(:args, s(:procarg0, s(:arg, :foo), s(:arg, :bar))), nil), %q{m { |(foo, bar)| } }, %q{ ^ begin (args.procarg0) | ^ end (args.procarg0) | ^^^^^^^^^^ expression (args.procarg0)}, SINCE_1_9) 
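    # Hedged illustration (assumes the default builder switches, i.e.
    # emit_procarg0 and emit_arg_inside_procarg0 both enabled, as restored by
    # the legacy tests above): a single bare block parameter is wrapped in
    # procarg0, while a trailing comma degrades it to a plain argument list.
    #
    #   require 'parser/current'
    #   Parser::CurrentRuby.parse("m { |foo| }").children[1]
    #   # => s(:args, s(:procarg0, s(:arg, :foo)))
    #   Parser::CurrentRuby.parse("m { |foo,| }").children[1]
    #   # => s(:args, s(:arg, :foo))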
end def test_block_kwarg_combinations # f_block_kwarg tCOMMA f_kwrest opt_f_block_arg assert_parses_blockargs( s(:args, s(:kwoptarg, :foo, s(:int, 1)), s(:kwoptarg, :bar, s(:int, 2)), s(:kwrestarg, :baz), s(:blockarg, :b)), %q{|foo: 1, bar: 2, **baz, &b|}, SINCE_2_0) # f_block_kwarg opt_f_block_arg assert_parses_blockargs( s(:args, s(:kwoptarg, :foo, s(:int, 1)), s(:blockarg, :b)), %q{|foo: 1, &b|}, SINCE_2_0) # f_kwrest opt_f_block_arg assert_parses_blockargs( s(:args, s(:kwrestarg, :baz), s(:blockarg, :b)), %q{|**baz, &b|}, SINCE_2_0) end def test_block_kwarg assert_parses_blockargs( s(:args, s(:kwarg, :foo)), %q{|foo:|}, SINCE_2_1) end def test_arg_invalid assert_diagnoses( [:error, :argument_const], %q{def foo(Abc); end}, %q{ ~~~ location}) assert_diagnoses( [:error, :argument_ivar], %q{def foo(@abc); end}, %q{ ~~~~ location}) assert_diagnoses( [:error, :argument_gvar], %q{def foo($abc); end}, %q{ ~~~~ location}) assert_diagnoses( [:error, :argument_cvar], %q{def foo(@@abc); end}, %q{ ~~~~~ location}) end def test_arg_duplicate assert_diagnoses( [:error, :duplicate_argument], %q{def foo(aa, aa); end}, %q{ ^^ location | ~~ highlights (0)}) assert_diagnoses( [:error, :duplicate_argument], %q{def foo(aa, aa=1); end}, %q{ ^^ location | ~~ highlights (0)}) assert_diagnoses( [:error, :duplicate_argument], %q{def foo(aa, *aa); end}, %q{ ^^ location | ~~ highlights (0)}) assert_diagnoses( [:error, :duplicate_argument], %q{def foo(aa, &aa); end}, %q{ ^^ location | ~~ highlights (0)}) assert_diagnoses( [:error, :duplicate_argument], %q{def foo(aa, (bb, aa)); end}, %q{ ^^ location | ~~ highlights (0)}, SINCE_1_9) assert_diagnoses( [:error, :duplicate_argument], %q{def foo(aa, *r, aa); end}, %q{ ^^ location | ~~ highlights (0)}, SINCE_1_9) assert_diagnoses( [:error, :duplicate_argument], %q{lambda do |aa; aa| end}, %q{ ^^ location | ~~ highlights (0)}, SINCE_1_9) assert_diagnoses( [:error, :duplicate_argument], %q{def foo(aa, aa: 1); end}, %q{ ^^ location | ~~ highlights (0)}, SINCE_2_0) assert_diagnoses( [:error, :duplicate_argument], %q{def foo(aa, **aa); end}, %q{ ^^ location | ~~ highlights (0)}, SINCE_2_0) assert_diagnoses( [:error, :duplicate_argument], %q{def foo(aa, aa:); end}, %q{ ^^ location | ~~ highlights (0)}, SINCE_2_1) end def test_arg_duplicate_ignored assert_diagnoses( [:error, :duplicate_argument], %q{def foo(_, _); end}, %q{}, %w(1.8)) assert_parses( s(:def, :foo, s(:args, s(:arg, :_), s(:arg, :_)), nil), %q{def foo(_, _); end}, %q{}, SINCE_1_9) assert_diagnoses( [:error, :duplicate_argument], %q{def foo(_a, _a); end}, %q{}, %w(1.8 1.9 mac ios)) assert_parses( s(:def, :foo, s(:args, s(:arg, :_a), s(:arg, :_a)), nil), %q{def foo(_a, _a); end}, %q{}, SINCE_2_0) end def test_arg_duplicate_proc assert_parses( s(:block, s(:send, nil, :proc), s(:args, s(:arg, :a), s(:arg, :a)), nil), %q{proc{|a,a|}}, %q{}, %w(1.8)) assert_diagnoses( [:error, :duplicate_argument], %q{proc{|a,a|}}, %q{}, SINCE_1_9) end def test_kwarg_invalid assert_diagnoses( [:error, :argument_const], %q{def foo(Abc: 1); end}, %q{ ~~~~ location}, SINCE_2_0) assert_diagnoses( [:error, :argument_const], %q{def foo(Abc:); end}, %q{ ~~~~ location}, SINCE_2_1) end def test_arg_label assert_parses( s(:def, :foo, s(:args), s(:send, nil, :a, s(:sym, :b))), %q{def foo() a:b end}, %q{}, SINCE_1_9) assert_parses( s(:def, :foo, s(:args), s(:send, nil, :a, s(:sym, :b))), %Q{def foo\n a:b end}, %q{}, SINCE_1_9) assert_parses( s(:block, s(:send, nil, :f), s(:args), s(:send, nil, :a, s(:sym, :b))), %Q{f { || a:b }}, %q{}, SINCE_1_9) 
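    # Hedged sketch of how the `selector`, `dot` and `expression` ranges used
    # by the location patterns in the "Sends" tests below can be inspected
    # directly; the snippet is illustrative and not asserted anywhere.
    #
    #   require 'parser/current'
    #   node = Parser::CurrentRuby.parse("foo.fun(1)")
    #   node.loc.selector.source   # => "fun"
    #   node.loc.dot.source        # => "."
    #   node.loc.expression.source # => "foo.fun(1)"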
end # # Sends # # To self def test_send_self assert_parses( s(:send, nil, :fun), %q{fun}, %q{~~~ selector |~~~ expression}) assert_parses( s(:send, nil, :fun!), %q{fun!}, %q{~~~~ selector |~~~~ expression}) assert_parses( s(:send, nil, :fun, s(:int, 1)), %q{fun(1)}, %q{~~~ selector | ^ begin | ^ end |~~~~~~ expression}) end def test_send_self_block assert_parses( s(:block, s(:send, nil, :fun), s(:args), nil), %q{fun { }}) assert_parses( s(:block, s(:send, nil, :fun), s(:args), nil), %q{fun() { }}) assert_parses( s(:block, s(:send, nil, :fun, s(:int, 1)), s(:args), nil), %q{fun(1) { }}) assert_parses( s(:block, s(:send, nil, :fun), s(:args), nil), %q{fun do end}) end def test_send_block_blockarg assert_diagnoses( [:error, :block_and_blockarg], %q{fun(&bar) do end}, %q{ ~~~~ location | ~~ highlights (0)}) end def test_send_objc_vararg assert_parses( s(:send, nil, :fun, s(:int, 1), s(:kwargs, s(:pair, s(:sym, :bar), s(:objc_varargs, s(:int, 2), s(:int, 3), s(:nil))))), %q{fun(1, bar: 2, 3, nil)}, %q{ ~~~~~~~~~ expression (kwargs.pair.objc_varargs)}, %w(mac)) end # To receiver def test_send_plain assert_parses( s(:send, s(:lvar, :foo), :fun), %q{foo.fun}, %q{ ~~~ selector | ^ dot |~~~~~~~ expression}) assert_parses( s(:send, s(:lvar, :foo), :fun), %q{foo::fun}, %q{ ~~~ selector | ^^ dot |~~~~~~~~ expression}) assert_parses( s(:send, s(:lvar, :foo), :Fun), %q{foo::Fun()}, %q{ ~~~ selector | ^^ dot |~~~~~~~~~~ expression}) end def test_send_plain_cmd assert_parses( s(:send, s(:lvar, :foo), :fun, s(:lvar, :bar)), %q{foo.fun bar}, %q{ ~~~ selector | ^ dot |~~~~~~~~~~~ expression}) assert_parses( s(:send, s(:lvar, :foo), :fun, s(:lvar, :bar)), %q{foo::fun bar}, %q{ ~~~ selector | ^^ dot |~~~~~~~~~~~~ expression}) assert_parses( s(:send, s(:lvar, :foo), :Fun, s(:lvar, :bar)), %q{foo::Fun bar}, %q{ ~~~ selector | ^^ dot |~~~~~~~~~~~~ expression}) end def test_send_plain_cmd_ambiguous_literal assert_diagnoses( [:warning, :ambiguous_literal], %q{m /foo/}, %q{ ^ location}, ALL_VERSIONS - SINCE_3_0) refute_diagnoses( %q{m %[1]}) end def test_send_plain_cmd_ambiguous_regexp assert_diagnoses( [:warning, :ambiguous_regexp], %q{m /foo/}, %q{ ^ location}, SINCE_3_0) refute_diagnoses( %q{m %[1]}) end def test_send_plain_cmd_ambiguous_prefix assert_diagnoses( [:warning, :ambiguous_prefix, { :prefix => '+' }], %q{m +foo}, %q{ ^ location}) assert_diagnoses( [:warning, :ambiguous_prefix, { :prefix => '-' }], %q{m -foo}, %q{ ^ location}) assert_diagnoses( [:warning, :ambiguous_prefix, { :prefix => '&' }], %q{m &foo}, %q{ ^ location}) assert_diagnoses( [:warning, :ambiguous_prefix, { :prefix => '*' }], %q{m *foo}, %q{ ^ location}) assert_diagnoses( [:warning, :ambiguous_prefix, { :prefix => '**' }], %q{m **foo}, %q{ ^^ location}, SINCE_2_0) end def test_send_block_chain_cmd assert_parses( s(:send, s(:block, s(:send, nil, :meth, s(:int, 1)), s(:args), nil), :fun, s(:lvar, :bar)), %q{meth 1 do end.fun bar}, %q{ ~~~ selector | ^ dot |~~~~~~~~~~~~~~~~~~~~~ expression}) assert_parses( s(:send, s(:block, s(:send, nil, :meth, s(:int, 1)), s(:args), nil), :fun, s(:lvar, :bar)), %q{meth 1 do end.fun(bar)}, %q{ ~~~ selector | ^ dot | ^ begin | ^ end |~~~~~~~~~~~~~~~~~~~~~~ expression}) assert_parses( s(:send, s(:block, s(:send, nil, :meth, s(:int, 1)), s(:args), nil), :fun, s(:lvar, :bar)), %q{meth 1 do end::fun bar}, %q{ ~~~ selector | ^^ dot |~~~~~~~~~~~~~~~~~~~~~~ expression}) assert_parses( s(:send, s(:block, s(:send, nil, :meth, s(:int, 1)), s(:args), nil), :fun, s(:lvar, :bar)), %q{meth 1 do end::fun(bar)}, %q{ ~~~ 
selector | ^ begin | ^ end | ^^ dot |~~~~~~~~~~~~~~~~~~~~~~~ expression}) assert_parses( s(:block, s(:send, s(:block, s(:send, nil, :meth, s(:int, 1)), s(:args), nil), :fun, s(:lvar, :bar)), s(:args), nil), %q{meth 1 do end.fun bar do end}, %q{}, SINCE_2_0) assert_parses( s(:block, s(:send, s(:block, s(:send, nil, :meth, s(:int, 1)), s(:args), nil), :fun, s(:lvar, :bar)), s(:args), nil), %q{meth 1 do end.fun(bar) {}}, %q{}, SINCE_2_0) assert_parses( s(:block, s(:send, s(:block, s(:send, nil, :meth, s(:int, 1)), s(:args), nil), :fun), s(:args), nil), %q{meth 1 do end.fun {}}, %q{}, SINCE_2_0) end def test_send_paren_block_cmd assert_parses( s(:send, nil, :foo, s(:block, s(:send, nil, :meth, s(:int, 1)), s(:args), nil)), %q{foo(meth 1 do end)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :foo, s(:int, 1), s(:block, s(:send, nil, :meth, s(:int, 1)), s(:args), nil)), %q{foo(1, meth 1 do end)}, %q{}, %w(1.8)) end def test_send_binary_op assert_parses( s(:send, s(:lvar, :foo), :+, s(:int, 1)), %q{foo + 1}, %q{ ~ selector |~~~~~~~ expression}) assert_parses( s(:send, s(:lvar, :foo), :-, s(:int, 1)), %q{foo - 1}) assert_parses( s(:send, s(:lvar, :foo), :*, s(:int, 1)), %q{foo * 1}) assert_parses( s(:send, s(:lvar, :foo), :/, s(:int, 1)), %q{foo / 1}) assert_parses( s(:send, s(:lvar, :foo), :%, s(:int, 1)), %q{foo % 1}) assert_parses( s(:send, s(:lvar, :foo), :**, s(:int, 1)), %q{foo ** 1}) assert_parses( s(:send, s(:lvar, :foo), :|, s(:int, 1)), %q{foo | 1}) assert_parses( s(:send, s(:lvar, :foo), :^, s(:int, 1)), %q{foo ^ 1}) assert_parses( s(:send, s(:lvar, :foo), :&, s(:int, 1)), %q{foo & 1}) assert_parses( s(:send, s(:lvar, :foo), :<=>, s(:int, 1)), %q{foo <=> 1}) assert_parses( s(:send, s(:lvar, :foo), :<, s(:int, 1)), %q{foo < 1}) assert_parses( s(:send, s(:lvar, :foo), :<=, s(:int, 1)), %q{foo <= 1}) assert_parses( s(:send, s(:lvar, :foo), :>, s(:int, 1)), %q{foo > 1}) assert_parses( s(:send, s(:lvar, :foo), :>=, s(:int, 1)), %q{foo >= 1}) assert_parses( s(:send, s(:lvar, :foo), :==, s(:int, 1)), %q{foo == 1}) assert_parses( s(:not, s(:send, s(:lvar, :foo), :==, s(:int, 1))), %q{foo != 1}, %q{}, %w(1.8)) assert_parses( s(:send, s(:lvar, :foo), :'!=', s(:int, 1)), %q{foo != 1}, %q{}, SINCE_1_9) assert_parses( s(:send, s(:lvar, :foo), :===, s(:int, 1)), %q{foo === 1}) assert_parses( s(:send, s(:lvar, :foo), :=~, s(:int, 1)), %q{foo =~ 1}) assert_parses( s(:not, s(:send, s(:lvar, :foo), :=~, s(:int, 1))), %q{foo !~ 1}, %q{}, %w(1.8)) assert_parses( s(:send, s(:lvar, :foo), :'!~', s(:int, 1)), %q{foo !~ 1}, %q{}, SINCE_1_9) assert_parses( s(:send, s(:lvar, :foo), :<<, s(:int, 1)), %q{foo << 1}) assert_parses( s(:send, s(:lvar, :foo), :>>, s(:int, 1)), %q{foo >> 1}) end def test_send_unary_op assert_parses( s(:send, s(:lvar, :foo), :-@), %q{-foo}, %q{~ selector |~~~~ expression}) assert_parses( s(:send, s(:lvar, :foo), :+@), %q{+foo}) assert_parses( s(:send, s(:lvar, :foo), :~), %q{~foo}) end def test_bang assert_parses( s(:not, s(:lvar, :foo)), %q{!foo}, %{}, %w(1.8)) assert_parses( s(:send, s(:lvar, :foo), :'!'), %q{!foo}, %{}, SINCE_1_9) end def test_bang_cmd assert_parses( s(:not, s(:send, nil, :m, s(:lvar, :foo))), %q{!m foo}, %{}, %w(1.8)) assert_parses( s(:send, s(:send, nil, :m, s(:lvar, :foo)), :'!'), %q{!m foo}, %{}, SINCE_1_9) end def test_not assert_parses( s(:not, s(:lvar, :foo)), %q{not foo}, %{}, %w(1.8)) assert_parses( s(:send, s(:lvar, :foo), :'!'), %q{not foo}, %{}, SINCE_1_9) assert_parses( s(:send, s(:lvar, :foo), :'!'), %q{not(foo)}, %q{~~~~~~~~ expression}, SINCE_1_9) 
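    # Hedged aside (illustrative only): the post-1.9 behaviour asserted here
    # can also be checked against a single versioned grammar; the class-level
    # parse helper comes from Parser::Base.
    #
    #   require 'parser/ruby27'
    #   Parser::Ruby27.parse("not 1")
    #   # => s(:send, s(:int, 1), :!)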
assert_parses( s(:send, s(:begin), :'!'), %q{not()}, %q{~~~~~ expression}, SINCE_1_9) end def test_not_cmd assert_parses( s(:not, s(:send, nil, :m, s(:lvar, :foo))), %q{not m foo}, %{}, %w(1.8)) assert_parses( s(:send, s(:send, nil, :m, s(:lvar, :foo)), :'!'), %q{not m foo}, %{}, SINCE_1_9) end def test_unary_num_pow_precedence assert_parses( s(:send, s(:send, s(:int, 2), :**, s(:int, 10)), :+@), %q{+2 ** 10}, %{}, %w{2.1}) assert_parses( s(:send, s(:send, s(:float, 2.0), :**, s(:int, 10)), :+@), %q{+2.0 ** 10}) assert_parses( s(:send, s(:send, s(:int, 2), :**, s(:int, 10)), :-@), %q{-2 ** 10}) assert_parses( s(:send, s(:send, s(:float, 2.0), :**, s(:int, 10)), :-@), %q{-2.0 ** 10}) end def test_send_attr_asgn assert_parses( s(:send, s(:lvar, :foo), :a=, s(:int, 1)), %q{foo.a = 1}, %q{ ~ selector | ^ dot | ^ operator |~~~~~~~~~ expression}) assert_parses( s(:send, s(:lvar, :foo), :a=, s(:int, 1)), %q{foo::a = 1}, %q{ ~ selector | ^^ dot | ^ operator |~~~~~~~~~~ expression}) assert_parses( s(:send, s(:lvar, :foo), :A=, s(:int, 1)), %q{foo.A = 1}, %q{ ~ selector | ^ dot | ^ operator |~~~~~~~~~ expression}) assert_parses( s(:casgn, s(:lvar, :foo), :A, s(:int, 1)), %q{foo::A = 1}, %q{ ~ name | ^^ double_colon | ^ operator |~~~~~~~~~~ expression}) end def test_send_index assert_parses( s(:index, s(:lvar, :foo), s(:int, 1), s(:int, 2)), %q{foo[1, 2]}, %q{ ^ begin | ^ end |~~~~~~~~~ expression}) end def test_send_index_legacy Parser::Builders::Default.emit_index = false assert_parses( s(:send, s(:lvar, :foo), :[], s(:int, 1), s(:int, 2)), %q{foo[1, 2]}, %q{ ~~~~~~ selector |~~~~~~~~~ expression}) ensure Parser::Builders::Default.emit_index = true end def test_send_index_cmd assert_parses( s(:index, s(:lvar, :foo), s(:send, nil, :m, s(:lvar, :bar))), %q{foo[m bar]}) end def test_send_index_asgn assert_parses( s(:indexasgn, s(:lvar, :foo), s(:int, 1), s(:int, 2), s(:int, 3)), %q{foo[1, 2] = 3}, %q{ ^ begin | ^ end | ^ operator |~~~~~~~~~~~~~ expression}) end def test_send_index_asgn_legacy Parser::Builders::Default.emit_index = false assert_parses( s(:send, s(:lvar, :foo), :[]=, s(:int, 1), s(:int, 2), s(:int, 3)), %q{foo[1, 2] = 3}, %q{ ~~~~~~ selector | ^ operator |~~~~~~~~~~~~~ expression}) ensure Parser::Builders::Default.emit_index = true end def test_send_lambda assert_parses( s(:block, s(:lambda), s(:args), nil), %q{->{ }}, %q{~~ expression (lambda) | ^ begin | ^ end |~~~~~ expression}, SINCE_1_9) assert_parses( s(:block, s(:lambda), s(:args, s(:restarg)), nil), %q{-> * { }}, %q{~~ expression (lambda) | ^ begin | ^ end | ^ expression (args.restarg) |~~~~~~~~ expression}, SINCE_1_9) assert_parses( s(:block, s(:lambda), s(:args), nil), %q{-> do end}, %q{~~ expression (lambda) | ^^ begin | ^^^ end |~~~~~~~~~ expression}, SINCE_1_9) end def test_send_lambda_args assert_parses( s(:block, s(:lambda), s(:args, s(:arg, :a)), nil), %q{->(a) { }}, %q{~~ expression (lambda) | ^ begin (args) | ^ end (args) | ^ begin | ^ end |~~~~~~~~~ expression}, SINCE_1_9) assert_parses( s(:block, s(:lambda), s(:args, s(:arg, :a)), nil), %q{-> (a) { }}, %q{}, SINCE_2_0) end def test_send_lambda_args_shadow assert_parses( s(:block, s(:lambda), s(:args, s(:arg, :a), s(:shadowarg, :foo), s(:shadowarg, :bar)), nil), %q{->(a; foo, bar) { }}, %q{ ~~~ expression (args.shadowarg)}, SINCE_1_9) end def test_send_lambda_args_noparen assert_parses( s(:block, s(:lambda), s(:args, s(:kwoptarg, :a, s(:int, 1))), nil), %q{-> a: 1 { }}, %q{}, SINCE_2_0) assert_parses( s(:block, s(:lambda), s(:args, s(:kwarg, :a)), nil), %q{-> a: { }}, 
%q{}, SINCE_2_1) end def test_send_lambda_legacy Parser::Builders::Default.emit_lambda = false assert_parses( s(:block, s(:send, nil, :lambda), s(:args), nil), %q{->{ }}, %q{~~ selector (send) | ^ begin | ^ end |~~~~~ expression}, SINCE_1_9) ensure Parser::Builders::Default.emit_lambda = true end def test_send_call assert_parses( s(:send, s(:lvar, :foo), :call, s(:int, 1)), %q{foo.(1)}, %q{ ^ begin | ^ end | ^ dot |~~~~~~~ expression}, SINCE_1_9) assert_parses( s(:send, s(:lvar, :foo), :call, s(:int, 1)), %q{foo::(1)}, %q{ ^ begin | ^ end | ^^ dot |~~~~~~~~ expression}, SINCE_1_9) end def test_send_conditional assert_parses( s(:csend, s(:send, nil, :a), :b), %q{a&.b}, %q{ ^^ dot}, SINCE_2_3 + %w{ios}) end def test_send_attr_asgn_conditional assert_parses( s(:csend, s(:send, nil, :a), :b=, s(:int, 1)), %q{a&.b = 1}, %q{ ^^ dot}, SINCE_2_3 + %w{ios}) end def test_send_block_conditional assert_parses( s(:block, s(:csend, s(:lvar, :foo), :bar), s(:args), nil), %q{foo&.bar {}}, %q{}, SINCE_2_3 + %w{ios}) end def test_send_op_asgn_conditional assert_parses( s(:and_asgn, s(:csend, s(:send, nil, :a), :b), s(:int, 1)), %q{a&.b &&= 1}, %q{}, SINCE_2_3 + %w{ios}) end def test_lvar_injecting_match assert_parses( s(:begin, s(:match_with_lvasgn, s(:regexp, s(:str, '(?<match>bar)'), s(:regopt)), s(:str, 'bar')), s(:lvar, :match)), %q{/(?<match>bar)/ =~ 'bar'; match}, %q{ ~~ selector (match_with_lvasgn) |~~~~~~~~~~~~~~~~~~~~~~~~ expression (match_with_lvasgn)}, SINCE_1_9) assert_parses( s(:begin, s(:match_with_lvasgn, s(:regexp, s(:str, "(?<a>a)"), s(:regopt)), s(:str, "a")), s(:send, s(:regexp, s(:begin), s(:str, "(?<b>b)"), s(:regopt)), :=~, s(:str, "b")), s(:lvar, :a), s(:send, nil, :b)), %q{/(?<a>a)/ =~ 'a'; /#{}(?<b>b)/ =~ 'b'; a; b}, %q{}, SINCE_3_3) end def test_non_lvar_injecting_match assert_parses( s(:send, s(:regexp, s(:begin, s(:int, 1)), s(:str, '(?<bar>bar)'), s(:regopt)), :=~, s(:str, 'bar')), %q{/#{1}(?<bar>bar)/ =~ 'bar'}) end # To superclass def test_super assert_parses( s(:super, s(:lvar, :foo)), %q{super(foo)}, %q{~~~~~ keyword | ^ begin | ^ end |~~~~~~~~~~ expression}) assert_parses( s(:super, s(:lvar, :foo)), %q{super foo}, %q{~~~~~ keyword |~~~~~~~~~ expression}) assert_parses( s(:super), %q{super()}, %q{~~~~~ keyword | ^ begin | ^ end |~~~~~~~ expression}) end def test_zsuper assert_parses( s(:zsuper), %q{super}, %q{~~~~~ keyword |~~~~~ expression}) end def test_super_block assert_parses( s(:block, s(:super, s(:lvar, :foo), s(:lvar, :bar)), s(:args), nil), %q{super foo, bar do end}) assert_parses( s(:block, s(:zsuper), s(:args), nil), %q{super do end}) end # To block argument def test_yield assert_parses( s(:yield, s(:lvar, :foo)), %q{yield(foo)}, %q{~~~~~ keyword | ^ begin | ^ end |~~~~~~~~~~ expression}) assert_parses( s(:yield, s(:lvar, :foo)), %q{yield foo}, %q{~~~~~ keyword |~~~~~~~~~ expression}) assert_parses( s(:yield), %q{yield()}, %q{~~~~~ keyword | ^ begin | ^ end |~~~~~~~ expression}) assert_parses( s(:yield), %q{yield}, %q{~~~~~ keyword |~~~~~ expression}) end def test_yield_block assert_diagnoses( [:error, :block_given_to_yield], %q{yield foo do end}, %q{~~~~~ location | ~~ highlights (0)}) assert_diagnoses( [:error, :block_given_to_yield], %q{yield(&foo)}, %q{~~~~~ location | ~~~~ highlights (0)}) end # Call arguments def test_args_cmd assert_parses( s(:send, nil, :fun, s(:send, nil, :f, s(:lvar, :bar))), %q{fun(f bar)}) end def test_args_args_star assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:splat, s(:lvar, :bar))), %q{fun(foo, *bar)}) assert_parses( s(:send, nil, :fun, s(:lvar, :foo), 
s(:splat, s(:lvar, :bar)), s(:block_pass, s(:lvar, :baz))), %q{fun(foo, *bar, &baz)}) end def test_args_star assert_parses( s(:send, nil, :fun, s(:splat, s(:lvar, :bar))), %q{fun(*bar)}) assert_parses( s(:send, nil, :fun, s(:splat, s(:lvar, :bar)), s(:block_pass, s(:lvar, :baz))), %q{fun(*bar, &baz)}) end def test_args_block_pass assert_parses( s(:send, nil, :fun, s(:block_pass, s(:lvar, :bar))), %q{fun(&bar)}) end def test_args_args_comma assert_parses( s(:index, s(:lvar, :foo), s(:lvar, :bar)), %q{foo[bar,]}, %q{}, SINCE_1_9) end def test_args_assocs_legacy Parser::Builders::Default.emit_kwargs = false assert_parses( s(:send, nil, :fun, s(:hash, s(:pair, s(:sym, :foo), s(:int, 1)))), %q{fun(:foo => 1)}) assert_parses( s(:send, nil, :fun, s(:hash, s(:pair, s(:sym, :foo), s(:int, 1))), s(:block_pass, s(:lvar, :baz))), %q{fun(:foo => 1, &baz)}) assert_parses( s(:index, s(:self), s(:hash, s(:pair, s(:sym, :bar), s(:int, 1)))), %q{self[:bar => 1]}) assert_parses( s(:send, s(:self), :[]=, s(:lvar, :foo), s(:hash, s(:pair, s(:sym, :a), s(:int, 1)))), %q{self.[]= foo, :a => 1}) assert_parses( s(:yield, s(:hash, s(:pair, s(:sym, :foo), s(:int, 42)))), %q{yield(:foo => 42)}) assert_parses( s(:super, s(:hash, s(:pair, s(:sym, :foo), s(:int, 42)))), %q{super(:foo => 42)}) ensure Parser::Builders::Default.emit_kwargs = true end def test_args_assocs assert_parses( s(:send, nil, :fun, s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 1)))), %q{fun(:foo => 1)}) assert_parses( s(:send, nil, :fun, s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 1))), s(:block_pass, s(:lvar, :baz))), %q{fun(:foo => 1, &baz)}) assert_parses( s(:index, s(:self), s(:kwargs, s(:pair, s(:sym, :bar), s(:int, 1)))), %q{self[:bar => 1]}) assert_parses( s(:send, s(:self), :[]=, s(:lvar, :foo), s(:kwargs, s(:pair, s(:sym, :a), s(:int, 1)))), %q{self.[]= foo, :a => 1}) assert_parses( s(:yield, s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 42)))), %q{yield(:foo => 42)}) assert_parses( s(:super, s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 42)))), %q{super(:foo => 42)}) end def test_args_assocs_star assert_parses( s(:send, nil, :fun, s(:hash, s(:pair, s(:sym, :foo), s(:int, 1))), s(:splat, s(:lvar, :bar))), %q{fun(:foo => 1, *bar)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:hash, s(:pair, s(:sym, :foo), s(:int, 1))), s(:splat, s(:lvar, :bar)), s(:block_pass, s(:lvar, :baz))), %q{fun(:foo => 1, *bar, &baz)}, %q{}, %w(1.8)) end def test_args_assocs_comma assert_parses( s(:index, s(:lvar, :foo), s(:kwargs, s(:pair, s(:sym, :baz), s(:int, 1)))), %q{foo[:baz => 1,]}, %q{}, SINCE_1_9) end def test_args_args_assocs assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 1)))), %q{fun(foo, :foo => 1)}) assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 1))), s(:block_pass, s(:lvar, :baz))), %q{fun(foo, :foo => 1, &baz)}) end def test_args_args_assocs_comma assert_parses( s(:index, s(:lvar, :foo), s(:lvar, :bar), s(:kwargs, s(:pair, s(:sym, :baz), s(:int, 1)))), %q{foo[bar, :baz => 1,]}, %q{}, SINCE_1_9) end def test_args_args_assocs_star assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:hash, s(:pair, s(:sym, :foo), s(:int, 1))), s(:splat, s(:lvar, :bar))), %q{fun(foo, :foo => 1, *bar)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:hash, s(:pair, s(:sym, :foo), s(:int, 1))), s(:splat, s(:lvar, :bar)), s(:block_pass, s(:lvar, :baz))), %q{fun(foo, :foo => 1, *bar, &baz)}, %q{}, %w(1.8)) end # Call arguments with whitespace def 
test_space_args_cmd assert_parses( s(:send, nil, :fun, s(:begin, s(:send, nil, :f, s(:lvar, :bar)))), %q{fun (f bar)}) end def test_space_args_arg assert_parses( s(:send, nil, :fun, s(:begin, s(:int, 1))), %q{fun (1)}) end def test_space_args_arg_newline assert_parses( s(:send, nil, :fun, s(:begin, s(:int, 1))), %Q{fun (1\n)}, %q{}, ALL_VERSIONS - %w(mac)) end def test_space_args_arg_block assert_parses( s(:block, s(:send, nil, :fun, s(:begin, s(:int, 1))), s(:args), nil), %q{fun (1) {}}) assert_parses( s(:block, s(:send, s(:lvar, :foo), :fun, s(:int, 1)), s(:args), nil), %q{foo.fun (1) {}}, %q{}, %w(1.8)) assert_parses( s(:block, s(:send, s(:lvar, :foo), :fun, s(:begin, s(:int, 1))), s(:args), nil), %q{foo.fun (1) {}}, %q{}, SINCE_1_9) assert_parses( s(:block, s(:send, s(:lvar, :foo), :fun, s(:int, 1)), s(:args), nil), %q{foo::fun (1) {}}, %q{}, %w(1.8)) assert_parses( s(:block, s(:send, s(:lvar, :foo), :fun, s(:begin, s(:int, 1))), s(:args), nil), %q{foo::fun (1) {}}, %q{}, SINCE_1_9) end def test_space_args_hash_literal_then_block # This code only parses if the lexer enters expr_endarg state correctly assert_parses( s(:block, s(:send, nil, :f, s(:int, 1), s(:hash, s(:pair, s(:int, 1), s(:int, 2)))), s(:args), s(:int, 1)), %q{f 1, {1 => 2} {1}}, %q{}, ALL_VERSIONS - SINCE_2_5) end def test_space_args_arg_call assert_parses( s(:send, nil, :fun, s(:send, s(:begin, s(:int, 1)), :to_i)), %q{fun (1).to_i}) end def test_space_args_block_pass assert_parses( s(:send, nil, :fun, s(:block_pass, s(:lvar, :foo))), %q{fun (&foo)}, %q{}, %w(1.8)) end def test_space_args_arg_block_pass assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:block_pass, s(:lvar, :bar))), %q{fun (foo, &bar)}, %q{}, %w(1.8)) end def test_space_args_args_star assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:splat, s(:lvar, :bar))), %q{fun (foo, *bar)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:splat, s(:lvar, :bar)), s(:block_pass, s(:lvar, :baz))), %q{fun (foo, *bar, &baz)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:int, 1), s(:splat, s(:lvar, :bar))), %q{fun (foo, 1, *bar)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:int, 1), s(:splat, s(:lvar, :bar)), s(:block_pass, s(:lvar, :baz))), %q{fun (foo, 1, *bar, &baz)}, %q{}, %w(1.8)) end def test_space_args_star assert_parses( s(:send, nil, :fun, s(:splat, s(:lvar, :bar))), %q{fun (*bar)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:splat, s(:lvar, :bar)), s(:block_pass, s(:lvar, :baz))), %q{fun (*bar, &baz)}, %q{}, %w(1.8)) end def test_space_args_assocs assert_parses( s(:send, nil, :fun, s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 1)))), %q{fun (:foo => 1)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 1))), s(:block_pass, s(:lvar, :baz))), %q{fun (:foo => 1, &baz)}, %q{}, %w(1.8)) end def test_space_args_assocs_star assert_parses( s(:send, nil, :fun, s(:hash, s(:pair, s(:sym, :foo), s(:int, 1))), s(:splat, s(:lvar, :bar))), %q{fun (:foo => 1, *bar)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:hash, s(:pair, s(:sym, :foo), s(:int, 1))), s(:splat, s(:lvar, :bar)), s(:block_pass, s(:lvar, :baz))), %q{fun (:foo => 1, *bar, &baz)}, %q{}, %w(1.8)) end def test_space_args_args_assocs assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 1)))), %q{fun (foo, :foo => 1)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 1))), 
s(:block_pass, s(:lvar, :baz))), %q{fun (foo, :foo => 1, &baz)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:int, 1), s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 1)))), %q{fun (foo, 1, :foo => 1)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:int, 1), s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 1))), s(:block_pass, s(:lvar, :baz))), %q{fun (foo, 1, :foo => 1, &baz)}, %q{}, %w(1.8)) end def test_space_args_args_assocs_star assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:hash, s(:pair, s(:sym, :foo), s(:int, 1))), s(:splat, s(:lvar, :bar))), %q{fun (foo, :foo => 1, *bar)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:hash, s(:pair, s(:sym, :foo), s(:int, 1))), s(:splat, s(:lvar, :bar)), s(:block_pass, s(:lvar, :baz))), %q{fun (foo, :foo => 1, *bar, &baz)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:int, 1), s(:hash, s(:pair, s(:sym, :foo), s(:int, 1))), s(:splat, s(:lvar, :bar))), %q{fun (foo, 1, :foo => 1, *bar)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :fun, s(:lvar, :foo), s(:int, 1), s(:hash, s(:pair, s(:sym, :foo), s(:int, 1))), s(:splat, s(:lvar, :bar)), s(:block_pass, s(:lvar, :baz))), %q{fun (foo, 1, :foo => 1, *bar, &baz)}, %q{}, %w(1.8)) end def test_space_args_arg_arg assert_parses( s(:send, nil, :fun, s(:int, 1), s(:int, 2)), %q{fun (1, 2)}, %q{}, %w(1.8)) end def test_space_args_none assert_parses( s(:send, nil, :fun), %q{fun ()}, %q{}, %w(1.8)) end def test_space_args_block assert_parses( s(:block, s(:send, nil, :fun), s(:args), nil), %q{fun () {}}, %q{ ^ begin (send) | ^ end (send)}, %w(1.8)) assert_parses( s(:block, s(:send, s(:lvar, :foo), :fun), s(:args), nil), %q{foo.fun () {}}, %q{ ^ begin (send) | ^ end (send)}, %w(1.8)) assert_parses( s(:block, s(:send, s(:lvar, :foo), :fun), s(:args), nil), %q{foo::fun () {}}, %q{ ^ begin (send) | ^ end (send)}, %w(1.8)) assert_parses( s(:block, s(:send, nil, :fun, s(:begin)), s(:args), nil), %q{fun () {}}, %q{ ~~ expression (send.begin)}, SINCE_2_0) end # # Control flow # # Operators def test_and assert_parses( s(:and, s(:lvar, :foo), s(:lvar, :bar)), %q{foo and bar}, %q{ ~~~ operator |~~~~~~~~~~~ expression}) assert_parses( s(:and, s(:lvar, :foo), s(:lvar, :bar)), %q{foo && bar}, %q{ ~~ operator |~~~~~~~~~~ expression}) end def test_or assert_parses( s(:or, s(:lvar, :foo), s(:lvar, :bar)), %q{foo or bar}, %q{ ~~ operator |~~~~~~~~~~ expression}) assert_parses( s(:or, s(:lvar, :foo), s(:lvar, :bar)), %q{foo || bar}, %q{ ~~ operator |~~~~~~~~~~ expression}) end def test_and_or_masgn assert_parses( s(:and, s(:lvar, :foo), s(:begin, s(:masgn, s(:mlhs, s(:lvasgn, :a), s(:lvasgn, :b)), s(:lvar, :bar)))), %q{foo && (a, b = bar)}) assert_parses( s(:or, s(:lvar, :foo), s(:begin, s(:masgn, s(:mlhs, s(:lvasgn, :a), s(:lvasgn, :b)), s(:lvar, :bar)))), %q{foo || (a, b = bar)}) end # Branching def test_if assert_parses( s(:if, s(:lvar, :foo), s(:lvar, :bar), nil), %q{if foo then bar; end}, %q{~~ keyword | ~~~~ begin | ~~~ end |~~~~~~~~~~~~~~~~~~~~ expression}) assert_parses( s(:if, s(:lvar, :foo), s(:lvar, :bar), nil), %q{if foo; bar; end}, %q{~~ keyword | ~~~ end |~~~~~~~~~~~~~~~~ expression}) end def test_if_nl_then assert_parses( s(:if, s(:lvar, :foo), s(:lvar, :bar), nil), %Q{if foo\nthen bar end}, %q{ ~~~~ begin}) end def test_if_mod assert_parses( s(:if, s(:lvar, :foo), s(:lvar, :bar), nil), %q{bar if foo}, %q{ ~~ keyword |~~~~~~~~~~ expression}) end def test_unless assert_parses( s(:if, s(:lvar, :foo), nil, s(:lvar, :bar)), %q{unless 
foo then bar; end}, %q{~~~~~~ keyword | ~~~~ begin | ~~~ end |~~~~~~~~~~~~~~~~~~~~~~~~ expression}) assert_parses( s(:if, s(:lvar, :foo), nil, s(:lvar, :bar)), %q{unless foo; bar; end}, %q{~~~~~~ keyword | ~~~ end |~~~~~~~~~~~~~~~~~~~~ expression}) end def test_unless_mod assert_parses( s(:if, s(:lvar, :foo), nil, s(:lvar, :bar)), %q{bar unless foo}, %q{ ~~~~~~ keyword |~~~~~~~~~~~~~~ expression}) end def test_if_else assert_parses( s(:if, s(:lvar, :foo), s(:lvar, :bar), s(:lvar, :baz)), %q{if foo then bar; else baz; end}, %q{~~ keyword | ~~~~ begin | ~~~~ else | ~~~ end |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}) assert_parses( s(:if, s(:lvar, :foo), s(:lvar, :bar), s(:lvar, :baz)), %q{if foo; bar; else baz; end}, %q{~~ keyword | ~~~~ else | ~~~ end |~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}) end def test_unless_else assert_parses( s(:if, s(:lvar, :foo), s(:lvar, :baz), s(:lvar, :bar)), %q{unless foo then bar; else baz; end}, %q{~~~~~~ keyword | ~~~~ begin | ~~~~ else | ~~~ end |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}) assert_parses( s(:if, s(:lvar, :foo), s(:lvar, :baz), s(:lvar, :bar)), %q{unless foo; bar; else baz; end}, %q{~~~~~~ keyword | ~~~~ else | ~~~ end |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}) end def test_if_elsif assert_parses( s(:if, s(:lvar, :foo), s(:lvar, :bar), s(:if, s(:lvar, :baz), s(:int, 1), s(:int, 2))), %q{if foo; bar; elsif baz; 1; else 2; end}, %q{~~ keyword | ~~~~~ else | ~~~~~ keyword (if) | ~~~~ else (if) | ~~~ end |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}) end def test_ternary assert_parses( s(:if, s(:lvar, :foo), s(:int, 1), s(:int, 2)), %q{foo ? 1 : 2}, %q{ ^ question | ^ colon |~~~~~~~~~~~ expression}) end def test_ternary_ambiguous_symbol assert_parses( s(:begin, s(:lvasgn, :t, s(:int, 1)), s(:if, s(:begin, s(:lvar, :foo)), s(:lvar, :t), s(:const, nil, :T))), %q{t=1;(foo)?t:T}, %q{}, SINCE_1_9) end def test_if_masgn assert_diagnoses( [:error, :masgn_as_condition], %q{if (a, b = foo); end}, %q{ ~~~~~~~~~~ location}, %w(1.8 1.9 2.0 2.1 2.2 2.3 ios mac)) end def test_if_masgn__24 assert_parses( s(:if, s(:begin, s(:masgn, s(:mlhs, s(:lvasgn, :a), s(:lvasgn, :b)), s(:lvar, :foo))), nil, nil), %q{if (a, b = foo); end}, %q{}, SINCE_2_4) end def test_if_mod_masgn assert_diagnoses( [:error, :masgn_as_condition], %q{1 if (a, b = foo)}, %q{ ~~~~~~~~~~ location}, %w(1.8 1.9 2.0 2.1 2.2 2.3 ios mac)) end def test_tern_masgn assert_diagnoses( [:error, :masgn_as_condition], %q{(a, b = foo) ? 
1 : 2}, %q{ ~~~~~~~~~~ location}, %w(1.8 1.9 2.0 2.1 2.2 2.3 ios mac)) end def test_not_masgn assert_diagnoses( [:error, :masgn_as_condition], %q{!(a, b = foo)}, %q{ ~~~~~~~~~~ location}, %w(1.8 1.9 2.0 2.1 2.2 2.3 ios mac)) end def test_not_masgn__24 assert_parses( s(:send, s(:begin, s(:masgn, s(:mlhs, s(:lvasgn, :a), s(:lvasgn, :b)), s(:lvar, :foo))), :'!'), %q{!(a, b = foo)}, %q{}, SINCE_2_4) end def test_cond_begin assert_parses( s(:if, s(:begin, s(:lvar, :bar)), s(:lvar, :foo), nil), %q{if (bar); foo; end}) end def test_cond_begin_masgn assert_parses( s(:if, s(:begin, s(:lvar, :bar), s(:masgn, s(:mlhs, s(:lvasgn, :a), s(:lvasgn, :b)), s(:lvar, :foo))), nil, nil), %q{if (bar; a, b = foo); end}) end def test_cond_begin_and_or_masgn assert_diagnoses( [:error, :masgn_as_condition], %q{if foo && (a, b = bar); end}, %q{ ~~~~~~~~~~ location}, %w(1.9 2.0 2.1 2.2 2.3 ios mac)) assert_diagnoses( [:error, :masgn_as_condition], %q{if foo || (a, b = bar); end}, %q{ ~~~~~~~~~~ location}, %w(1.9 2.0 2.1 2.2 2.3 ios mac)) assert_parses( s(:if, s(:and, s(:begin, s(:masgn, s(:mlhs, s(:lvasgn, :a), s(:lvasgn, :b)), s(:lvar, :foo))), s(:lvar, :bar)), nil, nil), %q{if (a, b = foo) && bar; end}, %q{}, %w(1.8)) end def test_cond_iflipflop assert_parses( s(:if, s(:iflipflop, s(:lvar, :foo), s(:lvar, :bar)), nil, nil), %q{if foo..bar; end}, %q{ ~~~~~~~~ expression (iflipflop) | ~~ operator (iflipflop)}) assert_parses( s(:if, s(:iflipflop, s(:lvar, :foo), s(:nil)), nil, nil), %q{if foo..nil; end}, %q{ ~~~~~~~~ expression (iflipflop) | ~~ operator (iflipflop)}, %w(1.8)) assert_parses( s(:if, s(:iflipflop, s(:nil), s(:lvar, :bar)), nil, nil), %q{if nil..bar; end}, %q{ ~~~~~~~~ expression (iflipflop) | ~~ operator (iflipflop)}, %w(1.8)) assert_parses( s(:not, s(:begin, s(:iflipflop, s(:lvar, :foo), s(:lvar, :bar)))), %q{!(foo..bar)}, %q{ ~~~~~~~~ expression (begin.iflipflop) | ~~ operator (begin.iflipflop)}, %w(1.8)) assert_parses( s(:send, s(:begin, s(:iflipflop, s(:lvar, :foo), s(:lvar, :bar))), :'!'), %q{!(foo..bar)}, %q{ ~~~~~~~~ expression (begin.iflipflop) | ~~ operator (begin.iflipflop)}, SINCE_1_9) end def test_cond_iflipflop_with_endless_range assert_parses( s(:if, s(:iflipflop, s(:lvar, :foo), nil), nil, nil), %q{if foo..; end}, %q{ ~~~~~ expression (iflipflop) | ~~ operator (iflipflop)}, SINCE_2_6) end def test_cond_iflipflop_with_beginless_range assert_parses( s(:if, s(:iflipflop, nil, s(:lvar, :bar)), nil, nil), %q{if ..bar; end}, %q{ ~~~~~ expression (iflipflop) | ~~ operator (iflipflop)}, SINCE_2_7) end def test_cond_eflipflop assert_parses( s(:if, s(:eflipflop, s(:lvar, :foo), s(:lvar, :bar)), nil, nil), %q{if foo...bar; end}, %q{ ~~~~~~~~~ expression (eflipflop) | ~~~ operator (eflipflop)}) assert_parses( s(:if, s(:eflipflop, s(:lvar, :foo), s(:nil)), nil, nil), %q{if foo...nil; end}, %q{ ~~~~~~~~~ expression (eflipflop) | ~~~ operator (eflipflop)}, %w(1.8)) assert_parses( s(:if, s(:eflipflop, s(:nil), s(:lvar, :bar)), nil, nil), %q{if nil...bar; end}, %q{ ~~~~~~~~~ expression (eflipflop) | ~~~ operator (eflipflop)}, %w(1.8)) assert_parses( s(:not, s(:begin, s(:eflipflop, s(:lvar, :foo), s(:lvar, :bar)))), %q{!(foo...bar)}, %q{ ~~~~~~~~~ expression (begin.eflipflop) | ~~~ operator (begin.eflipflop)}, %w(1.8)) assert_parses( s(:send, s(:begin, s(:eflipflop, s(:lvar, :foo), s(:lvar, :bar))), :'!'), %q{!(foo...bar)}, %q{ ~~~~~~~~~ expression (begin.eflipflop) | ~~~ operator (begin.eflipflop)}, SINCE_1_9) end def test_cond_eflipflop_with_endless_range assert_parses( s(:if, s(:eflipflop, s(:lvar, 
:foo), nil), nil, nil), %q{if foo...; end}, %q{ ~~~~~~ expression (eflipflop) | ~~~ operator (eflipflop)}, SINCE_2_6) end def test_cond_eflipflop_with_beginless_range assert_parses( s(:if, s(:eflipflop, nil, s(:lvar, :bar)), nil, nil), %q{if ...bar; end}, %q{ ~~~~~~ expression (eflipflop) | ~~~ operator (eflipflop)}, SINCE_2_7) end def test_cond_match_current_line assert_parses( s(:if, s(:match_current_line, s(:regexp, s(:str, 'wat'), s(:regopt))), nil, nil), %q{if /wat/; end}, %q{ ~~~~~ expression (match_current_line)}) assert_parses( s(:not, s(:match_current_line, s(:regexp, s(:str, 'wat'), s(:regopt)))), %q{!/wat/}, %q{ ~~~~~ expression (match_current_line)}, %w(1.8)) assert_parses( s(:send, s(:match_current_line, s(:regexp, s(:str, 'wat'), s(:regopt))), :'!'), %q{!/wat/}, %q{ ~~~~~ expression (match_current_line)}, SINCE_1_9) end # Case matching def test_case_expr assert_parses( s(:case, s(:lvar, :foo), s(:when, s(:str, 'bar'), s(:lvar, :bar)), nil), %q{case foo; when 'bar'; bar; end}, %q{~~~~ keyword | ~~~~ keyword (when) | ~~~ end | ~~~~~~~~~~~~~~~ expression (when) |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}) end def test_case_expr_else assert_parses( s(:case, s(:lvar, :foo), s(:when, s(:str, 'bar'), s(:lvar, :bar)), s(:lvar, :baz)), %q{case foo; when 'bar'; bar; else baz; end}, %q{~~~~ keyword | ~~~~ keyword (when) | ~~~~ else | ~~~ end |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}) end def test_case_cond assert_parses( s(:case, nil, s(:when, s(:lvar, :foo), s(:str, 'foo')), nil), %q{case; when foo; 'foo'; end}, %q{~~~~ keyword | ~~~~ keyword (when) | ~~~ end |~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}) end def test_case_cond_else assert_parses( s(:case, nil, s(:when, s(:lvar, :foo), s(:str, 'foo')), s(:str, 'bar')), %q{case; when foo; 'foo'; else 'bar'; end}, %q{~~~~ keyword | ~~~~ keyword (when) | ~~~~ else | ~~~ end |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}) end def test_case_cond_just_else assert_parses( s(:case, nil, s(:str, 'bar')), %q{case; else 'bar'; end}, %q{~~~~ keyword | ~~~~ else | ~~~ end |~~~~~~~~~~~~~~~~~~~~~ expression}, %w(1.8)) end def test_when_then assert_parses( s(:case, s(:lvar, :foo), s(:when, s(:str, 'bar'), s(:lvar, :bar)), nil), %q{case foo; when 'bar' then bar; end}, %q{ ~~~~ keyword (when) | ~~~~ begin (when) | ~~~~~~~~~~~~~~~~~~~ expression (when)}) end def test_when_multi assert_parses( s(:case, s(:lvar, :foo), s(:when, s(:str, 'bar'), s(:str, 'baz'), s(:lvar, :bar)), nil), %q{case foo; when 'bar', 'baz'; bar; end}) end def test_when_splat assert_parses( s(:case, s(:lvar, :foo), s(:when, s(:int, 1), s(:splat, s(:lvar, :baz)), s(:lvar, :bar)), s(:when, s(:splat, s(:lvar, :foo)), nil), nil), %q{case foo; when 1, *baz; bar; when *foo; end}, %q{ ^ operator (when/1.splat) | ~~~~ expression (when/1.splat) | ^ operator (when/2.splat) | ~~~~ expression (when/2.splat)}) end # Looping def test_while assert_parses( s(:while, s(:lvar, :foo), s(:send, nil, :meth)), %q{while foo do meth end}, %q{~~~~~ keyword | ~~ begin | ~~~ end |~~~~~~~~~~~~~~~~~~~~~ expression}) assert_parses( s(:while, s(:lvar, :foo), s(:send, nil, :meth)), %q{while foo; meth end}, %q{~~~~~ keyword | ~~~ end |~~~~~~~~~~~~~~~~~~~ expression}) end def test_while_mod assert_parses( s(:while, s(:lvar, :foo), s(:send, nil, :meth)), %q{meth while foo}, %q{ ~~~~~ keyword}) end def test_until assert_parses( s(:until, s(:lvar, :foo), s(:send, nil, :meth)), %q{until foo do meth end}, %q{~~~~~ keyword | ~~ begin | ~~~ end}) assert_parses( s(:until, s(:lvar, :foo), s(:send, nil, :meth)), 
%q{until foo; meth end}, %q{~~~~~ keyword | ~~~ end}) end def test_until_mod assert_parses( s(:until, s(:lvar, :foo), s(:send, nil, :meth)), %q{meth until foo}, %q{ ~~~~~ keyword}) end def test_while_post assert_parses( s(:while_post, s(:lvar, :foo), s(:kwbegin, s(:send, nil, :meth))), %q{begin meth end while foo}, %q{ ~~~~~ keyword}) end def test_until_post assert_parses( s(:until_post, s(:lvar, :foo), s(:kwbegin, s(:send, nil, :meth))), %q{begin meth end until foo}, %q{ ~~~~~ keyword}) end def test_while_masgn assert_diagnoses( [:error, :masgn_as_condition], %q{while (a, b = foo); end}, %q{ ~~~~~~~~~~ location}, %w(1.8 1.9 2.0 2.1 2.2 2.3 ios mac)) end def test_while_mod_masgn assert_diagnoses( [:error, :masgn_as_condition], %q{foo while (a, b = foo)}, %q{ ~~~~~~~~~~ location}, %w(1.8 1.9 2.0 2.1 2.2 2.3 ios mac)) end def test_for assert_parses( s(:for, s(:lvasgn, :a), s(:lvar, :foo), s(:send, nil, :p, s(:lvar, :a))), %q{for a in foo do p a; end}, %q{~~~ keyword | ~~ in | ~~ begin | ~~~ end |~~~~~~~~~~~~~~~~~~~~~~~~ expression}) assert_parses( s(:for, s(:lvasgn, :a), s(:lvar, :foo), s(:send, nil, :p, s(:lvar, :a))), %q{for a in foo; p a; end}) end def test_for_mlhs assert_parses( s(:for, s(:mlhs, s(:lvasgn, :a), s(:lvasgn, :b)), s(:lvar, :foo), s(:send, nil, :p, s(:lvar, :a), s(:lvar, :b))), %q{for a, b in foo; p a, b; end}, %q{ ~~~~ expression (mlhs)}) end # Control flow commands def test_break assert_parses( s(:break, s(:begin, s(:lvar, :foo))), %q{break(foo)}, %q{~~~~~ keyword |~~~~~~~~~~ expression}, SINCE_1_9) assert_parses( s(:break, s(:begin, s(:lvar, :foo))), %q{break(foo)}, %q{~~~~~ keyword |~~~~~~~~~~ expression}, %w(1.8)) assert_parses( s(:break, s(:lvar, :foo)), %q{break foo}, %q{~~~~~ keyword |~~~~~~~~~ expression}) assert_parses( s(:break, s(:begin)), %q{break()}, %q{~~~~~ keyword |~~~~~~~ expression}, SINCE_1_9) assert_parses( s(:break), %q{break}, %q{~~~~~ keyword |~~~~~ expression}) end def test_break_block assert_parses( s(:break, s(:block, s(:send, nil, :fun, s(:lvar, :foo)), s(:args), nil)), %q{break fun foo do end}, %q{ ~~~~~~~~~~~~~~ expression (block) |~~~~~~~~~~~~~~~~~~~~ expression}, ALL_VERSIONS - %w(1.8 ios)) end def test_return assert_parses( s(:return, s(:begin, s(:lvar, :foo))), %q{return(foo)}, %q{~~~~~~ keyword |~~~~~~~~~~~ expression}, SINCE_1_9) assert_parses( s(:return, s(:begin, s(:lvar, :foo))), %q{return(foo)}, %q{~~~~~~ keyword |~~~~~~~~~~~ expression}, %w(1.8)) assert_parses( s(:return, s(:lvar, :foo)), %q{return foo}, %q{~~~~~~ keyword |~~~~~~~~~~ expression}) assert_parses( s(:return, s(:begin)), %q{return()}, %q{~~~~~~ keyword |~~~~~~~~ expression}, SINCE_1_9) assert_parses( s(:return), %q{return}, %q{~~~~~~ keyword |~~~~~~ expression}) end def test_return_block assert_parses( s(:return, s(:block, s(:send, nil, :fun, s(:lvar, :foo)), s(:args), nil)), %q{return fun foo do end}, %q{ ~~~~~~~~~~~~~~ expression (block) |~~~~~~~~~~~~~~~~~~~~~ expression}, ALL_VERSIONS - %w(1.8 ios)) end def test_next assert_parses( s(:next, s(:begin, s(:lvar, :foo))), %q{next(foo)}, %q{~~~~ keyword |~~~~~~~~~ expression}, SINCE_1_9) assert_parses( s(:next, s(:begin, s(:lvar, :foo))), %q{next(foo)}, %q{~~~~ keyword |~~~~~~~~~ expression}, %w(1.8)) assert_parses( s(:next, s(:lvar, :foo)), %q{next foo}, %q{~~~~ keyword |~~~~~~~~ expression}) assert_parses( s(:next, s(:begin)), %q{next()}, %q{~~~~ keyword |~~~~~~ expression}, SINCE_1_9) assert_parses( s(:next), %q{next}, %q{~~~~ keyword |~~~~ expression}) end def test_next_block assert_parses( s(:next, s(:block, s(:send, 
nil, :fun, s(:lvar, :foo)), s(:args), nil)), %q{next fun foo do end}, %q{ ~~~~~~~~~~~~~~ expression (block) |~~~~~~~~~~~~~~~~~~~ expression}, ALL_VERSIONS - %w(1.8 ios)) end def test_redo assert_parses( s(:redo), %q{redo}, %q{~~~~ keyword |~~~~ expression}) end # Exception handling def test_rescue assert_parses( s(:kwbegin, s(:rescue, s(:send, nil, :meth), s(:resbody, nil, nil, s(:lvar, :foo)), nil)), %q{begin; meth; rescue; foo; end}, %q{~~~~~ begin | ~~~~~~ keyword (rescue.resbody) | ~~~~~~~~~~~ expression (rescue.resbody) | ~~~~~~~~~~~~~~~~~ expression (rescue) | ~~~ end |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}) end def test_rescue_else assert_parses( s(:kwbegin, s(:rescue, s(:send, nil, :meth), s(:resbody, nil, nil, s(:lvar, :foo)), s(:lvar, :bar))), %q{begin; meth; rescue; foo; else; bar; end}, %q{ ~~~~ else (rescue) | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression (rescue)}) end def test_rescue_else_useless assert_parses( s(:kwbegin, s(:begin, s(:int, 2))), %q{begin; else; 2; end}, %q{ ~~~~ begin (begin)}, ALL_VERSIONS - SINCE_2_6) assert_parses( s(:kwbegin, s(:int, 1), s(:begin, s(:int, 2))), %q{begin; 1; else; 2; end}, %q{ ~~~~ begin (begin)}, ALL_VERSIONS - SINCE_2_6) assert_parses( s(:kwbegin, s(:int, 1), s(:int, 2), s(:begin, s(:int, 3))), %q{begin; 1; 2; else; 3; end}, %q{ ~~~~ begin (begin)}, ALL_VERSIONS - SINCE_2_6) assert_diagnoses( [:warning, :useless_else], %q{begin; 1; else; 2; end}, %q{ ~~~~ location}, ALL_VERSIONS - SINCE_2_6) assert_diagnoses( [:error, :useless_else], %q{begin; 1; else; 2; end}, %q{ ~~~~ location}, SINCE_2_6) assert_diagnoses( [:error, :useless_else], %q{begin; 1; else; end}, %q{ ~~~~ location}, SINCE_2_6) end def test_ensure assert_parses( s(:kwbegin, s(:ensure, s(:send, nil, :meth), s(:lvar, :bar))), %q{begin; meth; ensure; bar; end}, %q{~~~~~ begin | ~~~~~~ keyword (ensure) | ~~~~~~~~~~~~~~~~~ expression (ensure) | ~~~ end |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}) end def test_ensure_empty assert_parses( s(:kwbegin, s(:ensure, nil, nil)), %q{begin ensure end}, %q{~~~~~ begin | ~~~~~~ keyword (ensure) | ~~~~~~ expression (ensure) | ~~~ end |~~~~~~~~~~~~~~~~ expression}) end def test_rescue_ensure assert_parses( s(:kwbegin, s(:ensure, s(:rescue, s(:send, nil, :meth), s(:resbody, nil, nil, s(:lvar, :baz)), nil), s(:lvar, :bar))), %q{begin; meth; rescue; baz; ensure; bar; end}, %q{ ~~~~~~ keyword (ensure) | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression (ensure) | ~~~~~~ keyword (ensure.rescue.resbody) | ~~~~~~~~~~~~~~~~~ expression (ensure.rescue)}) end def test_rescue_else_ensure assert_parses( s(:kwbegin, s(:ensure, s(:rescue, s(:send, nil, :meth), s(:resbody, nil, nil, s(:lvar, :baz)), s(:lvar, :foo)), s(:lvar, :bar))), %q{begin; meth; rescue; baz; else foo; ensure; bar end}, %q{ ~~~~~~ keyword (ensure) | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression (ensure) | ~~~~~~ keyword (ensure.rescue.resbody) | ~~~~ else (ensure.rescue) | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression (ensure.rescue)}) end def test_rescue_mod assert_parses( s(:rescue, s(:send, nil, :meth), s(:resbody, nil, nil, s(:lvar, :bar)), nil), %q{meth rescue bar}, %q{ ~~~~~~ keyword (resbody) | ~~~~~~~~~~ expression (resbody) |~~~~~~~~~~~~~~~ expression}) end def test_rescue_mod_asgn assert_parses( s(:lvasgn, :foo, s(:rescue, s(:send, nil, :meth), s(:resbody, nil, nil, s(:lvar, :bar)), nil)), %q{foo = meth rescue bar}, %q{ ~~~~~~ keyword (rescue.resbody) | ~~~~~~~~~~ expression (rescue.resbody) | ~~~~~~~~~~~~~~~ expression (rescue) |~~~~~~~~~~~~~~~~~~~~~ expression}) end def 
test_rescue_mod_masgn assert_parses( s(:masgn, s(:mlhs, s(:lvasgn, :foo), s(:lvasgn, :bar)), s(:rescue, s(:send, nil, :meth), s(:resbody, nil, nil, s(:array, s(:int, 1), s(:int, 2))), nil)), %q{foo, bar = meth rescue [1, 2]}, %q{ ~~~~~~ keyword (rescue.resbody) | ~~~~~~~~~~~~~ expression (rescue.resbody) | ~~~~~~~~~~~~~~~~~~ expression (rescue) |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}, SINCE_2_7) end def test_rescue_mod_op_assign assert_parses( s(:op_asgn, s(:lvasgn, :foo), :+, s(:rescue, s(:send, nil, :meth), s(:resbody, nil, nil, s(:lvar, :bar)), nil)), %q{foo += meth rescue bar}, %q{ ~~~~~~ keyword (rescue.resbody) | ~~~~~~~~~~ expression (rescue.resbody) | ~~~~~~~~~~~~~~~ expression (rescue) |~~~~~~~~~~~~~~~~~~~~~~ expression}, SINCE_1_9) end def test_rescue_without_begin_end assert_parses( s(:block, s(:send, nil, :meth), s(:args), s(:rescue, s(:lvar, :foo), s(:resbody, nil, nil, s(:lvar, :bar)), nil)), %q{meth do; foo; rescue; bar; end}, %q{ ~~~~~~ keyword (rescue.resbody) | ~~~~~~~~~~~ expression (rescue.resbody) | ~~~~~~~~~~~~~~~~ expression (rescue)}, SINCE_2_5) end def test_resbody_list assert_parses( s(:kwbegin, s(:rescue, s(:send, nil, :meth), s(:resbody, s(:array, s(:const, nil, :Exception)), nil, s(:lvar, :bar)), nil)), %q{begin; meth; rescue Exception; bar; end}) end def test_resbody_list_mrhs assert_parses( s(:kwbegin, s(:rescue, s(:send, nil, :meth), s(:resbody, s(:array, s(:const, nil, :Exception), s(:lvar, :foo)), nil, s(:lvar, :bar)), nil)), %q{begin; meth; rescue Exception, foo; bar; end}) end def test_resbody_var assert_parses( s(:kwbegin, s(:rescue, s(:send, nil, :meth), s(:resbody, nil, s(:lvasgn, :ex), s(:lvar, :bar)), nil)), %q{begin; meth; rescue => ex; bar; end}) assert_parses( s(:kwbegin, s(:rescue, s(:send, nil, :meth), s(:resbody, nil, s(:ivasgn, :@ex), s(:lvar, :bar)), nil)), %q{begin; meth; rescue => @ex; bar; end}) end def test_resbody_list_var assert_parses( s(:kwbegin, s(:rescue, s(:send, nil, :meth), s(:resbody, s(:array, s(:lvar, :foo)), s(:lvasgn, :ex), s(:lvar, :bar)), nil)), %q{begin; meth; rescue foo => ex; bar; end}) end def test_retry assert_parses( s(:retry), %q{retry}, %q{~~~~~ keyword |~~~~~ expression}) end # BEGIN and END def test_preexe assert_parses( s(:preexe, s(:int, 1)), %q{BEGIN { 1 }}, %q{~~~~~ keyword | ^ begin | ^ end |~~~~~~~~~~~ expression}) end def test_preexe_invalid assert_diagnoses( [:error, :begin_in_method], %q{def f; BEGIN{}; end}, %q{ ~~~~~ location}, # Yes. *Exclude 1.9*. Sigh. ALL_VERSIONS - %w(1.9 mac ios)) end def test_postexe assert_parses( s(:postexe, s(:int, 1)), %q{END { 1 }}, %q{~~~ keyword | ^ begin | ^ end |~~~~~~~~~ expression}) end # # Miscellanea # def test_kwbegin_compstmt assert_parses( s(:kwbegin, s(:send, nil, :foo!), s(:send, nil, :bar!)), %q{begin foo!; bar! 
end}) end def test_crlf_line_endings with_versions(ALL_VERSIONS) do |_ver, parser| source_file = Parser::Source::Buffer.new('(comments)', source: "\r\nfoo") range = lambda do |from, to| Parser::Source::Range.new(source_file, from, to) end ast = parser.parse(source_file) assert_equal s(:lvar, :foo), ast assert_equal range.call(1, 4), ast.loc.expression end end def test_begin_cmdarg assert_parses( s(:send, nil, :p, s(:kwbegin, s(:block, s(:send, s(:int, 1), :times), s(:args), s(:int, 1)))), %q{p begin 1.times do 1 end end}, %{}, SINCE_2_0) end def test_bug_cmdarg assert_parses( s(:send, nil, :meth, s(:begin, s(:block, s(:send, nil, :lambda), s(:args), nil))), %q{meth (lambda do end)}, %q{}, %w(1.8)) assert_parses( s(:send, nil, :assert, s(:send, nil, :dogs)), %q{assert dogs}) assert_parses( s(:send, nil, :assert, s(:kwargs, s(:pair, s(:sym, :do), s(:true)))), %q{assert do: true}, %q{}, SINCE_1_9) assert_parses( s(:send, nil, :f, s(:kwargs, s(:pair, s(:sym, :x), s(:block, s(:lambda), s(:args), s(:block, s(:send, nil, :meth), s(:args), nil))))), %q{f x: -> do meth do end end}, %q{}, SINCE_1_9) end def test_file_line_non_literals with_versions(ALL_VERSIONS) do |_ver, parser| parser.builder.emit_file_line_as_literals = false source_file = Parser::Source::Buffer.new('(comments)', source: "[__FILE__, __LINE__]") ast = parser.parse(source_file) assert_equal s(:array, s(:__FILE__), s(:__LINE__)), ast end end def test_bom assert_parses( s(:int, 1), %Q{\xef\xbb\xbf1}.b, %q{}, %w(1.9 2.0 2.1)) end def test_magic_encoding_comment assert_parses( s(:begin, s(:lvasgn, :"проверка", s(:int, 42)), s(:send, nil, :puts, s(:lvar, :"проверка"))), %Q{# coding:koi8-r \xd0\xd2\xcf\xd7\xc5\xd2\xcb\xc1 = 42 puts \xd0\xd2\xcf\xd7\xc5\xd2\xcb\xc1}.b, %q{}, %w(1.9 2.0 2.1)) end def test_regexp_encoding assert_parses( s(:match_with_lvasgn, s(:regexp, s(:str, "\\xa8"), s(:regopt, :n)), s(:str, "")), %q{/\xa8/n =~ ""}.dup.force_encoding(Encoding::UTF_8), %{}, SINCE_3_1 - SINCE_1_9) end # # Error recovery # def test_unknown_percent_str assert_diagnoses( [:error, :unexpected_percent_str, { :type => '%k' }], %q{%k[foo]}, %q{~~ location}) end def test_unterminated_embedded_doc assert_diagnoses( [:fatal, :embedded_document], %Q{=begin\nfoo\nend}, %q{~~~~~~ location}) assert_diagnoses( [:fatal, :embedded_document], %Q{=begin\nfoo\nend\n}, %q{~~~~~~ location}) end def test_codepoint_too_large assert_diagnoses( [:error, :unicode_point_too_large], %q{"\u{120 120000}"}, %q{ ~~~~~~ location}, SINCE_1_9) end def test_on_error assert_diagnoses( [:error, :unexpected_token, { :token => 'tIDENTIFIER' }], %q{def foo(bar baz); end}, %q{ ~~~ location}) end # # Token and comment extraction # def assert_parses_with_comments(ast_pattern, source, comments_pattern) with_versions(ALL_VERSIONS) do |_ver, parser| source_file = Parser::Source::Buffer.new('(comments)', source: source) comments_pattern_here = comments_pattern.map do |(from, to)| range = Parser::Source::Range.new(source_file, from, to) Parser::Source::Comment.new(range) end ast, comments = parser.parse_with_comments(source_file) assert_equal ast_pattern, ast assert_equal comments_pattern_here, comments end end def test_comment_interleaved assert_parses_with_comments( s(:send, s(:int, 1), :+, s(:int, 2)), %Q{1 + # foo\n 2}, [ [4, 9] ]) end def test_comment_single assert_parses_with_comments( s(:send, nil, :puts), %Q{puts # whatever}, [ [5, 15] ]) end def test_tokenize with_versions(ALL_VERSIONS) do |_ver, parser| source_file = Parser::Source::Buffer.new('(tokenize)', source: "1 + # foo\n 
2") range = lambda do |from, to| Parser::Source::Range.new(source_file, from, to) end ast, comments, tokens = parser.tokenize(source_file) assert_equal s(:send, s(:int, 1), :+, s(:int, 2)), ast assert_equal [ Parser::Source::Comment.new(range.call(4, 9)) ], comments assert_equal [ [:tINTEGER, [ 1, range.call(0, 1) ]], [:tPLUS, [ '+', range.call(2, 3) ]], [:tCOMMENT, [ '# foo', range.call(4, 9) ]], [:tINTEGER, [ 2, range.call(11, 12) ]], ], tokens end end def test_tokenize_recover with_versions(ALL_VERSIONS) do |_ver, parser| source_file = Parser::Source::Buffer.new('(tokenize)', source: "1 + # foo\n ") range = lambda do |from, to| Parser::Source::Range.new(source_file, from, to) end ast, comments, tokens = parser.tokenize(source_file, true) assert_nil ast assert_equal [ Parser::Source::Comment.new(range.call(4, 9)) ], comments assert_equal [ [:tINTEGER, [ 1, range.call(0, 1) ]], [:tPLUS, [ '+', range.call(2, 3) ]], [:tCOMMENT, [ '# foo', range.call(4, 9) ]], ], tokens end end # # Bug-specific tests # def test_bug_cmd_string_lookahead assert_parses( s(:block, s(:send, nil, :desc, s(:str, 'foo')), s(:args), nil), %q{desc "foo" do end}) end def test_bug_do_block_in_call_args # [ruby-core:59342] [Bug #9308] assert_parses( s(:send, nil, :bar, s(:def, :foo, s(:args), s(:block, s(:send, s(:self), :each), s(:args), nil))), %q{bar def foo; self.each do end end}, %q{}, SINCE_1_9) end def test_bug_do_block_in_cmdarg # [ruby-core:61950] [Bug #9726] assert_parses( s(:send, nil, :tap, s(:begin, s(:block, s(:send, nil, :proc), s(:args), nil))), %q{tap (proc do end)}, %q{}, ALL_VERSIONS - %w(1.8 mac ios)) end def test_bug_interp_single assert_parses( s(:dstr, s(:begin, s(:int, 1))), %q{"#{1}"}) assert_parses( s(:array, s(:dstr, s(:begin, s(:int, 1)))), %q{%W"#{1}"}) end def test_bug_def_no_paren_eql_begin assert_parses( s(:def, :foo, s(:args), nil), %Q{def foo\n=begin\n=end\nend}) end def test_bug_while_not_parens_do assert_parses( s(:while, s(:send, s(:begin, s(:true)), :"!"), nil), %q{while not (true) do end}, %q{}, SINCE_1_9) end def test_bug_rescue_empty_else assert_parses( s(:kwbegin, s(:rescue, nil, s(:resbody, s(:array, s(:const, nil, :LoadError)), nil, nil), nil)), %q{begin; rescue LoadError; else; end}, %q{ ~~~~ else (rescue) | ~~~~~~~~~~~~~~~~~~~~~~ expression (rescue)}) end def test_bug_def_empty_else assert_parses( s(:def, :foo, s(:args), s(:begin, s(:begin, nil))), %q{def foo; else; end}, %q{}, ALL_VERSIONS - SINCE_2_6) end def test_bug_heredoc_do assert_parses( s(:block, s(:send, nil, :f, s(:dstr)), s(:args), nil), %Q{f <<-TABLE do\nTABLE\nend}) end def test_bug_ascii_8bit_in_literal assert_diagnoses( [:error, :invalid_encoding], %q{".\xc3."}, %q{^^^^^^^^ location}, ALL_VERSIONS) assert_diagnoses( [:error, :invalid_encoding], %q{%W"x .\xc3."}, %q{ ^^^^^^ location}, ALL_VERSIONS) assert_diagnoses( [:error, :invalid_encoding], %q{:".\xc3."}, %q{ ^^^^^^ location}, ALL_VERSIONS) assert_diagnoses( [:error, :invalid_encoding], %q{%I"x .\xc3."}, %q{ ^^^^^^ location}, ALL_VERSIONS - %w(1.8 1.9 ios mac)) assert_parses( s(:int, 0xc3), %q{?\xc3}, %q{}, %w(1.8)) assert_diagnoses( [:error, :invalid_encoding], %q{?\xc3}, %q{^^^^^ location}, SINCE_1_9) assert_parses( s(:str, "проверка"), %q{# coding:utf-8 "\xD0\xBF\xD1\x80\xD0\xBE\xD0\xB2\xD0\xB5\xD1\x80\xD0\xBA\xD0\xB0"}, %q{}, SINCE_1_9) end def test_ruby_bug_9669 assert_parses( s(:def, :a, s(:args, s(:kwarg, :b)), s(:return)), %Q{def a b:\nreturn\nend}, %q{}, SINCE_2_1) assert_parses( s(:lvasgn, :o, s(:hash, s(:pair, s(:sym, :a), s(:int, 1)))), %Q{o = 
{\na:\n1\n}}, %q{}, SINCE_2_1) end def test_ruby_bug_10279 assert_parses( s(:hash, s(:pair, s(:sym, :a), s(:if, s(:true), s(:int, 42), nil))), %q{{a: if true then 42 end}}, %q{}, SINCE_2_1) end def test_ruby_bug_10653 assert_parses( s(:if, s(:true), s(:block, s(:send, s(:int, 1), :tap), s(:args, s(:procarg0, s(:arg, :n))), s(:send, nil, :p, s(:lvar, :n))), s(:int, 0)), %q{true ? 1.tap do |n| p n end : 0}, %q{}, SINCE_1_9) assert_parses( s(:if, s(:true), s(:block, s(:send, s(:int, 1), :tap), s(:args, s(:arg, :n)), s(:send, nil, :p, s(:lvar, :n))), s(:int, 0)), %q{true ? 1.tap do |n| p n end : 0}, %q{}, %w(1.8)) assert_parses( s(:if, s(:false), s(:block, s(:send, nil, :raise), s(:args), nil), s(:block, s(:send, nil, :tap), s(:args), nil)), %q{false ? raise {} : tap {}}, %q{}, ALL_VERSIONS) assert_parses( s(:if, s(:false), s(:block, s(:send, nil, :raise), s(:args), nil), s(:block, s(:send, nil, :tap), s(:args), nil)), %q{false ? raise do end : tap do end}, %q{}, ALL_VERSIONS) end def test_ruby_bug_11107 assert_parses( s(:send, nil, :p, s(:block, s(:lambda), s(:args), s(:block, s(:send, nil, :a), s(:args), nil))), %q{p ->() do a() do end end}, %q{}, SINCE_2_1) # no 1.9 backport end def test_ruby_bug_11380 assert_parses( s(:block, s(:send, nil, :p, s(:block, s(:lambda), s(:args), s(:sym, :hello)), s(:kwargs, s(:pair, s(:sym, :a), s(:int, 1)))), s(:args), nil), %q{p -> { :hello }, a: 1 do end}, %q{}, SINCE_2_1) # no 1.9 backport end def test_ruby_bug_11873_a [[":e", s(:sym, :e)], ["1", s(:int, 1)], ["1.0", s(:float, 1.0)], ["1.0r", s(:rational, Rational(1, 1))], ["1.0i", s(:complex, Complex(0.0, 1.0))]].each do |code, node| expect_a = \ s(:block, s(:send, nil, :a, s(:block, s(:send, nil, :b), s(:args), s(:send, nil, :c, s(:send, nil, :d))), node), s(:args), nil) assert_parses( expect_a, %Q{a b{c d}, #{code} do end}, %q{}, SINCE_2_4) assert_parses( expect_a, %Q{a b{c(d)}, #{code} do end}, %q{}, SINCE_2_4) expect_b = \ s(:block, s(:send, nil, :a, s(:send, nil, :b, s(:send, nil, :c, s(:send, nil, :d))), node), s(:args), nil) assert_parses( expect_b, %Q{a b(c d), #{code} do end}, %q{}, SINCE_2_4) assert_parses( expect_b, %Q{a b(c(d)), #{code} do end}, %q{}, SINCE_2_4) end end def test_ruby_bug_11873_b assert_parses( s(:block, s(:send, nil, :p, s(:block, s(:send, nil, :p), s(:args), s(:begin, s(:send, nil, :p, s(:send, nil, :p)), s(:send, nil, :p, s(:send, nil, :p)))), s(:send, nil, :tap)), s(:args), nil), %q{p p{p(p);p p}, tap do end}, %q{}, SINCE_2_4) end def test_ruby_bug_11989 assert_parses( s(:send, nil, :p, s(:str, "x\n y\n")), %Q{p <<~"E"\n x\\n y\nE}, %q{}, SINCE_2_3) end def test_ruby_bug_11990 assert_parses( s(:send, nil, :p, s(:dstr, s(:str, "x\n"), s(:str, " y"))), %Q{p <<~E " y"\n x\nE}, %q{}, SINCE_2_3) end def test_ruby_bug_12073 assert_parses( s(:begin, s(:lvasgn, :a, s(:int, 1)), s(:send, nil, :a, s(:kwargs, s(:pair, s(:sym, :b), s(:int, 1))))), %q{a = 1; a b: 1}, %q{}, SINCE_1_9) assert_parses( s(:def, :foo, s(:args, s(:arg, :raise)), s(:send, nil, :raise, s(:const, s(:const, nil, :A), :B), s(:str, ""))), %q{def foo raise; raise A::B, ''; end}, %q{}, SINCE_1_9) end def test_ruby_bug_12402 assert_parses( s(:lvasgn, :foo, s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo = raise(bar) rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:op_asgn, s(:lvasgn, :foo), :+, s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo += raise(bar) rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:op_asgn, s(:indexasgn, 
s(:lvar, :foo), s(:int, 0)), :+, s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo[0] += raise(bar) rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:op_asgn, s(:send, s(:lvar, :foo), :m), :+, s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo.m += raise(bar) rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:op_asgn, s(:send, s(:lvar, :foo), :m), :+, s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo::m += raise(bar) rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:op_asgn, s(:send, s(:lvar, :foo), :C), :+, s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo.C += raise(bar) rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:or_asgn, s(:casgn, s(:lvar, :foo), :C), s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo::C ||= raise(bar) rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:lvasgn, :foo, s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo = raise bar rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:op_asgn, s(:lvasgn, :foo), :+, s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo += raise bar rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:op_asgn, s(:indexasgn, s(:lvar, :foo), s(:int, 0)), :+, s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo[0] += raise bar rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:op_asgn, s(:send, s(:lvar, :foo), :m), :+, s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo.m += raise bar rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:op_asgn, s(:send, s(:lvar, :foo), :m), :+, s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo::m += raise bar rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:op_asgn, s(:send, s(:lvar, :foo), :C), :+, s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo.C += raise bar rescue nil}, %q{}, SINCE_2_4) assert_parses( s(:or_asgn, s(:casgn, s(:lvar, :foo), :C), s(:rescue, s(:send, nil, :raise, s(:lvar, :bar)), s(:resbody, nil, nil, s(:nil)), nil)), %q{foo::C ||= raise bar rescue nil}, %q{}, SINCE_2_4) end def test_ruby_bug_12669 assert_parses( s(:lvasgn, :a, s(:lvasgn, :b, s(:send, nil, :raise, s(:sym, :x)))), %q{a = b = raise :x}, %q{}, SINCE_2_0) assert_parses( s(:op_asgn, s(:lvasgn, :a), :+, s(:lvasgn, :b, s(:send, nil, :raise, s(:sym, :x)))), %q{a += b = raise :x}, %q{}, SINCE_2_4) assert_parses( s(:lvasgn, :a, s(:op_asgn, s(:lvasgn, :b), :+, s(:send, nil, :raise, s(:sym, :x)))), %q{a = b += raise :x}, %q{}, SINCE_2_4) assert_parses( s(:op_asgn, s(:lvasgn, :a), :+, s(:op_asgn, s(:lvasgn, :b), :+, s(:send, nil, :raise, s(:sym, :x)))), %q{a += b += raise :x}, %q{}, SINCE_2_4) end def test_ruby_bug_12686 assert_parses( s(:send, nil, :f, s(:begin, s(:rescue, s(:send, nil, :g), s(:resbody, nil, nil, s(:nil)), nil))), %q{f (g rescue nil)}, %q{}, SINCE_2_4) assert_diagnoses( [:error, :unexpected_token, {:token => 'kRESCUE_MOD'}], %q{f(g rescue nil)}, %q{ ^^^^^^ location}) end def test_ruby_bug_11873 # strings assert_parses( s(:block, s(:send, nil, :a, s(:block, s(:send, nil, :b), s(:args), s(:send, nil, :c, s(:send, nil, :d))), s(:str, "x")), s(:args), nil), %q{a b{c d}, "x" do end}, %q{}, SINCE_2_4) assert_parses( s(:block, s(:send, nil, :a, s(:send, nil, :b, 
s(:send, nil, :c, s(:send, nil, :d))), s(:str, "x")), s(:args), nil), %q{a b(c d), "x" do end}, %q{}, SINCE_2_4) assert_parses( s(:block, s(:send, nil, :a, s(:block, s(:send, nil, :b), s(:args), s(:send, nil, :c, s(:send, nil, :d))), s(:str, "x")), s(:args), nil), %q{a b{c(d)}, "x" do end}, %q{}, SINCE_2_4) assert_parses( s(:block, s(:send, nil, :a, s(:send, nil, :b, s(:send, nil, :c, s(:send, nil, :d))), s(:str, "x")), s(:args), nil), %q{a b(c(d)), "x" do end}, %q{}, SINCE_2_4) # regexps without options assert_parses( s(:block, s(:send, nil, :a, s(:block, s(:send, nil, :b), s(:args), s(:send, nil, :c, s(:send, nil, :d))), s(:regexp, s(:str, "x"), s(:regopt))), s(:args), nil), %q{a b{c d}, /x/ do end}, %q{}, SINCE_2_4) assert_parses( s(:block, s(:send, nil, :a, s(:send, nil, :b, s(:send, nil, :c, s(:send, nil, :d))), s(:regexp, s(:str, "x"), s(:regopt))), s(:args), nil), %q{a b(c d), /x/ do end}, %q{}, SINCE_2_4) assert_parses( s(:block, s(:send, nil, :a, s(:block, s(:send, nil, :b), s(:args), s(:send, nil, :c, s(:send, nil, :d))), s(:regexp, s(:str, "x"), s(:regopt))), s(:args), nil), %q{a b{c(d)}, /x/ do end}, %q{}, SINCE_2_4) assert_parses( s(:block, s(:send, nil, :a, s(:send, nil, :b, s(:send, nil, :c, s(:send, nil, :d))), s(:regexp, s(:str, "x"), s(:regopt))), s(:args), nil), %q{a b(c(d)), /x/ do end}, %q{}, SINCE_2_4) # regexps with options assert_parses( s(:block, s(:send, nil, :a, s(:block, s(:send, nil, :b), s(:args), s(:send, nil, :c, s(:send, nil, :d))), s(:regexp, s(:str, "x"), s(:regopt, :m))), s(:args), nil), %q{a b{c d}, /x/m do end}, %q{}, SINCE_2_4) assert_parses( s(:block, s(:send, nil, :a, s(:send, nil, :b, s(:send, nil, :c, s(:send, nil, :d))), s(:regexp, s(:str, "x"), s(:regopt, :m))), s(:args), nil), %q{a b(c d), /x/m do end}, %q{}, SINCE_2_4) assert_parses( s(:block, s(:send, nil, :a, s(:block, s(:send, nil, :b), s(:args), s(:send, nil, :c, s(:send, nil, :d))), s(:regexp, s(:str, "x"), s(:regopt, :m))), s(:args), nil), %q{a b{c(d)}, /x/m do end}, %q{}, SINCE_2_4) assert_parses( s(:block, s(:send, nil, :a, s(:send, nil, :b, s(:send, nil, :c, s(:send, nil, :d))), s(:regexp, s(:str, "x"), s(:regopt, :m))), s(:args), nil), %q{a b(c(d)), /x/m do end}, %q{}, SINCE_2_4) end def test_parser_bug_198 assert_parses( s(:array, s(:regexp, s(:str, "()\\1"), s(:regopt)), s(:str, "#")), %q{[/()\\1/, ?#]}, %q{}, SINCE_3_1 - SINCE_1_9) end def test_parser_bug_272 assert_parses( s(:block, s(:send, nil, :a, s(:ivar, :@b)), s(:args, s(:procarg0, s(:arg, :c))), nil), %q{a @b do |c|;end}, %q{}, SINCE_1_9) assert_parses( s(:block, s(:send, nil, :a, s(:ivar, :@b)), s(:args, s(:arg, :c)), nil), %q{a @b do |c|;end}, %q{}, %w(1.8)) end def test_bug_lambda_leakage assert_parses( s(:begin, s(:block, s(:lambda), s(:args, s(:arg, :scope)), nil), s(:send, nil, :scope)), %q{->(scope) {}; scope}, %q{}, SINCE_1_9) end def test_bug_regex_verification assert_parses( s(:regexp, s(:str, "#)"), s(:regopt, :x)), %Q{/#)/x}) end def test_bug_do_block_in_hash_brace assert_parses( s(:send, nil, :p, s(:sym, :foo), s(:hash, s(:pair, s(:sym, :a), s(:block, s(:send, nil, :proc), s(:args), nil)), s(:pair, s(:sym, :b), s(:block, s(:send, nil, :proc), s(:args), nil)))), %q{p :foo, {a: proc do end, b: proc do end}}, %q{}, SINCE_2_3) assert_parses( s(:send, nil, :p, s(:sym, :foo), s(:hash, s(:pair, s(:sym, :a), s(:block, s(:send, nil, :proc), s(:args), nil)), s(:pair, s(:sym, :b), s(:block, s(:send, nil, :proc), s(:args), nil)))), %q{p :foo, {:a => proc do end, b: proc do end}}, %q{}, SINCE_2_3) assert_parses( s(:send, 
nil, :p, s(:sym, :foo), s(:hash, s(:pair, s(:sym, :a), s(:block, s(:send, nil, :proc), s(:args), nil)), s(:pair, s(:sym, :b), s(:block, s(:send, nil, :proc), s(:args), nil)))), %q{p :foo, {"a": proc do end, b: proc do end}}, %q{}, SINCE_2_3) assert_parses( s(:send, nil, :p, s(:sym, :foo), s(:hash, s(:pair, s(:block, s(:send, nil, :proc), s(:args), nil), s(:block, s(:send, nil, :proc), s(:args), nil)), s(:pair, s(:sym, :b), s(:block, s(:send, nil, :proc), s(:args), nil)))), %q{p :foo, {proc do end => proc do end, b: proc do end}}, %q{}, SINCE_2_3) assert_parses( s(:send, nil, :p, s(:sym, :foo), s(:hash, s(:kwsplat, s(:block, s(:send, nil, :proc), s(:args), nil)), s(:pair, s(:sym, :b), s(:block, s(:send, nil, :proc), s(:args), nil)))), %q{p :foo, {** proc do end, b: proc do end}}, %q{}, SINCE_2_3) end def test_lparenarg_after_lvar__since_25 assert_parses( s(:send, nil, :meth, s(:send, s(:begin, s(:float, -1.3)), :abs)), %q{meth (-1.3).abs}, %q{}, ALL_VERSIONS - SINCE_2_5) assert_parses( s(:send, s(:send, nil, :foo, s(:float, -1.3)), :abs), %q{foo (-1.3).abs}, %q{}, ALL_VERSIONS - SINCE_2_5) assert_parses( s(:send, nil, :meth, s(:send, s(:begin, s(:float, -1.3)), :abs)), %q{meth (-1.3).abs}, %q{}, SINCE_2_5) assert_parses( s(:send, nil, :foo, s(:send, s(:begin, s(:float, -1.3)), :abs)), %q{foo (-1.3).abs}, %q{}, SINCE_2_5) end def test_context_class [ %q{class A; get_context; end}, %q{class A < B; get_context; end} ].each do |code| assert_context([:in_class], code, ALL_VERSIONS) end end def test_context_module assert_context( [:in_class], %q{module M; get_context; end}, ALL_VERSIONS) end def test_context_def assert_context( [:in_def], %q{def m; get_context; end}, ALL_VERSIONS) assert_context( [:in_def], %q{def m() = get_context}, SINCE_3_0) assert_context( [:in_def], %q{def self.m; get_context; end}, ALL_VERSIONS) assert_context( [:in_def], %q{def self.m() = get_context}, SINCE_3_0) end def test_context_cmd_brace_block [ 'tap foo { get_context }', 'foo.tap foo { get_context }', 'foo::tap foo { get_context }' ].each do |code| assert_context([:in_block], code, ALL_VERSIONS) end end def test_context_brace_block [ 'tap { get_context }', 'foo.tap { get_context }', 'foo::tap { get_context }', 'tap do get_context end', 'foo.tap do get_context end', 'foo::tap do get_context end' ].each do |code| assert_context([:in_block], code, ALL_VERSIONS) end end def test_context_do_block [ %q{tap 1 do get_context end}, %q{foo.tap do get_context end}, %q{foo::tap do get_context end} ].each do |code| assert_context([:in_block], code, ALL_VERSIONS) end end def test_context_lambda [ '->() { get_context }', '->() do get_context end', '-> { get_context }', '-> do get_context end', '->(a = get_context) {}', '->(a = get_context) do end' ].each do |code| assert_context([:in_lambda], code, SINCE_1_9) end end def test_return_in_class assert_parses( s(:class, s(:const, nil, :A), nil, s(:return)), %q{class A; return; end}, %q{}, ALL_VERSIONS - SINCE_2_5) assert_diagnoses( [:error, :invalid_return, {}], %q{class A; return; end}, %q{ ^^^^^^ location}, SINCE_2_5) [ %q{class << foo; return; end}, %q{def m; return; end}, %q{tap { return }}, %q{class A; class << self; return; end; end}, %q{class A; def m; return; end; end}, ].each do |code| refute_diagnoses(code, ALL_VERSIONS) end [ %q{-> do return end}, %q{class A; -> do return end; end}, ].each do |code| refute_diagnoses(code, SINCE_1_9) end end def test_method_definition_in_while_cond assert_parses( s(:while, s(:def, :foo, s(:args), s(:block, s(:send, nil, :tap), s(:args), 
nil)), s(:break)), %q{while def foo; tap do end; end; break; end}, %q{}, SINCE_2_5) assert_parses( s(:while, s(:defs, s(:self), :foo, s(:args), s(:block, s(:send, nil, :tap), s(:args), nil)), s(:break)), %q{while def self.foo; tap do end; end; break; end}, %q{}, SINCE_2_5) assert_parses( s(:while, s(:def, :foo, s(:args, s(:optarg, :a, s(:block, s(:send, nil, :tap), s(:args), nil))), nil), s(:break)), %q{while def foo a = tap do end; end; break; end}, %q{}, SINCE_2_5) assert_parses( s(:while, s(:defs, s(:self), :foo, s(:args, s(:optarg, :a, s(:block, s(:send, nil, :tap), s(:args), nil))), nil), s(:break)), %q{while def self.foo a = tap do end; end; break; end}, %q{}, SINCE_2_5) end def test_class_definition_in_while_cond assert_parses( s(:while, s(:class, s(:const, nil, :Foo), nil, s(:block, s(:send, nil, :tap), s(:args), nil)), s(:break)), %q{while class Foo; tap do end; end; break; end}, %q{}, SINCE_2_5) assert_parses( s(:while, s(:class, s(:const, nil, :Foo), nil, s(:lvasgn, :a, s(:block, s(:send, nil, :tap), s(:args), nil))), s(:break)), %q{while class Foo a = tap do end; end; break; end}, %q{}, SINCE_2_5) assert_parses( s(:while, s(:sclass, s(:self), s(:block, s(:send, nil, :tap), s(:args), nil)), s(:break)), %q{while class << self; tap do end; end; break; end}, %q{}, SINCE_2_5) assert_parses( s(:while, s(:sclass, s(:self), s(:lvasgn, :a, s(:block, s(:send, nil, :tap), s(:args), nil))), s(:break)), %q{while class << self; a = tap do end; end; break; end}, %q{}, SINCE_2_5) end def test_rescue_in_lambda_block assert_diagnoses( [:error, :unexpected_token, { :token => 'kRESCUE'}], %q{-> do rescue; end}, %q{ ~~~~~~ location}, SINCE_1_9 - SINCE_2_6) assert_parses( s(:block, s(:lambda), s(:args), s(:rescue, nil, s(:resbody, nil, nil, nil), nil)), %q{-> do rescue; end}, %q{ ~~~~~~ keyword (rescue.resbody)}, SINCE_2_6) assert_diagnoses( [:error, :unexpected_token, { :token => 'kRESCUE'}], %q{-> { rescue; }}, %q{ ~~~~~~ location}, SINCE_1_9) end def test_ruby_bug_13547 assert_diagnoses( [:error, :unexpected_token, { :token => 'tLCURLY' }], %q{m "x" {}}, %q{ ^ location}, SINCE_2_4) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLCURLY' }], %q{m "#{'x'}" {}}, %q{ ^ location}, SINCE_2_4) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLCURLY' }], %q{m 1 {}}, %q{ ^ location}, SINCE_2_4) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLCURLY' }], %q{m 1.0 {}}, %q{ ^ location}, SINCE_2_4) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLCURLY' }], %q{m 1r {}}, %q{ ^ location}, SINCE_2_4) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLCURLY' }], %q{m 1i {}}, %q{ ^ location}, SINCE_2_4) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLCURLY' }], %q{m :m {}}, %q{ ^ location}, SINCE_2_4) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLCURLY' }], %q{m :"#{m}" {}}, %q{ ^ location}, SINCE_2_4) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLCURLY' }], %q{m %[] {}}, %q{ ^ location}, SINCE_2_4) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLCURLY' }], %q{m 0..1 {}}, %q{ ^ location}, SINCE_2_4) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLCURLY' }], %q{m 0...1 {}}, %q{ ^ location}, SINCE_2_4) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLCURLY' }], %q{m [] {}}, %q{ ^ location}, SINCE_2_5) assert_parses( s(:block, s(:index, s(:send, nil, :meth)), s(:args), nil), %q{meth[] {}}, %q{}, SINCE_2_5 ) assert_diagnoses_many( [ [:warning, :ambiguous_literal], [:error, 
:unexpected_token, { :token => 'tLCURLY' }] ], %q{m /foo/ {}}, %w(2.4 2.5 2.6 2.7)) assert_diagnoses_many( [ [:warning, :ambiguous_literal], [:error, :unexpected_token, { :token => 'tLCURLY' }] ], %q{m /foo/x {}}, %w(2.4 2.5 2.6 2.7)) end def test_bug_447 assert_parses( s(:block, s(:send, nil, :m, s(:array)), s(:args), nil), %q{m [] do end}, %q{}, ALL_VERSIONS) assert_parses( s(:block, s(:send, nil, :m, s(:array), s(:int, 1)), s(:args), nil), %q{m [], 1 do end}, %q{}, ALL_VERSIONS) end def test_bug_435 assert_parses( s(:dstr, s(:begin, s(:block, s(:lambda), s(:args, s(:arg, :foo)), nil))), %q{"#{-> foo {}}"}, %q{}, SINCE_1_9) end def test_bug_452 assert_parses( s(:begin, s(:send, nil, :td, s(:send, s(:begin, s(:int, 1500)), :toString)), s(:block, s(:send, s(:send, nil, :td), :num), s(:args), nil)), %q{td (1_500).toString(); td.num do; end}, %q{}, ALL_VERSIONS) end def test_bug_466 assert_parses( s(:block, s(:send, nil, :foo, s(:dstr, s(:begin, s(:send, s(:begin, s(:send, s(:int, 1), :+, s(:int, 1))), :to_i)))), s(:args), nil), %q{foo "#{(1+1).to_i}" do; end}, %q{}, ALL_VERSIONS) end def test_bug_473 assert_parses( s(:send, nil, :m, s(:dstr, s(:begin, s(:array)))), %q{m "#{[]}"}, %q{}, ALL_VERSIONS) end def test_bug_480 assert_parses( s(:send, nil, :m, s(:dstr, s(:begin), s(:begin, s(:begin)))), %q{m "#{}#{()}"}, %q{}, ALL_VERSIONS) end def test_bug_481 assert_parses( s(:begin, s(:send, nil, :m, s(:def, :x, s(:args), nil)), s(:block, s(:send, s(:int, 1), :tap), s(:args), nil)), %q{m def x(); end; 1.tap do end}, %q{}, ALL_VERSIONS) end def test_parser_bug_490 assert_parses( s(:def, :m, s(:args), s(:sclass, s(:self), s(:class, s(:const, nil, :C), nil, nil))), %q{def m; class << self; class C; end; end; end}, %q{}, ALL_VERSIONS) assert_parses( s(:def, :m, s(:args), s(:sclass, s(:self), s(:module, s(:const, nil, :M), nil))), %q{def m; class << self; module M; end; end; end}, %q{}, ALL_VERSIONS) assert_parses( s(:def, :m, s(:args), s(:sclass, s(:self), s(:casgn, nil, :A, s(:nil)))), %q{def m; class << self; A = nil; end; end}, %q{}, ALL_VERSIONS) end def test_slash_newline_in_heredocs assert_parses( s(:dstr, s(:str, "1 2\n"), s(:str, "3\n")), %Q{<<~E\n 1 \\\n 2\n 3\nE\n}, %q{}, SINCE_2_3) assert_parses( s(:dstr, s(:str, " 1 2\n"), s(:str, " 3\n")), %Q{<<-E\n 1 \\\n 2\n 3\nE\n}, %q{}, ALL_VERSIONS) end def test_ambiuous_quoted_label_in_ternary_operator assert_parses( s(:if, s(:send, nil, :a), s(:send, s(:send, nil, :b), :&, s(:str, '')), s(:nil)), %q{a ? b & '': nil}, %q{}, ALL_VERSIONS) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLABEL_END' }], %q{a ? b | '': nil}, %q{ ^~ location}, SINCE_2_2) assert_diagnoses( [:error, :unexpected_token, { :token => 'tTILDE' }], %q{a ? b ~ '': nil}, %q{ ^ location}, SINCE_2_2) assert_diagnoses( [:error, :unexpected_token, { :token => 'tBANG' }], %q{a ? b ! 
'': nil}, %q{ ^ location}, SINCE_2_2) end def test_lbrace_arg_after_command_args assert_parses( s(:block, s(:send, nil, :let, s(:begin, s(:sym, :a))), s(:args), s(:block, s(:send, nil, :m), s(:args), nil)), %q{let (:a) { m do; end }}, %q{}, ALL_VERSIONS) end def test_ruby_bug_14690 assert_parses( s(:block, s(:send, nil, :let, s(:begin)), s(:args), s(:block, s(:send, nil, :m, s(:send, nil, :a)), s(:args), nil)), %q{let () { m(a) do; end }}, %q{}, SINCE_2_0) end def test_parser_bug_507 assert_parses( s(:lvasgn, :m, s(:block, s(:lambda), s(:args, s(:restarg, :args)), nil)), %q{m = -> *args do end}, %q{}, SINCE_1_9) end def test_parser_bug_518 assert_parses( s(:class, s(:const, nil, :A), s(:const, nil, :B), nil), "class A < B\nend", %q{}, ALL_VERSIONS) end def test_parser_bug_525 assert_parses( s(:block, s(:send, nil, :m1, s(:kwargs, s(:pair, s(:sym, :k), s(:send, nil, :m2)))), s(:args), s(:block, s(:send, nil, :m3), s(:args), nil)), 'm1 :k => m2 do; m3() do end; end', %q{}, ALL_VERSIONS) end def test_parser_slash_slash_n_escaping_in_literals [ ["'", "'", s(:dstr, s(:str, "a\\\n"), s(:str, "b")) ], ["<<-'HERE'\n", "\nHERE", s(:dstr, s(:str, "a\\\n"), s(:str, "b\n"))], ["%q{", "}", s(:dstr, s(:str, "a\\\n"), s(:str, "b")) ], ['"', '"', s(:str, "ab") ], ["<<-\"HERE\"\n", "\nHERE", s(:str, "ab\n") ], ["%{", "}", s(:str, "ab") ], ["%Q{", "}", s(:str, "ab") ], ["%w{", "}", s(:array, s(:str, "a\nb")) ], ["%W{", "}", s(:array, s(:str, "a\nb")) ], ["%i{", "}", s(:array, s(:sym, :"a\nb")) ], ["%I{", "}", s(:array, s(:sym, :"a\nb")) ], [":'", "'", s(:dsym, s(:str, "a\\\n"), s(:str, "b")) ], ["%s{", "}", s(:dsym, s(:str, "a\\\n"), s(:str, "b")) ], [':"', '"', s(:sym, :ab) ], ['/', '/', s(:regexp, s(:str, "ab"), s(:regopt)) ], ['%r{', '}', s(:regexp, s(:str, "ab"), s(:regopt)) ], ['%x{', '}', s(:xstr, s(:str, "ab")) ], ['`', '`', s(:xstr, s(:str, "ab")) ], ["<<-`HERE`\n", "\nHERE", s(:xstr, s(:str, "ab\n")) ], ].each do |literal_s, literal_e, expected| source = literal_s + "a\\\nb" + literal_e assert_parses( expected, source, %q{}, SINCE_2_0) end end def test_unterimated_heredoc_id__27 assert_diagnoses( [:error, :unterminated_heredoc_id], %Q{<<\"EOS\n\nEOS\n}, %q{^ location}, SINCE_2_7) assert_diagnoses( [:error, :unterminated_heredoc_id], %Q{<<\"EOS\n\"\nEOS\n}, %q{^ location}, SINCE_2_7) %W[\r\n \n].each do |nl| assert_diagnoses( [:error, :unterminated_heredoc_id], %Q{<<\"\r\"#{nl}\r#{nl}}, %q{^ location}, SINCE_2_7) end end def test_numbered_args_after_27 assert_parses( s(:numblock, s(:send, nil, :m), 9, s(:send, s(:lvar, :_1), :+, s(:lvar, :_9))), %q{m { _1 + _9 }}, %q{^^^^^^^^^^^^^ expression | ^^ name (send/2.lvar/1) | ^^ expression (send/2.lvar/1) | ^^ name (send/2.lvar/2) | ^^ expression (send/2.lvar/2)}, SINCE_2_7) assert_parses( s(:numblock, s(:send, nil, :m), 9, s(:send, s(:lvar, :_1), :+, s(:lvar, :_9))), %q{m do _1 + _9 end}, %q{^^^^^^^^^^^^^^^^ expression | ^^ name (send/2.lvar/1) | ^^ expression (send/2.lvar/1) | ^^ name (send/2.lvar/2) | ^^ expression (send/2.lvar/2)}, SINCE_2_7) # Lambdas assert_parses( s(:numblock, s(:lambda), 9, s(:send, s(:lvar, :_1), :+, s(:lvar, :_9))), %q{-> { _1 + _9}}, %q{^^^^^^^^^^^^^ expression | ^^ name (send.lvar/1) | ^^ expression (send.lvar/1) | ^^ name (send.lvar/2) | ^^ expression (send.lvar/2)}, SINCE_2_7) assert_parses( s(:numblock, s(:lambda), 9, s(:send, s(:lvar, :_1), :+, s(:lvar, :_9))), %q{-> do _1 + _9 end}, %q{^^^^^^^^^^^^^^^^^ expression | ^^ name (send.lvar/1) | ^^ expression (send.lvar/1) | ^^ name (send.lvar/2) | ^^ expression (send.lvar/2)}, 
SINCE_2_7) end def test_numbered_and_ordinary_parameters # Blocks assert_diagnoses( [:error, :ordinary_param_defined], %q{m { || _1 } }, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :ordinary_param_defined], %q{m { |a| _1 } }, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :ordinary_param_defined], %q{m do || _1 end }, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :ordinary_param_defined], %q{m do |a, b| _1 end }, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :ordinary_param_defined], %q{m { |x = _1| }}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :ordinary_param_defined], %q{m { |x: _1| }}, %q{ ^^ location}, SINCE_2_7) # Lambdas assert_diagnoses( [:error, :ordinary_param_defined], %q{->() { _1 } }, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :ordinary_param_defined], %q{->(a) { _1 } }, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :ordinary_param_defined], %q{->() do _1 end }, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :ordinary_param_defined], %q{->(a, b) do _1 end}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :ordinary_param_defined], %q{->(x=_1) {}}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :ordinary_param_defined], %q{->(x: _1) {}}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :ordinary_param_defined], %q{proc {|;a| _1}}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :ordinary_param_defined], "proc {|\n| _1}", %q{ ^^ location}, SINCE_2_7) end def test_numparam_outside_block assert_parses( s(:class, s(:const, nil, :A), nil, s(:send, nil, :_1)), %q{class A; _1; end}, %q{}, SINCE_2_7) assert_parses( s(:module, s(:const, nil, :A), s(:send, nil, :_1)), %q{module A; _1; end}, %q{}, SINCE_2_7) assert_parses( s(:sclass, s(:lvar, :foo), s(:send, nil, :_1)), %q{class << foo; _1; end}, %q{}, SINCE_2_7) assert_parses( s(:defs, s(:self), :m, s(:args), s(:send, nil, :_1)), %q{def self.m; _1; end}, %q{}, SINCE_2_7) assert_parses( s(:send, nil, :_1), %q{_1}, %q{}, SINCE_2_7) end def test_assignment_to_numparams assert_parses( s(:block, s(:send, nil, :proc), s(:args), s(:lvasgn, :_1, s(:nil))), %q{proc {_1 = nil}}, %q{}, %w(2.7)) assert_diagnoses( [:error, :cant_assign_to_numparam, { :name => '_1' }], %q{proc {_1; _1 = nil}}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :cant_assign_to_numparam, { :name => '_1' }], %q{proc {_1; _1, foo = [nil, nil]}}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :cant_assign_to_numparam, { :name => '_1' }], %q{proc {_9; _1 = nil}}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :cant_assign_to_numparam, { :name => '_9' }], %q{proc {_1; _9 = nil}}, %q{ ^^ location}, SINCE_2_7) refute_diagnoses( %q{proc { _1 = nil; _1}}, %w(2.7)) end def test_numparams_in_nested_blocks assert_diagnoses( [:error, :numparam_used_in_outer_scope], %q{foo { _1; bar { _2 }; }}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :numparam_used_in_outer_scope], %q{-> { _1; -> { _2 }; }}, %q{ ^^ location}, SINCE_2_7) [ ['class A', 'end'], ['class << foo', 'end'], ['def m', 'end'], ['def self.m', 'end'] ].each do |open_scope, close_scope| refute_diagnoses( "proc { _1; #{open_scope}; proc { _2 }; #{close_scope}; }", SINCE_2_7) refute_diagnoses( "-> { _1; #{open_scope}; -> { _2 }; #{close_scope}; }", SINCE_2_7) end end def test_ruby_bug_15789 assert_parses( s(:send, nil, :m, s(:block, s(:lambda), s(:args, s(:optarg, :a, s(:numblock, s(:lambda), 1, s(:lvar, :_1)))), s(:lvar, :a))), %q{m ->(a = ->{_1}) {a}}, %q{}, SINCE_2_7) 
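# [Bug #15789], as exercised by the assertion above and the one below: a
# numbered parameter such as `_1` inside a lambda that is used as the default
# value of an outer lambda's optional or keyword argument binds to the inner
# lambda, i.e. `->{_1}` is parsed as its own numblock and does not leak into
# the enclosing lambda's scope.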
assert_parses( s(:send, nil, :m, s(:block, s(:lambda), s(:args, s(:kwoptarg, :a, s(:numblock, s(:lambda), 1, s(:lvar, :_1)))), s(:lvar, :a))), %q{m ->(a: ->{_1}) {a}}, %q{}, SINCE_2_7) end def test_ruby_bug_15839 assert_diagnoses( [:error, :invalid_encoding], %q{# encoding: cp932 <<-TEXT \xe9\x9d\u1234 TEXT }) assert_diagnoses( [:error, :invalid_encoding], %q{ # encoding: cp932 <<-TEXT \xe9\x9d \u1234 TEXT }) assert_diagnoses( [:error, :invalid_encoding], %q{ # encoding: cp932 <<-TEXT \u1234\xe9\x9d TEXT }) assert_diagnoses( [:error, :invalid_encoding], %q{ # encoding: cp932 <<-TEXT \u1234 \xe9\x9d TEXT }) end def test_numparam_as_symbols assert_diagnoses( [:error, :ivar_name, { :name => '@' }], %q{:@}, %q{ ^ location}, SINCE_2_7) assert_diagnoses( [:error, :ivar_name, { :name => '@1' }], %q{:@1}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@' }], %q{:@@}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{:@@1}, %q{ ^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :gvar_name, { :name => '$01234' }], %q{:$01234}, %q{ ^^^^^^ location}, SINCE_3_3) end def test_csend_inside_lhs_of_masgn__since_27 assert_diagnoses( [:error, :csend_in_lhs_of_masgn], %q{*a&.x = 0}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :csend_in_lhs_of_masgn], %q{a&.x, = 0}, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :csend_in_lhs_of_masgn], %q{*a&.A = 0}, %q{ ^^ location}, SINCE_2_7) end def test_parser_bug_604 assert_parses( s(:block, s(:send, nil, :m, s(:send, s(:send, nil, :a), :+, s(:send, nil, :b))), s(:args), nil), %q{m a + b do end}, %q{}, ALL_VERSIONS) end def test_comments_before_leading_dot__27 assert_parses( s(:send, s(:send, nil, :a), :foo), %Q{a #\n#\n.foo\n}, %q{}, SINCE_2_7) assert_parses( s(:send, s(:send, nil, :a), :foo), %Q{a #\n #\n.foo\n}, %q{}, SINCE_2_7) assert_parses( s(:csend, s(:send, nil, :a), :foo), %Q{a #\n#\n&.foo\n}, %q{}, SINCE_2_7) assert_parses( s(:csend, s(:send, nil, :a), :foo), %Q{a #\n #\n&.foo\n}, %q{}, SINCE_2_7) end def test_comments_before_leading_dot__before_27 assert_diagnoses( [:error, :unexpected_token, { :token => 'tDOT' }], %q{a #!#!.foo!}.gsub('!', "\n"), %q{ ^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tAMPER' }], %q{a #!#!&.foo!}.gsub('!', "\n"), %q{ ^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tDOT' }], %q{a #!#!.:foo!}.gsub('!', "\n"), %q{ ^ location}, ALL_VERSIONS - SINCE_2_7) end def test_circular_argument_reference_error assert_diagnoses( [:error, :circular_argument_reference, { :var_name => 'foo' }], %q{def m(foo = foo) end}, %q{ ^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :circular_argument_reference, { :var_name => 'foo' }], %q{def m(foo: foo) end}, %q{ ^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :circular_argument_reference, { :var_name => 'foo' }], %q{m { |foo = foo| } }, %q{ ^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :circular_argument_reference, { :var_name => 'foo' }], %q{m { |foo: foo| } }, %q{ ^^^ location}, SINCE_2_7) # Traversing assert_diagnoses( [:error, :circular_argument_reference, { :var_name => 'foo' }], %q{def m(foo = class << foo; end) end}, %q{ ^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :circular_argument_reference, { :var_name => 'foo' }], %q{def m(foo = def foo.m; end); end}, %q{ ^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :circular_argument_reference, { :var_name => 'foo' }], %q{m { |foo = 
proc { 1 + foo }| } }, %q{ ^^^ location}, SINCE_2_7) # Valid cases [ 'm { |foo = class A; foo; end| }', 'm { |foo = class << self; foo; end| }', 'm { |foo = def m(foo = bar); foo; end| }', 'm { |foo = def m(bar = foo); foo; end| }', 'm { |foo = def self.m(bar = foo); foo; end| }', 'def m(foo = def m; foo; end) end', 'def m(foo = def self.m; foo; end) end', 'm { |foo = proc { |bar| 1 + foo }| }', 'm { |foo = proc { || 1 + foo }| }' ].each do |code| refute_diagnoses(code, SINCE_2_7) end end def test_forward_args_legacy Parser::Builders::Default.emit_forward_arg = false assert_parses( s(:def, :foo, s(:forward_args), s(:send, nil, :bar, s(:forwarded_args))), %q{def foo(...); bar(...); end}, %q{ ~ begin (forward_args) | ~~~~~ expression (forward_args) | ~ end (forward_args) | ~~~ expression (send.forwarded_args)}, SINCE_2_7) assert_parses( s(:def, :foo, s(:forward_args), s(:super, s(:forwarded_args))), %q{def foo(...); super(...); end}, %q{ ~ begin (forward_args) | ~~~~~ expression (forward_args) | ~ end (forward_args) | ~~~ expression (super.forwarded_args)}, SINCE_2_7) assert_parses( s(:def, :foo, s(:forward_args), nil), %q{def foo(...); end}, %q{}, SINCE_2_7) ensure Parser::Builders::Default.emit_forward_arg = true end def test_forward_arg assert_parses( s(:def, :foo, s(:args, s(:forward_arg)), s(:send, nil, :bar, s(:forwarded_args))), %q{def foo(...); bar(...); end}, %q{ ~ begin (args) | ~~~~~ expression (args) | ~ end (args) | ~~~ expression (args.forward_arg) | ~~~ expression (send.forwarded_args)}, SINCE_2_7) end def test_forward_args_invalid assert_diagnoses( [:error, :block_and_blockarg], %q{def foo(...) bar(...) { }; end}, %q{ ^^^ location | ~ highlights (0)}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tBDOT3' }], %q{foo do |...| end}, %q{ ^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tBDOT3' }], %q{foo { |...| }}, %q{ ^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tBDOT3' }], %q{def foo(x,y,z); bar(...); end}, %q{ ^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tBDOT3' }], %q{def foo(x,y,z); bar(x, y, z, ...); end}, %q{ ^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tBDOT3' }], %q{def foo(x,y,z); super(...); end}, %q{ ^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tDOT3' }], %q{->... {}}, %q{ ^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tBDOT3' }], %q{->(...) {}}, %q{ ^^^ location}, ['2.7']) assert_diagnoses( [:error, :unexpected_token, { :token => 'tDOT3' }], %q{->(...) {}}, %q{ ^^^ location}, SINCE_3_0) # Here and below the parser asssumes that # it can be a beginningless range, so the error comes after reducing right paren assert_diagnoses( [:error, :unexpected_token, { :token => 'tRPAREN' }], %q{def foo(...); yield(...); end}, %q{ ^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tRPAREN' }], %q{def foo(...); return(...); end}, %q{ ^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tRPAREN' }], %q{def foo(...); a = (...); end}, %q{ ^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tRBRACK' }], %q{def foo(...); [...]; end}, %q{ ^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tRBRACK' }], %q{def foo(...) 
bar[...]; end}, %q{ ^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tRBRACK' }], %q{def foo(...) bar[...] = x; end}, %q{ ^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tRPAREN' }], %q{def foo(...) defined?(...); end}, %q{ ^ location}, SINCE_2_7) assert_diagnoses( [:error, :unexpected_token, { :token => 'tDOT3' }], %q{def foo ...; end}, %q{ ^^^ location}, SINCE_3_1 - SINCE_2_7) end def test_trailing_forward_arg assert_parses( s(:def, :foo, s(:args, s(:arg, :a), s(:arg, :b), s(:forward_arg)), s(:send, nil, :bar, s(:lvar, :a), s(:int, 42), s(:forwarded_args))), %q{def foo(a, b, ...); bar(a, 42, ...); end}, %q{ ~ begin (args) | ~~~~~~~~~~~ expression (args) | ~ end (args) | ~~~ expression (args.forward_arg)}, SINCE_2_7) end def test_erange_without_parentheses_at_eol assert_diagnoses( [:warning, :triple_dot_at_eol], %Q{1...\n2}, %q{ ^^^ location}, SINCE_2_7) refute_diagnoses('(1...)', SINCE_2_7) refute_diagnoses("(1...\n)", SINCE_2_7) refute_diagnoses("[1...\n]", SINCE_2_7) refute_diagnoses("{a: 1...\n2}", SINCE_2_7) end def test_embedded_document_with_eof refute_diagnoses("=begin\n""=end", SINCE_2_7) refute_diagnoses("=begin\n""=end\0", SINCE_2_7) refute_diagnoses("=begin\n""=end\C-d", SINCE_2_7) refute_diagnoses("=begin\n""=end\C-z", SINCE_2_7) assert_diagnoses( [:fatal, :embedded_document], "=begin\n", %q{}, SINCE_2_7) assert_diagnoses( [:fatal, :embedded_document], "=begin", %q{}, SINCE_2_7) end def test_interp_digit_var # '#@1' assert_parses( s(:str, '#@1'), %q{ '#@1' }, %q{}, ALL_VERSIONS) assert_parses( s(:str, '#@@1'), %q{ '#@@1' }, %q{}, ALL_VERSIONS) # <<-'HERE' # #@1 # HERE assert_parses( s(:str, '#@1' + "\n"), %q{<<-'HERE'!#@1!HERE}.gsub('!', "\n"), %q{}, ALL_VERSIONS) assert_parses( s(:str, '#@@1' + "\n"), %q{<<-'HERE'!#@@1!HERE}.gsub('!', "\n"), %q{}, ALL_VERSIONS) # %q{#@1} assert_parses( s(:str, '#@1'), %q{ %q{#@1} }, %q{}, ALL_VERSIONS) assert_parses( s(:str, '#@@1'), %q{ %q{#@@1} }, %q{}, ALL_VERSIONS) # "#@1" assert_diagnoses( [:error, :ivar_name, { :name => '@1' }], %q{ "#@1" }, %q{ ^^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{ "#@@1" }, %q{ ^^^ location}, ALL_VERSIONS - SINCE_2_7) assert_parses( s(:str, '#@1'), %q{ "#@1" }, %q{}, SINCE_2_7) assert_parses( s(:str, '#@@1'), %q{ "#@@1" }, %q{}, SINCE_2_7) # <<-"HERE" # #@1 # HERE assert_diagnoses( [:error, :ivar_name, { :name => '@1' }], %q{ <<-"HERE"!#@1!HERE }.gsub('!', "\n"), %q{ ^^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{ <<-"HERE"!#@@1!HERE }.gsub('!', "\n"), %q{ ^^^ location}, ALL_VERSIONS - SINCE_2_7) assert_parses( s(:str, '#@1' + "\n"), %q{<<-"HERE"!#@1!HERE}.gsub('!', "\n"), %q{}, SINCE_2_7) assert_parses( s(:str, '#@@1' + "\n"), %q{<<-"HERE"!#@@1!HERE}.gsub('!', "\n"), %q{}, SINCE_2_7) # %{#@1} assert_diagnoses( [:error, :ivar_name, { :name => '@1' }], %q{ %{#@1} }, %q{ ^^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{ %{#@@1} }, %q{ ^^^ location}, ALL_VERSIONS - SINCE_2_7) assert_parses( s(:str, '#@1'), %q{ %{#@1} }, %q{}, SINCE_2_7) assert_parses( s(:str, '#@@1'), %q{ %{#@@1} }, %q{}, SINCE_2_7) # %Q{#@1} assert_diagnoses( [:error, :ivar_name, { :name => '@1' }], %q{ %Q{#@1} }, %q{ ^^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{ %Q{#@@1} }, %q{ ^^^ location}, ALL_VERSIONS - SINCE_2_7) assert_parses( s(:str, '#@1'), %q{ 
%Q{#@1} }, %q{}, SINCE_2_7) assert_parses( s(:str, '#@@1'), %q{ %Q{#@@1} }, %q{}, SINCE_2_7) # %w[#@1] assert_parses( s(:array, s(:str, '#@1')), %q{ %w[ #@1 ] }, %q{}, ALL_VERSIONS) assert_parses( s(:array, s(:str, '#@@1')), %q{ %w[ #@@1 ] }, %q{}, ALL_VERSIONS) # %W[#@1] assert_diagnoses( [:error, :ivar_name, { :name => '@1' }], %q{ %W[#@1] }, %q{ ^^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{ %W[#@@1] }, %q{ ^^^ location}, ALL_VERSIONS - SINCE_2_7) assert_parses( s(:array, s(:str, '#@1')), %q{ %W[#@1] }, %q{}, SINCE_2_7) assert_parses( s(:array, s(:str, '#@@1')), %q{ %W[#@@1] }, %q{}, SINCE_2_7) # %i[#@1] assert_parses( s(:array, s(:sym, :'#@1')), %q{ %i[ #@1 ] }, %q{}, SINCE_2_0) assert_parses( s(:array, s(:sym, :'#@@1')), %q{ %i[ #@@1 ] }, %q{}, SINCE_2_0) # %I[#@1] assert_diagnoses( [:error, :ivar_name, { :name => '@1' }], %q{ %I[#@1] }, %q{ ^^ location}, SINCE_2_0 - SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{ %I[#@@1] }, %q{ ^^^ location}, SINCE_2_0 - SINCE_2_7) assert_parses( s(:array, s(:sym, :'#@1')), %q{ %I[#@1] }, %q{}, SINCE_2_7) assert_parses( s(:array, s(:sym, :'#@@1')), %q{ %I[#@@1] }, %q{}, SINCE_2_7) # :'#@1' assert_parses( s(:sym, :'#@1'), %q{ :'#@1' }, %q{}, ALL_VERSIONS) assert_parses( s(:sym, :'#@@1'), %q{ :'#@@1' }, %q{}, ALL_VERSIONS) # %s{#@1} assert_parses( s(:sym, :'#@1'), %q{ %s{#@1} }, %q{}, ALL_VERSIONS) assert_parses( s(:sym, :'#@@1'), %q{ %s{#@@1} }, %q{}, ALL_VERSIONS) # :"#@1" assert_diagnoses( [:error, :ivar_name, { :name => '@1' }], %q{ :"#@1" }, %q{ ^^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{ :"#@@1" }, %q{ ^^^ location}, ALL_VERSIONS - SINCE_2_7) assert_parses( s(:sym, :'#@1'), %q{ :"#@1" }, %q{}, SINCE_2_7) assert_parses( s(:sym, :'#@@1'), %q{ :"#@@1" }, %q{}, SINCE_2_7) # /#@1/ assert_diagnoses( [:error, :ivar_name, { :name => '@1' }], %q{ /#@1/ }, %q{ ^^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{ /#@@1/ }, %q{ ^^^ location}, ALL_VERSIONS - SINCE_2_7) assert_parses( s(:regexp, s(:str, '#@1'), s(:regopt)), %q{ /#@1/ }, %q{}, SINCE_2_7) assert_parses( s(:regexp, s(:str, '#@@1'), s(:regopt)), %q{ /#@@1/ }, %q{}, SINCE_2_7) # %r{#@1} assert_diagnoses( [:error, :ivar_name, { :name => '@1' }], %q{ %r{#@1} }, %q{ ^^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{ %r{#@@1} }, %q{ ^^^ location}, ALL_VERSIONS - SINCE_2_7) assert_parses( s(:regexp, s(:str, '#@1'), s(:regopt)), %q{ %r{#@1} }, %q{}, SINCE_2_7) assert_parses( s(:regexp, s(:str, '#@@1'), s(:regopt)), %q{ %r{#@@1} }, %q{}, SINCE_2_7) # %x{#@1} assert_diagnoses( [:error, :ivar_name, { :name => '@1' }], %q{ %x{#@1} }, %q{ ^^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{ %x{#@@1} }, %q{ ^^^ location}, ALL_VERSIONS - SINCE_2_7) assert_parses( s(:xstr, s(:str, '#@1')), %q{ %x{#@1} }, %q{}, SINCE_2_7) assert_parses( s(:xstr, s(:str, '#@@1')), %q{ %x{#@@1} }, %q{}, SINCE_2_7) # `#@1` assert_diagnoses( [:error, :ivar_name, { :name => '@1' }], %q{ `#@1` }, %q{ ^^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{ `#@@1` }, %q{ ^^^ location}, ALL_VERSIONS - SINCE_2_7) assert_parses( s(:xstr, s(:str, '#@1')), %q{ `#@1` }, %q{}, SINCE_2_7) assert_parses( s(:xstr, s(:str, '#@@1')), %q{ `#@@1` }, %q{}, SINCE_2_7) # <<-`HERE` # #@1 # HERE assert_diagnoses( 
[:error, :ivar_name, { :name => '@1' }], %q{ <<-`HERE`!#@1!HERE }.gsub('!', "\n"), %q{ ^^ location}, ALL_VERSIONS - SINCE_2_7) assert_diagnoses( [:error, :cvar_name, { :name => '@@1' }], %q{ <<-`HERE`!#@@1!HERE }.gsub('!', "\n"), %q{ ^^^ location}, ALL_VERSIONS - SINCE_2_7) assert_parses( s(:xstr, s(:str, '#@1' + "\n")), %q{<<-`HERE`!#@1!HERE}.gsub('!', "\n"), %q{}, SINCE_2_7) assert_parses( s(:xstr, s(:str, '#@@1' + "\n")), %q{<<-`HERE`!#@@1!HERE}.gsub('!', "\n"), %q{}, SINCE_2_7) end def assert_parses_pattern_match(ast, code, source_maps = '', versions = SINCE_2_7) case_pre = "case foo; " source_maps_offset = case_pre.length source_maps_prefix = ' ' * source_maps_offset source_maps = source_maps .lines .map { |line| source_maps_prefix + line.sub(/^\s*\|/, '') } .join("\n") assert_parses( s(:case_match, s(:lvar, :foo), ast, nil), "#{case_pre}#{code}; end", source_maps, versions ) end def test_pattern_matching_single_match assert_parses_pattern_match( s(:in_pattern, s(:match_var, :x), nil, s(:lvar, :x)), %q{in x then x}, %q{~~ keyword (in_pattern) |~~~~~~~~~~~ expression (in_pattern) | ~~~~ begin (in_pattern) | ~ expression (in_pattern.match_var) | ~ name (in_pattern.match_var)} ) end def test_pattern_matching_no_body assert_parses_pattern_match( s(:in_pattern, s(:int, 1), nil, nil), %q{in 1} ) end def test_pattern_matching_if_unless_modifiers assert_parses_pattern_match( s(:in_pattern, s(:match_var, :x), s(:if_guard, s(:true)), s(:nil) ), %q{in x if true; nil}, %q{~~ keyword (in_pattern) |~~~~~~~~~~~~~~~~~ expression (in_pattern) | ~ begin (in_pattern) | ~~ keyword (in_pattern.if_guard) | ~~~~~~~ expression (in_pattern.if_guard)} ) assert_parses_pattern_match( s(:in_pattern, s(:match_var, :x), s(:unless_guard, s(:true)), s(:nil) ), %q{in x unless true; nil}, %q{~~ keyword (in_pattern) |~~~~~~~~~~~~~~~~~~~~~ expression (in_pattern) | ~ begin (in_pattern) | ~~~~~~ keyword (in_pattern.unless_guard) | ~~~~~~~~~~~ expression (in_pattern.unless_guard)} ) end def test_pattern_matching_pin_variable assert_parses_pattern_match( s(:in_pattern, s(:pin, s(:lvar, :foo)), nil, s(:nil)), %q{in ^foo then nil}, %q{ ~ selector (in_pattern.pin) | ~~~~ expression (in_pattern.pin) | ~~~ name (in_pattern.pin.lvar)} ) end def test_pattern_matching_implicit_array_match assert_parses_pattern_match( s(:in_pattern, s(:array_pattern_with_tail, s(:match_var, :x)), nil, s(:nil)), %q{in x, then nil}, %q{ ~~ expression (in_pattern.array_pattern_with_tail)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_rest, s(:match_var, :x))), nil, s(:nil)), %q{in *x then nil}, %q{ ~~ expression (in_pattern.array_pattern) | ~ operator (in_pattern.array_pattern.match_rest) | ~ name (in_pattern.array_pattern.match_rest.match_var)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_rest)), nil, s(:nil)), %q{in * then nil}, %q{ ~ expression (in_pattern.array_pattern) | ~ operator (in_pattern.array_pattern.match_rest)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_var, :x), s(:match_var, :y)), nil, s(:nil)), %q{in x, y then nil}, %q{ ~~~~ expression (in_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern_with_tail, s(:match_var, :x), s(:match_var, :y)), nil, s(:nil)), %q{in x, y, then nil}, %q{ ~~~~~ expression (in_pattern.array_pattern_with_tail)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_var, :x), s(:match_rest, s(:match_var, :y)), s(:match_var, :z)), nil, s(:nil)), %q{in x, *y, z then nil}, %q{ 
~~~~~~~~ expression (in_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_rest, s(:match_var, :x)), s(:match_var, :y), s(:match_var, :z)), nil, s(:nil)), %q{in *x, y, z then nil}, %q{ ~~~~~~~~ expression (in_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:int, 1), s(:str, 'a'), s(:array_pattern), s(:hash_pattern)), nil, s(:nil)), %q{in 1, "a", [], {} then nil}, %q{ ~~~~~~~~~~~~~~ expression (in_pattern.array_pattern)} ) end def test_pattern_matching_explicit_array_match assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_var, :x)), nil, s(:nil)), %q{in [x] then nil}, %q{ ~~~ expression (in_pattern.array_pattern) | ~ begin (in_pattern.array_pattern) | ~ end (in_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern_with_tail, s(:match_var, :x)), nil, s(:nil)), %q{in [x,] then nil}, %q{ ~~~~ expression (in_pattern.array_pattern_with_tail) | ~ begin (in_pattern.array_pattern_with_tail) | ~ end (in_pattern.array_pattern_with_tail)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_var, :x), s(:match_var, :y)), nil, s(:true)), %q{in [x, y] then true}, %q{ ~~~~~~ expression (in_pattern.array_pattern) | ~ begin (in_pattern.array_pattern) | ~ end (in_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern_with_tail, s(:match_var, :x), s(:match_var, :y)), nil, s(:true)), %q{in [x, y,] then true}, %q{ ~~~~~~~ expression (in_pattern.array_pattern_with_tail) | ~ begin (in_pattern.array_pattern_with_tail) | ~ end (in_pattern.array_pattern_with_tail)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_var, :x), s(:match_var, :y), s(:match_rest)), nil, s(:true)), %q{in [x, y, *] then true}, %q{ ~~~~~~~~~ expression (in_pattern.array_pattern) | ~ begin (in_pattern.array_pattern) | ~ end (in_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_var, :x), s(:match_var, :y), s(:match_rest, s(:match_var, :z))), nil, s(:true)), %q{in [x, y, *z] then true}, %q{ ~~~~~~~~~~ expression (in_pattern.array_pattern) | ~ begin (in_pattern.array_pattern) | ~ end (in_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_var, :x), s(:match_rest, s(:match_var, :y)), s(:match_var, :z)), nil, s(:true)), %q{in [x, *y, z] then true}, %q{ ~~~~~~~~~~ expression (in_pattern.array_pattern) | ~ begin (in_pattern.array_pattern) | ~ end (in_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_var, :x), s(:match_rest), s(:match_var, :y)), nil, s(:true)), %q{in [x, *, y] then true}, %q{ ~~~~~~~~~ expression (in_pattern.array_pattern) | ~ begin (in_pattern.array_pattern) | ~ end (in_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_rest, s(:match_var, :x)), s(:match_var, :y)), nil, s(:true)), %q{in [*x, y] then true}, %q{ ~~~~~~~ expression (in_pattern.array_pattern) | ~ begin (in_pattern.array_pattern) | ~ end (in_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:array_pattern, s(:match_rest), s(:match_var, :x)), nil, s(:true)), %q{in [*, x] then true}, %q{ ~~~~~~ expression (in_pattern.array_pattern) | ~ begin (in_pattern.array_pattern) | ~ end (in_pattern.array_pattern)} ) end def test_pattern_matching_hash assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern), nil, s(:true)), %q{in {} then true}, %q{ ~~ expression 
(in_pattern.hash_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:sym, :a), s(:int, 1))), nil, s(:true)), %q{in a: 1 then true}, %q{ ~~~~ expression (in_pattern.hash_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:sym, :a), s(:int, 1))), nil, s(:true)), %q{in { a: 1 } then true}, %q{ ~~~~~~~~ expression (in_pattern.hash_pattern) | ~ begin (in_pattern.hash_pattern) | ~ end (in_pattern.hash_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:sym, :a), s(:int, 1))), nil, s(:true)), %q{in { a: 1, } then true}, %q{ ~~~~~~~~~ expression (in_pattern.hash_pattern) | ~ begin (in_pattern.hash_pattern) | ~ end (in_pattern.hash_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:match_var, :a)), nil, s(:true)), %q{in a: then true}, %q{ ~~ expression (in_pattern.hash_pattern) | ~ name (in_pattern.hash_pattern.match_var) | ~~ expression (in_pattern.hash_pattern.match_var)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:match_rest, s(:match_var, :a))), nil, s(:true)), %q{in **a then true}, %q{ ~~~ expression (in_pattern.hash_pattern) | ~~~ expression (in_pattern.hash_pattern.match_rest) | ~~ operator (in_pattern.hash_pattern.match_rest) | ~ expression (in_pattern.hash_pattern.match_rest.match_var) | ~ name (in_pattern.hash_pattern.match_rest.match_var)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:match_rest)), nil, s(:true)), %q{in ** then true}, %q{ ~~ expression (in_pattern.hash_pattern) | ~~ expression (in_pattern.hash_pattern.match_rest) | ~~ operator (in_pattern.hash_pattern.match_rest)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:sym, :a), s(:int, 1)), s(:pair, s(:sym, :b), s(:int, 2))), nil, s(:true)), %q{in a: 1, b: 2 then true}, %q{ ~~~~~~~~~~ expression (in_pattern.hash_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:match_var, :a), s(:match_var, :b)), nil, s(:true)), %q{in a:, b: then true}, %q{ ~~~~~~ expression (in_pattern.hash_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:sym, :a), s(:int, 1)), s(:match_var, :_a), s(:match_rest)), nil, s(:true)), %q{in a: 1, _a:, ** then true}, %q{ ~~~~~~~~~~~~~ expression (in_pattern.hash_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:sym, :a), s(:int, 1))), nil, s(:false)), %q{ in {a: 1 } false }, %q{} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:sym, :a), s(:int, 2))), nil, s(:false)), %q{ in {a: 2} false }, %q{} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:sym, :Foo), s(:int, 42))), nil, s(:false)), %q{ in {Foo: 42 } false }, %q{} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:sym, :a), s(:hash_pattern, s(:match_var, :b))), s(:match_var, :c)), nil, s(:send, nil, :p, s(:lvar, :c))), %q{ in a: {b:}, c: p c }, %q{} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:match_var, :a)), nil, s(:true)), %q{ in {a: } true }, %q{} ) end def test_ruby_bug_19539 assert_parses( s(:str, "[Bug #19539]\n"), "<<' FOO'\n""[Bug #19539]\n"" FOO\n", %q{}, SINCE_3_3) assert_parses( s(:str, "[Bug #19539]\n"), "<<-' FOO'\n""[Bug #19539]\n"" FOO\n", %q{}, SINCE_3_3) # closing identifier doesn't have enough leading spaces # so it's considered as a part of the string (and so we reach EOF) assert_diagnoses( [:fatal, :string_eof], "<<~' E'\n E", %q{}, SINCE_3_3) end def 
test_pattern_matching_hash_with_string_keys # Match + assign assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:match_var, :a)), nil, s(:true)), %q{in "a": then true}, %q{ ~~~~ expression (in_pattern.hash_pattern.match_var) | ~ name (in_pattern.hash_pattern.match_var)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:match_var, :a)), nil, s(:true)), %q{in "#{ 'a' }": then true}, %q{ ~~~~~~~~~~~ expression (in_pattern.hash_pattern.match_var) | ~ name (in_pattern.hash_pattern.match_var)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:match_var, :a)), nil, s(:true)), %q{in "#{ %q{a} }": then true}, %q{ ~~~~~~~~~~~~~ expression (in_pattern.hash_pattern.match_var) | ~ name (in_pattern.hash_pattern.match_var)} ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:match_var, :a)), nil, s(:true)), %q{in "#{ %Q{a} }": then true}, %q{ ~~~~~~~~~~~~~ expression (in_pattern.hash_pattern.match_var) | ~ name (in_pattern.hash_pattern.match_var)} ) # Only match assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:sym, :a), s(:int, 1))), nil, s(:true)), %q{in "a": 1 then true}, ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:dsym, s(:begin, s(:str, "a"))), s(:int, 1))), nil, s(:true)), %q{in "#{ 'a' }": 1 then true}, ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:dsym, s(:begin, s(:str, "a"))), s(:int, 1))), nil, s(:true)), %q{in "#{ %q{a} }": 1 then true}, ) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:dsym, s(:begin, s(:str, "a"))), s(:int, 1))), nil, s(:true)), %q{in "#{ %Q{a} }": 1 then true}, ) end def test_pattern_matching_hash_with_heredoc_keys # Ruby <3, the following case is acceptable by the MRI's grammar, # so it has to be reducable by parser. # We have a code for that in the builder.rb that reject it via # diagnostic error because of the wrong lvar name assert_diagnoses( [:error, :lvar_name, { name: "a\n" }], "case nil; in \"\#{ <<-HERE }\":;\na\nHERE\nelse\nend", %q{ ~~~~~~~ location}, SINCE_2_7 ) end def test_pattern_matching_hash_with_string_interpolation_keys assert_diagnoses( [:error, :pm_interp_in_var_name], %q{case a; in "#{a}": 1; end}, %q{ ~~~~~~~ location}, SINCE_2_7 ) assert_diagnoses( [:error, :pm_interp_in_var_name], %q{case a; in "#{a}": 1; end}, %q{ ~~~~~~~ location}, SINCE_2_7 ) end def test_pattern_matching_invalid_lvar_name assert_diagnoses( [:error, :lvar_name, { name: :a? }], %q{case a; in a?:; end}, %q{ ~~ location}, SINCE_2_7 ) end def test_pattern_matching_keyword_variable assert_parses_pattern_match( s(:in_pattern, s(:self), nil, s(:true)), %q{in self then true} ) end def test_pattern_matching_lambda assert_parses_pattern_match( s(:in_pattern, s(:block, s(:lambda), s(:args), s(:int, 42)), nil, s(:true)), %q{in ->{ 42 } then true} ) end def test_pattern_matching_ranges assert_parses_pattern_match( s(:in_pattern, s(:irange, s(:int, 1), s(:int, 2)), nil, s(:true)), %q{in 1..2 then true} ) assert_parses_pattern_match( s(:in_pattern, s(:irange, s(:int, 1), nil), nil, s(:true)), %q{in 1.. then true} ) assert_parses_pattern_match( s(:in_pattern, s(:irange, nil, s(:int, 2)), nil, s(:true)), %q{in ..2 then true} ) assert_parses_pattern_match( s(:in_pattern, s(:erange, s(:int, 1), s(:int, 2)), nil, s(:true)), %q{in 1...2 then true} ) assert_parses_pattern_match( s(:in_pattern, s(:erange, s(:int, 1), nil), nil, s(:true)), %q{in 1... 
then true} ) assert_parses_pattern_match( s(:in_pattern, s(:erange, nil, s(:int, 2)), nil, s(:true)), %q{in ...2 then true} ) end def test_pattern_matching_expr_in_paren assert_parses_pattern_match( s(:in_pattern, s(:begin, s(:int, 1)), nil, s(:true)), %q{in (1) then true}, %q{ ~~~ expression (in_pattern.begin) | ~ begin (in_pattern.begin) | ~ end (in_pattern.begin)} ) end def test_pattern_matching_constants assert_parses_pattern_match( s(:in_pattern, s(:const, nil, :A), nil, s(:true)), %q{in A then true}, %q{ ~ expression (in_pattern.const) | ~ name (in_pattern.const)} ) assert_parses_pattern_match( s(:in_pattern, s(:const, s(:const, nil, :A), :B), nil, s(:true)), %q{in A::B then true}, %q{ ~~~~ expression (in_pattern.const) | ~~ double_colon (in_pattern.const) | ~ name (in_pattern.const)} ) assert_parses_pattern_match( s(:in_pattern, s(:const, s(:cbase), :A), nil, s(:true)), %q{in ::A then true}, %q{ ~~~ expression (in_pattern.const) | ~~ double_colon (in_pattern.const) | ~ name (in_pattern.const)} ) end def test_pattern_matching_const_pattern assert_parses_pattern_match( s(:in_pattern, s(:const_pattern, s(:const, nil, :A), s(:array_pattern, s(:int, 1), s(:int, 2))), nil, s(:true)), %q{in A(1, 2) then true}, %q{ ~~~~~~~ expression (in_pattern.const_pattern) | ~ begin (in_pattern.const_pattern) | ~ end (in_pattern.const_pattern) | ~ expression (in_pattern.const_pattern.const) | ~~~~ expression (in_pattern.const_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:const_pattern, s(:const, nil, :A), s(:hash_pattern, s(:match_var, :x))), nil, s(:true)), %q{in A(x:) then true}, %q{ ~~~~~ expression (in_pattern.const_pattern) | ~ begin (in_pattern.const_pattern) | ~ end (in_pattern.const_pattern) | ~ expression (in_pattern.const_pattern.const) | ~~ expression (in_pattern.const_pattern.hash_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:const_pattern, s(:const, nil, :A), s(:array_pattern)), nil, s(:true)), %q{in A() then true}, %q{ ~~~ expression (in_pattern.const_pattern) | ~ begin (in_pattern.const_pattern) | ~ end (in_pattern.const_pattern) | ~ expression (in_pattern.const_pattern.const) | ~~ expression (in_pattern.const_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:const_pattern, s(:const, nil, :A), s(:array_pattern, s(:int, 1), s(:int, 2))), nil, s(:true)), %q{in A[1, 2] then true}, %q{ ~~~~~~~ expression (in_pattern.const_pattern) | ~ begin (in_pattern.const_pattern) | ~ end (in_pattern.const_pattern) | ~ expression (in_pattern.const_pattern.const) | ~~~~ expression (in_pattern.const_pattern.array_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:const_pattern, s(:const, nil, :A), s(:hash_pattern, s(:match_var, :x))), nil, s(:true)), %q{in A[x:] then true}, %q{ ~~~~~ expression (in_pattern.const_pattern) | ~ begin (in_pattern.const_pattern) | ~ end (in_pattern.const_pattern) | ~ expression (in_pattern.const_pattern.const) | ~~ expression (in_pattern.const_pattern.hash_pattern)} ) assert_parses_pattern_match( s(:in_pattern, s(:const_pattern, s(:const, nil, :A), s(:array_pattern)), nil, s(:true)), %q{in A[] then true}, %q{ ~~~ expression (in_pattern.const_pattern) | ~ begin (in_pattern.const_pattern) | ~ end (in_pattern.const_pattern) | ~~ expression (in_pattern.const_pattern.array_pattern)} ) end def test_pattern_matching_match_alt assert_parses_pattern_match( s(:in_pattern, s(:match_alt, s(:int, 1), s(:int, 2)), nil, s(:true)), %q{in 1 | 2 then true}, %q{ ~~~~~ expression (in_pattern.match_alt) | ~ operator 
(in_pattern.match_alt)} ) end def test_pattern_matching_match_as assert_parses_pattern_match( s(:in_pattern, s(:match_as, s(:int, 1), s(:match_var, :a)), nil, s(:true)), %q{in 1 => a then true}, %q{ ~~~~~~ expression (in_pattern.match_as) | ~~ operator (in_pattern.match_as)} ) end def test_pattern_matching_else assert_parses( s(:case_match, s(:int, 1), s(:in_pattern, s(:int, 2), nil, s(:int, 3)), s(:int, 4)), %q{case 1; in 2; 3; else; 4; end}, %q{ ~~~~ else}, SINCE_2_7 ) end def test_pattern_matching_blank_else assert_parses( s(:case_match, s(:int, 1), s(:in_pattern, s(:int, 2), nil, s(:int, 3)), s(:empty_else)), %q{case 1; in 2; 3; else; end}, %q{ ~~~~ else}, SINCE_2_7 ) end def assert_pattern_matching_defines_local_variables(match_code, lvar_names, versions = SINCE_2_7) code = "case 1; #{match_code}; then [#{lvar_names.join(', ')}]; end" with_versions(versions) do |version, parser| source_file = Parser::Source::Buffer.new('(assert_context)', source: code) lvar_names.each do |lvar_name| refute parser.static_env.declared?(lvar_name), "(#{version}) local variable #{lvar_name.to_s.inspect} has to be undefined before asserting" end before = parser.static_env.instance_variable_get(:@variables).to_a begin _parsed_ast = parser.parse(source_file) rescue Parser::SyntaxError => exc backtrace = exc.backtrace Exception.instance_method(:initialize).bind(exc). call("(#{version}) #{exc.message}") exc.set_backtrace(backtrace) raise end lvar_names.each do |lvar_name| assert parser.static_env.declared?(lvar_name), "(#{version}) expected local variable #{lvar_name.to_s.inspect} to be defined after parsing" end after = parser.static_env.instance_variable_get(:@variables).to_a extra = after - before - lvar_names assert extra.empty?, "(#{version}) expected only #{lvar_names.inspect} " \ "to be defined during parsing, but also got #{extra.inspect}" end end def test_pattern_matching_creates_locals assert_pattern_matching_defines_local_variables( %q{in a, *b, c}, [:a, :b, :c] ) assert_pattern_matching_defines_local_variables( %q{in d | e | f}, [:d, :e, :f] ) assert_pattern_matching_defines_local_variables( %q{in { g:, **h }}, [:g, :h] ) assert_pattern_matching_defines_local_variables( %q{in A(i, *j, k)}, [:i, :j, :k] ) assert_pattern_matching_defines_local_variables( %q{in 1 => l}, [:l] ) assert_pattern_matching_defines_local_variables( %q{in "m":}, [:m] ) end def test_pattern_matching__FILE__LINE_literals assert_parses( s(:case_match, s(:array, s(:str, "(assert_parses)"), s(:send, s(:int, 1), :+, s(:int, 1)), s(:__ENCODING__)), s(:in_pattern, s(:array_pattern, s(:str, "(assert_parses)"), s(:int, 2), s(:__ENCODING__)), nil, nil), nil), <<-RUBY, case [__FILE__, __LINE__ + 1, __ENCODING__] in [__FILE__, __LINE__, __ENCODING__] end RUBY %q{}, SINCE_2_7) end def test_pattern_matching_nil_pattern assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:match_nil_pattern)), nil, s(:true)), %q{in **nil then true}, %q{ ~~~~~ expression (in_pattern.hash_pattern.match_nil_pattern) | ~~~ name (in_pattern.hash_pattern.match_nil_pattern)} ) end def test_pattern_matching_single_line__27__legacy Parser::Builders::Default.emit_match_pattern = false assert_parses( s(:begin, s(:in_match, s(:int, 1), s(:array_pattern, s(:match_var, :a))), s(:lvar, :a)), %q{1 in [a]; a}, %q{~~~~~~~~ expression (in_match) | ~~ operator (in_match)}, %w(2.7)) ensure Parser::Builders::Default.emit_match_pattern = true end def test_pattern_matching_single_line__27 assert_parses( s(:begin, s(:match_pattern, s(:int, 1), s(:array_pattern, 
s(:match_var, :a))), s(:lvar, :a)), %q{1 in [a]; a}, %q{~~~~~~~~ expression (match_pattern) | ~~ operator (match_pattern)}, %w(2.7)) end def test_pattern_matching_single_line assert_parses( s(:begin, s(:match_pattern, s(:int, 1), s(:array_pattern, s(:match_var, :a))), s(:lvar, :a)), %q{1 => [a]; a}, %q{~~~~~~~~ expression (match_pattern) | ~~ operator (match_pattern)}, SINCE_3_0) assert_parses( s(:begin, s(:match_pattern_p, s(:int, 1), s(:array_pattern, s(:match_var, :a))), s(:lvar, :a)), %q{1 in [a]; a}, %q{~~~~~~~~ expression (match_pattern_p) | ~~ operator (match_pattern_p)}, SINCE_3_0) end def test_pattern_matching_single_line_allowed_omission_of_parentheses assert_parses( s(:begin, s(:match_pattern, s(:array, s(:int, 1), s(:int, 2)), s(:array_pattern, s(:match_var, :a), s(:match_var, :b))), s(:lvar, :a)), %q{[1, 2] => a, b; a}, %q{~~~~~~~~~~~~~~ expression (match_pattern) | ~~ operator (match_pattern)}, SINCE_3_1) assert_parses( s(:begin, s(:match_pattern, s(:hash, s(:pair, s(:sym, :a), s(:int, 1))), s(:hash_pattern, s(:match_var, :a))), s(:lvar, :a)), %q{{a: 1} => a:; a}, %q{~~~~~~~~~~~~ expression (match_pattern) | ~~ operator (match_pattern)}, SINCE_3_1) assert_parses( s(:begin, s(:match_pattern_p, s(:array, s(:int, 1), s(:int, 2)), s(:array_pattern, s(:match_var, :a), s(:match_var, :b))), s(:lvar, :a)), %q{[1, 2] in a, b; a}, %q{~~~~~~~~~~~~~~ expression (match_pattern_p) | ~~ operator (match_pattern_p)}, SINCE_3_1) assert_parses( s(:begin, s(:match_pattern_p, s(:hash, s(:pair, s(:sym, :a), s(:int, 1))), s(:hash_pattern, s(:match_var, :a))), s(:lvar, :a)), %q{{a: 1} in a:; a}, %q{~~~~~~~~~~~~ expression (match_pattern_p) | ~~ operator (match_pattern_p)}, SINCE_3_1) assert_parses( s(:begin, s(:match_pattern_p, s(:hash, s(:pair, s(:sym, :key), s(:sym, :value))), s(:hash_pattern, s(:pair, s(:sym, :key), s(:match_var, :value)))), s(:lvar, :value)), %q{{key: :value} in key: value; value}, %q{~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression (match_pattern_p) | ~~ operator (match_pattern_p)}, SINCE_3_1) assert_parses( s(:begin, s(:match_pattern, s(:hash, s(:pair, s(:sym, :key), s(:sym, :value))), s(:hash_pattern, s(:pair, s(:sym, :key), s(:match_var, :value)))), s(:lvar, :value)), %q{{key: :value} => key: value; value}, %q{~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression (match_pattern) | ~~ operator (match_pattern)}, SINCE_3_1) end def test_ruby_bug_pattern_matching_restore_in_kwarg_flag refute_diagnoses( "p(({} in {a:}), a:\n 1)", %w(2.7)) refute_diagnoses( "p(({} => {a:}), a:\n 1)", SINCE_3_0) end def test_pattern_matching_duplicate_variable_name assert_diagnoses( [:error, :duplicate_variable_name, { :name => 'a' }], %q{case 0; in a, a; end}, %q{ ^ location}, SINCE_2_7) refute_diagnoses( %q{case [0, 1, 2, 3]; in _, _, _a, _a; end}, SINCE_2_7) assert_diagnoses( [:error, :duplicate_variable_name, { :name => 'a' }], %q{case 0; in a, {a:}; end}, %q{ ^ location}, SINCE_2_7) assert_diagnoses( [:error, :duplicate_variable_name, { :name => 'a' }], %q{case 0; in a, {"a":}; end}, %q{ ^ location}, SINCE_2_7) assert_diagnoses( [:error, :duplicate_variable_name, { :name => 'a' }], %q{0 in [a, a]}, %q{ ^ location}, %w(2.7)) assert_diagnoses( [:error, :duplicate_variable_name, { :name => 'a' }], %q{0 => [a, a]}, %q{ ^ location}, SINCE_3_0) assert_diagnoses( [:error, :duplicate_variable_name, { :name => 'a' }], %q{0 in [a, *a]}, %q{ ^ location}, SINCE_3_3) assert_diagnoses( [:error, :duplicate_variable_name, { :name => 'a' }], %q{0 in [*a, a, b, *b]}, %q{ ^ location}, SINCE_3_3) end def 
test_pattern_matching_duplicate_hash_keys assert_diagnoses( [:error, :duplicate_pattern_key, { :name => 'a' }], %q{ case 0; in a: 1, a: 2; end }, %q{ ^^ location}, SINCE_2_7) assert_diagnoses( [:error, :duplicate_pattern_key, { :name => 'a' }], %q{ case 0; in a: 1, "a": 2; end }, %q{ ^^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :duplicate_pattern_key, { :name => 'a' }], %q{ case 0; in "a": 1, "a": 2; end }, %q{ ^^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :duplicate_pattern_key, { :name => "a\0" }], %q{ case 0; in "a\x0":a1, "a\0":a2; end }, %q{ ^^^^^^ location}, SINCE_2_7) assert_diagnoses( [:error, :duplicate_pattern_key, { :name => "abc" }], %q{ case 0; in "abc":a1, "a#{"b"}c":a2; end }, %q{ ^^^^^^^^^^^ location}, SINCE_2_7) end def test_pattern_matching_required_parentheses_for_in_match assert_diagnoses( [:error, :unexpected_token, { :token => 'tCOMMA' }], %{1 in a, b}, %{ ^ location}, %w(2.7)) assert_diagnoses( [:error, :unexpected_token, { :token => 'tCOMMA' }], %{1 => a, b}, %{ ^ location}, %w(3.0)) assert_diagnoses( [:error, :unexpected_token, { :token => 'tASSOC' }], %{1 => a:}, %{ ^^ location}, %w(2.7)) assert_diagnoses( [:error, :unexpected_token, { :token => 'tLABEL' }], %{1 => a:}, %{ ^^ location}, %w(3.0)) end def test_pattern_matching_required_bound_variable_before_pin assert_diagnoses( [:error, :undefined_lvar, { :name => 'a' }], %{case 0; in ^a; true; end}, %{ ^ location}, SINCE_2_7) end def test_parser_bug_645 assert_parses( s(:block, s(:lambda), s(:args, s(:optarg, :arg, s(:hash))), nil), '-> (arg={}) {}', %{}, SINCE_1_9) end def test_endless_method assert_parses( s(:def, :foo, s(:args), s(:int, 42)), %q{def foo() = 42}, %q{~~~ keyword | ~~~ name | ^ assignment |! end |~~~~~~~~~~~~~~ expression}, SINCE_3_0) assert_parses( s(:def, :inc, s(:args, s(:arg, :x)), s(:send, s(:lvar, :x), :+, s(:int, 1))), %q{def inc(x) = x + 1}, %q{~~~ keyword | ~~~ name | ^ assignment |~~~~~~~~~~~~~~~~~~ expression}, SINCE_3_0) assert_parses( s(:defs, s(:send, nil, :obj), :foo, s(:args), s(:int, 42)), %q{def obj.foo() = 42}, %q{~~~ keyword | ^ operator | ~~~ name | ^ assignment |~~~~~~~~~~~~~~~~~~ expression}, SINCE_3_0) assert_parses( s(:defs, s(:send, nil, :obj), :inc, s(:args, s(:arg, :x)), s(:send, s(:lvar, :x), :+, s(:int, 1))), %q{def obj.inc(x) = x + 1}, %q{~~~ keyword | ~~~ name | ^ operator | ^ assignment |~~~~~~~~~~~~~~~~~~~~~~ expression}, SINCE_3_0) end def test_endless_method_forwarded_args_legacy Parser::Builders::Default.emit_forward_arg = false assert_parses( s(:def, :foo, s(:forward_args), s(:send, nil, :bar, s(:forwarded_args))), %q{def foo(...) 
= bar(...)}, %q{~~~ keyword | ~~~ name | ^ assignment |~~~~~~~~~~~~~~~~~~~~~~~ expression}, SINCE_3_0) Parser::Builders::Default.emit_forward_arg = true end def test_endless_method_with_rescue_mod assert_parses( s(:def, :m, s(:args), s(:rescue, s(:int, 1), s(:resbody, nil, nil, s(:int, 2)), nil)), %q{def m() = 1 rescue 2}, %q{}, SINCE_3_0) assert_parses( s(:defs, s(:self), :m, s(:args), s(:rescue, s(:int, 1), s(:resbody, nil, nil, s(:int, 2)), nil)), %q{def self.m() = 1 rescue 2}, %q{}, SINCE_3_0) end def test_endless_method_command_syntax assert_parses( s(:def, :foo, s(:args), s(:send, nil, :puts, s(:str, "Hello"))), %q{def foo = puts "Hello"}, %q{~~~ keyword | ~~~ name | ^ assignment |~~~~~~~~~~~~~~~~~~~~~~ expression}, SINCE_3_1) assert_parses( s(:def, :foo, s(:args), s(:send, nil, :puts, s(:str, "Hello"))), %q{def foo() = puts "Hello"}, %q{~~~ keyword | ~~~ name | ^ assignment |~~~~~~~~~~~~~~~~~~~~~~~~ expression}, SINCE_3_1) assert_parses( s(:def, :foo, s(:args, s(:arg, :x)), s(:send, nil, :puts, s(:lvar, :x))), %q{def foo(x) = puts x}, %q{~~~ keyword | ~~~ name | ^ assignment |~~~~~~~~~~~~~~~~~~~ expression}, SINCE_3_1) assert_parses( s(:defs, s(:send, nil, :obj), :foo, s(:args), s(:send, nil, :puts, s(:str, "Hello"))), %q{def obj.foo = puts "Hello"}, %q{~~~ keyword | ^ operator | ~~~ name | ^ assignment |~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}, SINCE_3_1) assert_parses( s(:defs, s(:send, nil, :obj), :foo, s(:args), s(:send, nil, :puts, s(:str, "Hello"))), %q{def obj.foo() = puts "Hello"}, %q{~~~ keyword | ^ operator | ~~~ name | ^ assignment |~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}, SINCE_3_1) assert_parses( s(:def, :rescued, s(:args, s(:arg, :x)), s(:rescue, s(:send, nil, :raise, s(:str, "to be caught")), s(:resbody, nil, nil, s(:dstr, s(:str, "instance "), s(:begin, s(:lvar, :x)))), nil)), %q{def rescued(x) = raise "to be caught" rescue "instance #{x}"}, %q{~~~ keyword | ~~~~~~~ name | ^ assignment |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}, SINCE_3_1) assert_parses( s(:defs, s(:self), :rescued, s(:args, s(:arg, :x)), s(:rescue, s(:send, nil, :raise, s(:str, "to be caught")), s(:resbody, nil, nil, s(:dstr, s(:str, "class "), s(:begin, s(:lvar, :x)))), nil)), %q{def self.rescued(x) = raise "to be caught" rescue "class #{x}"}, %q{~~~ keyword | ^ operator | ~~~~~~~ name | ^ assignment |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ expression}, SINCE_3_1) assert_parses( s(:defs, s(:send, nil, :obj), :foo, s(:args, s(:arg, :x)), s(:send, nil, :puts, s(:lvar, :x))), %q{def obj.foo(x) = puts x}, %q{~~~ keyword | ^ operator | ~~~ name | ^ assignment |~~~~~~~~~~~~~~~~~~~~~~~ expression}, SINCE_3_1) end def test_private_endless_method_command_syntax assert_diagnoses( [:error, :unexpected_token, { :token => 'tSTRING' }], %q{private def foo = puts "Hello"}, %q{ ^^^^^^^ location}, SINCE_3_1) assert_diagnoses( [:error, :unexpected_token, { :token => 'tSTRING' }], %q{private def foo() = puts "Hello"}, %q{ ^^^^^^^ location}, SINCE_3_1) assert_diagnoses( [:error, :unexpected_token, { :token => 'tIDENTIFIER' }], %q{private def foo(x) = puts x}, %q{ ^ location}, SINCE_3_1) assert_diagnoses( [:error, :unexpected_token, { :token => 'tSTRING' }], %q{private def obj.foo = puts "Hello"}, %q{ ^^^^^^^ location}, SINCE_3_1) assert_diagnoses( [:error, :unexpected_token, { :token => 'tSTRING' }], %q{private def obj.foo() = puts "Hello"}, %q{ ^^^^^^^ location}, SINCE_3_1) assert_diagnoses( [:error, :unexpected_token, { :token => 'tIDENTIFIER' }], %q{private def 
obj.foo(x) = puts x}, %q{ ^ location}, SINCE_3_1) end def test_hash_pair_value_omission assert_parses( s(:hash, s(:pair, s(:sym, :a), s(:send, nil, :a)), s(:pair, s(:sym, :b), s(:send, nil, :b))), %q{{a:, b:}}, %q{^ begin | ^ end | ^ operator (pair) | ~ expression (pair.sym) | ~ expression (pair.send) | ~~ expression (pair) |~~~~~~~~ expression}, SINCE_3_1) assert_parses( s(:hash, s(:pair, s(:sym, :puts), s(:send, nil, :puts))), %q{{puts:}}, %q{ ^ operator (pair) | ~~~~ expression (pair.sym) | ~~~~ expression (pair.send) | ~~~~ selector (pair.send) | ~~~~~ expression (pair)}, SINCE_3_1) assert_parses( s(:hash, s(:pair, s(:sym, :BAR), s(:const, nil, :BAR))), %q{{BAR:}}, %q{ ^ operator (pair) | ~~~ expression (pair.sym) | ~~~ expression (pair.const) | ~~~ name (pair.const) | ~~~~ expression (pair)}, SINCE_3_1) assert_diagnoses( [:error, :unexpected_token, { :token => 'tRCURLY' }], %q{{"#{x}":}}, %q{ ^ location}, SINCE_3_1) end def test_keyword_argument_omission assert_parses( s(:send, nil, :foo, s(:kwargs, s(:pair, s(:sym, :a), s(:send, nil, :a)), s(:pair, s(:sym, :b), s(:send, nil, :b)))), %q{foo(a:, b:)}, %q{ ^ begin | ^ end | ^ operator (kwargs.pair) | ~ expression (kwargs.pair.sym) | ~ expression (kwargs.pair.send) | ~~ expression (kwargs.pair) | ~~~~~~ expression (kwargs) |~~~~~~~~~~~ expression}, SINCE_3_1) end def test_hash_pair_value_omission_invalid_label assert_diagnoses( [:error, :invalid_id_to_get, { :identifier => 'foo?' }], %q{{ foo?: }}, %q{ ^^^^ location}, SINCE_3_1) assert_diagnoses( [:error, :invalid_id_to_get, { :identifier => 'bar!' }], %q{{ bar!: }}, %q{ ^^^^ location}, SINCE_3_1) end def test_rasgn_line_continuation assert_diagnoses( [:error, :unexpected_token, { :token => 'tASSOC' }], %Q{13.divmod(5)\n=> a,b; [a, b]}, %{ ^^ location}, SINCE_3_0) end def test_find_pattern assert_parses_pattern_match( s(:in_pattern, s(:find_pattern, s(:match_rest, s(:match_var, :x)), s(:match_as, s(:int, 1), s(:match_var, :a)), s(:match_rest, s(:match_var, :y))), nil, s(:true)), %q{in [*x, 1 => a, *y] then true}, %q{ ~~~~~~~~~~~~~~~~ expression (in_pattern.find_pattern) | ~ begin (in_pattern.find_pattern) | ~ end (in_pattern.find_pattern) | ~~ expression (in_pattern.find_pattern.match_rest/1) | ~~ expression (in_pattern.find_pattern.match_rest/2)}, SINCE_3_0) assert_parses_pattern_match( s(:in_pattern, s(:const_pattern, s(:const, nil, :String), s(:find_pattern, s(:match_rest), s(:int, 1), s(:match_rest))), nil, s(:true)), %q{in String(*, 1, *) then true}, %q{ ~~~~~~~ expression (in_pattern.const_pattern.find_pattern)}, SINCE_3_0) assert_parses_pattern_match( s(:in_pattern, s(:const_pattern, s(:const, nil, :Array), s(:find_pattern, s(:match_rest), s(:int, 1), s(:match_rest))), nil, s(:true)), %q{in Array[*, 1, *] then true}, %q{ ~~~~~~~ expression (in_pattern.const_pattern.find_pattern)}, SINCE_3_0) assert_parses_pattern_match( s(:in_pattern, s(:find_pattern, s(:match_rest), s(:int, 42), s(:match_rest)), nil, s(:true)), %q{in *, 42, * then true}, %q{ ~~~~~~~~ expression (in_pattern.find_pattern)}, SINCE_3_0) end def test_invalid_source with_versions(ALL_VERSIONS) do |_ver, parser| source_file = Parser::Source::Buffer.new('(comments)', source: "def foo; en") parser.diagnostics.all_errors_are_fatal = false ast = parser.parse(source_file) assert_nil(ast) end end def test_reserved_for_numparam__before_30 assert_parses( s(:block, s(:send, nil, :proc), s(:args), s(:lvasgn, :_1, s(:nil))), %q{proc {_1 = nil}}, %q{}, ALL_VERSIONS - SINCE_3_0) assert_parses( s(:lvasgn, :_2, s(:int, 1)), %q{_2 = 
1}, %q{}, ALL_VERSIONS - SINCE_3_0) assert_parses( s(:block, s(:send, nil, :proc), s(:args, s(:procarg0, s(:arg, :_3))), nil), %q{proc {|_3|}}, %q{}, SINCE_1_9 - SINCE_3_0) assert_parses( s(:def, :x, s(:args, s(:arg, :_4)), nil), %q{def x(_4) end}, %q{}, ALL_VERSIONS - SINCE_3_0) assert_parses( s(:def, :_5, s(:args), nil), %q{def _5; end}, %q{}, ALL_VERSIONS - SINCE_3_0) assert_parses( s(:defs, s(:self), :_6, s(:args), nil), %q{def self._6; end}, %q{}, ALL_VERSIONS - SINCE_3_0) end def test_reserved_for_numparam__since_30 # Regular assignments: assert_diagnoses( [:error, :reserved_for_numparam, { :name => '_1' }], %q{proc {_1 = nil}}, %q{ ^^ location}, SINCE_3_0) assert_diagnoses( [:error, :reserved_for_numparam, { :name => '_2' }], %q{_2 = 1}, %q{^^ location}, SINCE_3_0) # Arguments: [ # req (procarg0) [ %q{proc {|_3|}}, %q{ ^^ location}, ], # req [ %q{proc {|_3,|}}, %q{ ^^ location}, ], # opt [ %q{proc {|_3 = 42|}}, %q{ ^^ location}, ], # mlhs [ %q{proc {|(_3)|}}, %q{ ^^ location}, ], # rest [ %q{proc {|*_3|}}, %q{ ^^ location}, ], # kwarg [ %q{proc {|_3:|}}, %q{ ^^^ location}, ], # kwoptarg [ %q{proc {|_3: 42|}}, %q{ ^^^ location}, ], # kwrestarg [ %q{proc {|**_3|}}, %q{ ^^ location}, ], # block [ %q{proc {|&_3|}}, %q{ ^^ location}, ], # shadowarg [ %q{proc {|;_3|}}, %q{ ^^ location}, ], ].each do |(code, location)| assert_diagnoses( [:error, :reserved_for_numparam, { :name => '_3' }], code, location, SINCE_3_0) end # Method definitions: [ # regular method [ %q{def _5; end}, %q{ ^^ location} ], # regular singleton method [ %q{def self._5; end}, %q{ ^^ location} ], # endless method [ %q{def _5() = nil}, %q{ ^^ location} ], # endless singleton method [ %q{def self._5() = nil}, %q{ ^^ location} ], ].each do |(code, location)| assert_diagnoses( [:error, :reserved_for_numparam, { :name => '_5' }], code, location, SINCE_3_0) end end def test_numparam_ruby_bug_19025 assert_diagnoses_many( [ [:warning, :ambiguous_prefix, { :prefix => '**' }], [:error, :unexpected_token, { :token => 'tDSTAR' }] ], 'p { [_1 **2] }', %w[3.0 3.1]) assert_parses( s(:numblock, s(:send, nil, :p), 1, s(:array, s(:send, s(:lvar, :_1), :**, s(:int, 2)))), 'p { [_1 **2] }', %q{}, SINCE_3_2) end def test_endless_setter assert_diagnoses( [:error, :endless_setter], %q{def foo=() = 42}, %q{ ^^^^ location}, SINCE_3_0) assert_diagnoses( [:error, :endless_setter], %q{def obj.foo=() = 42}, %q{ ^^^^ location}, SINCE_3_0) assert_diagnoses( [:error, :endless_setter], %q{def foo=() = 42 rescue nil}, %q{ ^^^^ location}, SINCE_3_0) assert_diagnoses( [:error, :endless_setter], %q{def obj.foo=() = 42 rescue nil}, %q{ ^^^^ location}, SINCE_3_0) end def test_endless_comparison_method %i[=== == != <= >= !=].each do |method_name| assert_parses( s(:def, method_name, s(:args, s(:arg, :other)), s(:send, nil, :do_something)), %Q{def #{method_name}(other) = do_something}, %q{}, SINCE_3_0) end end def test_endless_method_without_args assert_parses( s(:def, :foo, s(:args), s(:int, 42)), %q{def foo = 42}, %q{}, SINCE_3_0) assert_parses( s(:def, :foo, s(:args), s(:rescue, s(:int, 42), s(:resbody, nil, nil, s(:nil)), nil)), %q{def foo = 42 rescue nil}, %q{}, SINCE_3_0) assert_parses( s(:defs, s(:self), :foo, s(:args), s(:int, 42)), %q{def self.foo = 42}, %q{}, SINCE_3_0) assert_parses( s(:defs, s(:self), :foo, s(:args), s(:rescue, s(:int, 42), s(:resbody, nil, nil, s(:nil)), nil)), %q{def self.foo = 42 rescue nil}, %q{}, SINCE_3_0) end def test_parser_drops_truncated_parts_of_squiggly_heredoc assert_parses( s(:dstr, s(:begin), s(:str, "\n")), 
"<<~HERE\n \#{}\nHERE", %q{}, SINCE_2_3) end def test_pin_expr assert_parses_pattern_match( s(:in_pattern, s(:pin, s(:begin, s(:int, 42))), nil, s(:nil)), %q{in ^(42) then nil}, %q{ ~ selector (in_pattern.pin) | ~~~~~ expression (in_pattern.pin) | ~ begin (in_pattern.pin.begin) | ~ end (in_pattern.pin.begin) | ~~~~ expression (in_pattern.pin.begin)}, SINCE_3_1) assert_parses_pattern_match( s(:in_pattern, s(:hash_pattern, s(:pair, s(:sym, :foo), s(:pin, s(:begin, s(:int, 42))))), nil, s(:nil)), %q{in { foo: ^(42) } then nil}, %q{ ~ selector (in_pattern.hash_pattern.pair.pin) | ~~~~~ expression (in_pattern.hash_pattern.pair.pin) | ~ begin (in_pattern.hash_pattern.pair.pin.begin) | ~ end (in_pattern.hash_pattern.pair.pin.begin) | ~~~~ expression (in_pattern.hash_pattern.pair.pin.begin)}, SINCE_3_1) assert_parses_pattern_match( s(:in_pattern, s(:pin, s(:begin, s(:send, s(:int, 0), :+, s(:int, 0)))), nil, s(:nil)), %q{in ^(0+0) then nil}, %q{ ~ selector (in_pattern.pin) | ~~~~~~ expression (in_pattern.pin) | ~ begin (in_pattern.pin.begin) | ~ end (in_pattern.pin.begin) | ~~~~~ expression (in_pattern.pin.begin)}, SINCE_3_1) assert_parses_pattern_match( s(:in_pattern, s(:pin, s(:ivar, :@a)), nil, nil), %q{in ^@a}, %q{ ~ selector (in_pattern.pin) | ~~~ expression (in_pattern.pin)}, SINCE_3_1) assert_parses_pattern_match( s(:in_pattern, s(:pin, s(:cvar, :@@TestPatternMatching)), nil, nil), %q{in ^@@TestPatternMatching}, %q{ ~ selector (in_pattern.pin) | ~~~~~~~~~~~~~~~~~~~~~~ expression (in_pattern.pin)}, SINCE_3_1) assert_parses_pattern_match( s(:in_pattern, s(:pin, s(:gvar, :$TestPatternMatching)), nil, nil), %q{in ^$TestPatternMatching}, %q{ ~ selector (in_pattern.pin) | ~~~~~~~~~~~~~~~~~~~~~ expression (in_pattern.pin)}, SINCE_3_1) assert_parses_pattern_match( s(:in_pattern, s(:pin, s(:begin, s(:int, 1))), nil, nil), %Q{in ^(1\n)}, %q{ ~ selector (in_pattern.pin) | ~~~~~ expression (in_pattern.pin)}, SINCE_3_2) end def test_assignment_to_numparam_via_pattern_matching assert_diagnoses( [:error, :reserved_for_numparam, { :name => '_1' }], %q{proc { 1 in _1 }}, %q{ ~~ location}, SINCE_3_0) assert_diagnoses( [:error, :cant_assign_to_numparam, { :name => '_1' }], %q{proc { _1; 1 in _1 }}, %q{ ~~ location}, SINCE_2_7) end def test_warn_on_duplicate_hash_key # symbol assert_diagnoses( [:warning, :duplicate_hash_key], %q{ { :foo => 1, :foo => 2 } }, %q{ ^^^^ location}, ALL_VERSIONS) # string assert_diagnoses( [:warning, :duplicate_hash_key], %q{ { "foo" => 1, "foo" => 2 } }, %q{ ^^^^^ location}, ALL_VERSIONS) # small number assert_diagnoses( [:warning, :duplicate_hash_key], %q{ { 1000 => 1, 1000 => 2 } }, %q{ ^^^^ location}, ALL_VERSIONS) # float assert_diagnoses( [:warning, :duplicate_hash_key], %q{ { 1.0 => 1, 1.0 => 2 } }, %q{ ^^^ location}, ALL_VERSIONS) # bignum assert_diagnoses( [:warning, :duplicate_hash_key], %q{ { 1_000_000_000_000_000_000 => 1, 1_000_000_000_000_000_000 => 2 } }, %q{ ^^^^^^^^^^^^^^^^^^^^^^^^^ location}, ALL_VERSIONS) # rational (tRATIONAL exists starting from 2.7) refute_diagnoses(%q{ { 1.0r => 1, 1.0r => 2 } }, SINCE_2_1 - SINCE_3_1) assert_diagnoses( [:warning, :duplicate_hash_key], %q{ { 1.0r => 1, 1.0r => 2 } }, %q{ ~~~~ location}, SINCE_3_1) # complex (tIMAGINARY exists starting from 2.7) refute_diagnoses(%q{ { 1.0i => 1, 1.0i => 2 } }, SINCE_2_1 - SINCE_3_1) assert_diagnoses( [:warning, :duplicate_hash_key], %q{ { 1.0i => 1, 1.0i => 2 } }, %q{ ~~~~ location}, SINCE_3_1) # small float assert_diagnoses( [:warning, :duplicate_hash_key], %q{ { 1.72723e-77 => 1, 1.72723e-77 
=> 2 } }, %q{ ~~~~~~~~~~~ location}, ALL_VERSIONS) # regexp refute_diagnoses(%q{ { /foo/ => 1, /foo/ => 2 } }, ALL_VERSIONS - SINCE_3_1) assert_diagnoses( [:warning, :duplicate_hash_key], %q{ { /foo/ => 1, /foo/ => 2 } }, %q{ ~~~~~ location}, SINCE_3_1) end def test_parser_bug_830 assert_parses( s(:regexp, s(:str, "\\("), s(:regopt)), %q{/\(/}, %q{}, ALL_VERSIONS) end def test_control_meta_escape_chars_in_regexp__before_31 assert_parses( s(:regexp, s(:str, "\\c\\xFF"), s(:regopt)), %q{/\c\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, ALL_VERSIONS - SINCE_3_1) assert_parses( s(:regexp, s(:str, "\\c\\M-\\xFF"), s(:regopt)), %q{/\c\M-\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, ALL_VERSIONS - SINCE_3_1) assert_parses( s(:regexp, s(:str, "\\C-\\xFF"), s(:regopt)), %q{/\C-\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, ALL_VERSIONS - SINCE_3_1) assert_parses( s(:regexp, s(:str, "\\C-\\M-\\xFF"), s(:regopt)), %q{/\C-\M-\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, ALL_VERSIONS - SINCE_3_1) assert_parses( s(:regexp, s(:str, "\\M-\\xFF"), s(:regopt)), %q{/\M-\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, ALL_VERSIONS - SINCE_3_1) assert_parses( s(:regexp, s(:str, "\\M-\\C-\\xFF"), s(:regopt)), %q{/\M-\C-\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, ALL_VERSIONS - SINCE_3_1) assert_parses( s(:regexp, s(:str, "\\M-\\c\\xFF"), s(:regopt)), %q{/\M-\c\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, ALL_VERSIONS - SINCE_3_1) end def test_control_meta_escape_chars_in_regexp__since_31 x9f = "\x9F".dup.force_encoding('ascii-8bit') assert_parses( s(:regexp, s(:str, x9f), s(:regopt)), %q{/\c\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, SINCE_3_1) assert_parses( s(:regexp, s(:str, x9f), s(:regopt)), %q{/\c\M-\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, SINCE_3_1) assert_parses( s(:regexp, s(:str, x9f), s(:regopt)), %q{/\C-\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, SINCE_3_1) assert_parses( s(:regexp, s(:str, x9f), s(:regopt)), %q{/\C-\M-\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, SINCE_3_1) assert_parses( s(:regexp, s(:str, x9f), s(:regopt)), %q{/\M-\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, SINCE_3_1) assert_parses( s(:regexp, s(:str, x9f), s(:regopt)), %q{/\M-\C-\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, SINCE_3_1) assert_parses( s(:regexp, s(:str, x9f), s(:regopt)), %q{/\M-\c\xFF/}.dup.force_encoding('ascii-8bit'), %q{}, SINCE_3_1) end def test_forward_arg_with_open_args assert_diagnoses_many( [ [:warning, :triple_dot_at_eol], [:error, :unexpected_token, { :token => 'tDOT3' }], ], %Q{def foo ...\nend}, SINCE_2_7 - SINCE_3_1) assert_diagnoses_many( [ [:error, :unexpected_token, { :token => 'tBDOT3' }], ], %Q{def foo a, b = 1, ...\nend}, SINCE_2_7 - SINCE_3_1) assert_parses( s(:def, :foo, s(:args, s(:forward_arg)), nil), %Q{def foo ...\nend}, %q{ ~~~ expression (args.forward_arg)}, SINCE_3_1) assert_parses( s(:def, :foo, s(:args, s(:arg, :a), s(:optarg, :b, s(:int, 1)), s(:forward_arg)), nil), %Q{def foo a, b = 1, ...\nend}, %q{ ~~~ expression (args.forward_arg)}, SINCE_3_1) assert_diagnoses( [:error, :forward_arg_after_restarg], %Q{def foo *rest, ...\nend}, %q{ ~~~ location | ~~~~~ highlights (0)}, SINCE_3_1) assert_parses( s(:def, :foo, s(:args, s(:arg, :a), s(:forward_arg)), s(:send, nil, :bar, s(:forwarded_args))), "def foo(a, ...) bar(...) 
end", %q{}, SINCE_3_1) assert_parses( s(:def, :foo, s(:args, s(:arg, :a), s(:forward_arg)), s(:send, nil, :bar, s(:forwarded_args))), "def foo a, ...\n bar(...)\nend", %q{}, SINCE_3_1) assert_parses( s(:def, :foo, s(:args, s(:optarg, :b, s(:int, 1)), s(:forward_arg)), s(:send, nil, :bar, s(:forwarded_args))), "def foo b = 1, ...\n bar(...)\nend", %q{}, SINCE_3_1) assert_parses( s(:def, :foo, s(:args, s(:forward_arg)), s(:send, nil, :bar, s(:forwarded_args))), "def foo ...; bar(...); end", %q{}, SINCE_3_1) assert_parses( s(:def, :foo, s(:args, s(:arg, :a), s(:forward_arg)), s(:send, nil, :bar, s(:forwarded_args))), "def foo a, ...; bar(...); end", %q{}, SINCE_3_1) assert_parses( s(:def, :foo, s(:args, s(:optarg, :b, s(:int, 1)), s(:forward_arg)), s(:send, nil, :bar, s(:forwarded_args))), "def foo b = 1, ...; bar(...); end", %q{}, SINCE_3_1) assert_parses( s(:begin, s(:def, :foo, s(:args, s(:forward_arg)), s(:send, nil, :bar, s(:forwarded_args)))), "(def foo ...\n bar(...)\nend)", %q{}, SINCE_3_1) assert_parses( s(:begin, s(:def, :foo, s(:args, s(:forward_arg)), s(:send, nil, :bar, s(:forwarded_args)))), "(def foo ...; bar(...); end)", %q{}, SINCE_3_1) end def test_anonymous_blockarg assert_parses( s(:def, :foo, s(:args, s(:blockarg, nil)), s(:send, nil, :bar, s(:block_pass, nil))), %q{def foo(&); bar(&); end}, %q{ ~ expression (args.blockarg) | ~ operator (send.block_pass) | ~ expression (send.block_pass)}, SINCE_3_1) assert_diagnoses( [:error, :no_anonymous_blockarg], %q{def foo(); bar(&); end}, %q{ ^ location}, SINCE_3_1) assert_diagnoses( [:error, :unexpected_token, { :token => 'tINTEGER' }], %q{def foo(&0); end}, %q{ ^ location}, SINCE_3_1) end def test_invalid_escape_sequence_in_regexp__before_32 assert_diagnoses( [:fatal, :invalid_unicode_escape], %q{/foo-\\u-bar/}, %q{}, ALL_VERSIONS - SINCE_3_2) end if RUBY_ENGINE != 'truffleruby' def test_invalid_escape_sequence_in_regexp__since_32 assert_diagnoses( [:error, :invalid_regexp, { :message => "invalid Unicode escape: /foo-\\u-bar/" }], %q{/foo-\\u-bar/}, %q{}, SINCE_3_2) end end def test_forwarded_restarg assert_parses( s(:def, :foo, s(:args, s(:restarg)), s(:send, nil, :bar, s(:forwarded_restarg))), %q{def foo(*); bar(*); end}, %q{ ~ expression (send.forwarded_restarg)}, SINCE_3_2) assert_diagnoses( [:error, :no_anonymous_restarg], %q{def foo; bar(*); end}, %q{}, SINCE_3_2) end def test_forwarded_argument_with_restarg assert_parses( s(:def, :foo, s(:args, s(:arg, :argument), s(:restarg)), s(:send, nil, :bar, s(:lvar, :argument), s(:forwarded_restarg))), %q{def foo(argument, *); bar(argument, *); end}, %q{ ~ expression (send.forwarded_restarg)}, SINCE_3_2) assert_diagnoses( [:error, :no_anonymous_restarg], %q{def foo; bar(argument, *); end}, %q{}, SINCE_3_2) end def test_forwarded_kwrestarg assert_parses( s(:def, :foo, s(:args, s(:kwrestarg)), s(:send, nil, :bar, s(:kwargs, s(:forwarded_kwrestarg)))), %q{def foo(**); bar(**); end}, %q{ ~~ expression (send.kwargs.forwarded_kwrestarg)}, SINCE_3_2) assert_diagnoses( [:error, :no_anonymous_kwrestarg], %q{def foo; bar(**); end}, %q{}, SINCE_3_2) end def test_forwarded_kwrestarg_with_additional_kwarg assert_parses( s(:def, :foo, s(:args, s(:kwrestarg)), s(:send, nil, :bar, s(:kwargs, s(:forwarded_kwrestarg), s(:pair, s(:sym, :from_foo), s(:true))))), %q{def foo(**); bar(**, from_foo: true); end}, %q{ ~~ expression (send.kwargs.forwarded_kwrestarg)}, SINCE_3_2) refute_diagnoses( %q{def foo(**); bar(**, from_foo: true); end}, SINCE_3_2) assert_diagnoses( [:warning, :duplicate_hash_key], %q{def 
foo(**); bar(foo: 1, **, foo: 2); end}, %q{ ^^^ location}, SINCE_3_2) end def test_forwarded_argument_with_kwrestarg assert_parses( s(:def, :foo, s(:args, s(:arg, :argument), s(:kwrestarg)), s(:send, nil, :bar, s(:lvar, :argument), s(:kwargs, s(:forwarded_kwrestarg)))), %q{def foo(argument, **); bar(argument, **); end}, %q{ ~~ expression (send.kwargs.forwarded_kwrestarg)}, SINCE_3_2) assert_diagnoses( [:error, :no_anonymous_kwrestarg], %q{def foo; bar(argument, **); end}, %q{}, SINCE_3_2) end def test_if_while_after_class__since_32 assert_parses( s(:module, s(:const, s(:if, s(:true), s(:const, nil, :Object), nil), :Kernel), nil), %q{module if true; Object end::Kernel; end}, %q{}, SINCE_3_2) assert_parses( s(:module, s(:const, s(:while, s(:true), s(:break, s(:const, nil, :Object))), :Kernel), nil), %q{module while true; break Object end::Kernel; end}, %q{}, SINCE_3_2) assert_parses( s(:class, s(:const, s(:if, s(:true), s(:const, nil, :Object), nil), :Kernel), nil, nil), %q{class if true; Object end::Kernel; end}, %q{}, SINCE_3_2) assert_parses( s(:class, s(:const, s(:while, s(:true), s(:break, s(:const, nil, :Object))), :Kernel), nil, nil), %q{class while true; break Object end::Kernel; end}, %q{}, SINCE_3_2) end def test_bare_backslash assert_diagnoses( [:error, :bare_backslash], %q{x = \ 42}, %q{ ^ location}, ALL_VERSIONS) end def test_newline_in_hash_argument assert_parses( s(:send, s(:send, nil, :obj), :set, s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 1)))), %Q{obj.set foo:\n1}, %q{}, SINCE_3_2) assert_parses( s(:send, s(:send, nil, :obj), :set, s(:kwargs, s(:pair, s(:sym, :foo), s(:int, 1)))), %Q{obj.set "foo":\n1}, %q{}, SINCE_3_2) assert_parses( s(:case_match, s(:lvar, :foo), s(:in_pattern, s(:hash_pattern, s(:match_var, :a)), nil, s(:begin, s(:int, 0), s(:true))), s(:in_pattern, s(:hash_pattern, s(:match_var, :b)), nil, s(:begin, s(:int, 0), s(:true))), nil), %Q{case foo\nin a:\n0\ntrue\nin "b":\n0\ntrue\nend}, %q{}, SINCE_3_2) end def test_multiple_pattern_matches code = '{a: 0} => a:' node = s(:match_pattern, s(:hash, s(:pair, s(:sym, :a), s(:int, 0))), s(:hash_pattern, s(:match_var, :a))) assert_parses( s(:begin, node, node), %Q{#{code}\n#{code}}, %q{}, SINCE_3_1) code = '{a: 0} in a:' node = s(:match_pattern_p, s(:hash, s(:pair, s(:sym, :a), s(:int, 0))), s(:hash_pattern, s(:match_var, :a))) assert_parses( s(:begin, node, node), %Q{#{code}\n#{code}}, %q{}, SINCE_3_1) end def test_kwoptarg_with_kwrestarg_and_forwarded_args assert_parses( s(:def, :f, s(:args, s(:kwoptarg, :a, s(:nil)), s(:kwrestarg)), s(:send, nil, :b, s(:kwargs, s(:forwarded_kwrestarg)))), %Q{def f(a: nil, **); b(**) end}, %q{}, SINCE_3_2) end def test_argument_forwarding_with_anon_rest_kwrest_and_block assert_diagnoses( [:error, :unexpected_token, { token: 'tBDOT3' }], %q{def f(*, **, &); g(...); end}, %q{}, SINCE_3_2) assert_diagnoses( [:error, :no_anonymous_restarg], %q{def f(...); g(*); end}, %q{}, SINCE_3_2) assert_diagnoses( [:error, :no_anonymous_restarg], %q{def f(...); g(0, *); end}, %q{}, SINCE_3_2) assert_diagnoses( [:error, :no_anonymous_kwrestarg], %q{def f(...); g(**); end}, %q{}, SINCE_3_2) assert_diagnoses( [:error, :no_anonymous_kwrestarg], %q{def f(...); g(x: 1, **); end}, %q{}, SINCE_3_2) end def test_ruby_bug_18878 assert_parses( s(:block, s(:send, s(:const, nil, :Foo), :Bar), s(:args, s(:procarg0, s(:arg, :a))), s(:int, 42)), 'Foo::Bar { |a| 42 }', %q{}, SINCE_3_3) end def test_ruby_bug_19281 assert_parses( s(:send, nil, :p, s(:begin, s(:int, 1), s(:int, 2)), s(:begin, s(:int, 3)), s(:begin, 
s(:int, 4))), 'p (1;2),(3),(4)', %q{}, SINCE_3_3) assert_parses( s(:send, nil, :p, s(:begin), s(:begin), s(:begin)), 'p (;),(),()', %q{}, SINCE_3_3) assert_parses( s(:send, s(:send, nil, :a), :b, s(:begin, s(:int, 1), s(:int, 2)), s(:begin, s(:int, 3)), s(:begin, s(:int, 4))), 'a.b (1;2),(3),(4)', %q{}, SINCE_3_3) assert_parses( s(:send, s(:send, nil, :a), :b, s(:begin), s(:begin), s(:begin)), 'a.b (;),(),()', %q{}, SINCE_3_3) end def test_ungettable_gvar assert_diagnoses( [:error, :gvar_name, { :name => '$01234' }], '$01234', '^^^^^^ location', ALL_VERSIONS) assert_diagnoses( [:error, :gvar_name, { :name => '$01234' }], '"#$01234"', ' ^^^^^^ location', ALL_VERSIONS) end def test_it_warning_in_33 refute_diagnoses( 'if false; it; end', ALL_VERSIONS) refute_diagnoses( 'def foo; it; end', ALL_VERSIONS) assert_diagnoses( [:warning, :ambiguous_it_call, {}], '0.times { it }', ' ^^ location', ['3.3']) refute_diagnoses( '0.times { || it }', ALL_VERSIONS) refute_diagnoses( '0.times { |_n| it }', ALL_VERSIONS) assert_diagnoses( [:warning, :ambiguous_it_call, {}], '0.times { it; it = 1; it }', ' ^^ location', ['3.3']) refute_diagnoses( '0.times { it = 1; it }', ALL_VERSIONS) refute_diagnoses( 'it = 1; 0.times { it }', ALL_VERSIONS) end def test_anonymous_params_in_nested_scopes assert_diagnoses( [:error, :ambiguous_anonymous_blockarg, {}], 'def b(&) ->(&) {c(&)} end', ' ^ location', SINCE_3_3) assert_diagnoses( [:error, :ambiguous_anonymous_restarg, {}], 'def b(*) ->(*) {c(*)} end', ' ^ location', SINCE_3_3) assert_diagnoses( [:error, :ambiguous_anonymous_restarg, {}], 'def b(a, *) ->(*) {c(1, *)} end', ' ^ location', SINCE_3_3) assert_diagnoses( [:error, :ambiguous_anonymous_restarg, {}], 'def b(*) ->(a, *) {c(*)} end', ' ^ location', SINCE_3_3) assert_diagnoses( [:error, :ambiguous_anonymous_kwrestarg, {}], 'def b(**) ->(**) {c(**)} end', ' ^^ location', SINCE_3_3) assert_diagnoses( [:error, :ambiguous_anonymous_kwrestarg, {}], 'def b(k:, **) ->(**) {c(k: 1, **)} end', ' ^^ location', SINCE_3_3) assert_diagnoses( [:error, :ambiguous_anonymous_kwrestarg, {}], 'def b(**) ->(k:, **) {c(**)} end', ' ^^ location', SINCE_3_3) refute_diagnoses( 'def b(&) ->(&) {c()} end', SINCE_3_3) refute_diagnoses( 'def b(*) ->(*) {c()} end', SINCE_3_3) refute_diagnoses( 'def b(**) ->(**) {c()} end', SINCE_3_3) end def test_parser_bug_989 assert_parses( s(:str, "\t\tcontent\n"), "\t<<-HERE\n\t\tcontent\n\tHERE", %q{}, ALL_VERSIONS) end def test_parser_bug_19370 refute_diagnoses( 'def b(&) ->() {c(&)} end', SINCE_3_3) refute_diagnoses( 'def b(*) ->() {c(*)} end', SINCE_3_3) refute_diagnoses( 'def b(a, *) ->() {c(1, *)} end', SINCE_3_3) refute_diagnoses( 'def b(*) ->(a) {c(*)} end', SINCE_3_3) refute_diagnoses( 'def b(**) ->() {c(**)} end', SINCE_3_3) refute_diagnoses( 'def b(k:, **) ->() {c(k: 1, **)} end', SINCE_3_3) refute_diagnoses( 'def b(**) ->(k:) {c(**)} end', SINCE_3_3) end end parser-3.3.4.2/test/test_runner_parse.rb000066400000000000000000000027461465510415600202430ustar00rootroot00000000000000# frozen_string_literal: true require 'helper' require 'open3' class TestRunnerParse < Minitest::Test PATH_TO_RUBY_PARSE = File.expand_path('../bin/ruby-parse', __dir__).freeze def assert_prints(argv, expected_output) stdout, _stderr, status = Open3.capture3(PATH_TO_RUBY_PARSE, *argv) assert_equal 0, status.to_i assert_includes(stdout, expected_output) end def test_emit_ruby assert_prints ['--emit-ruby', '-e 123'], 's(:int, 123)' end def test_emit_modern_ruby assert_prints ['-e', '->{}'], '(lambda)' assert_prints ['-e', 
'self[1] = 2'], 'indexasgn' end def test_emit_legacy assert_prints ['--legacy', '-e', '->{}'], '(send nil :lambda)' assert_prints ['--legacy', '-e', 'self[1] = 2'], ':[]=' end def test_emit_legacy_lambda assert_prints ['--legacy-lambda', '-e', '->{}'], '(send nil :lambda)' assert_prints ['--legacy-lambda', '-e', 'self[1] = 2'], 'indexasgn' end def test_emit_json assert_prints ['--emit-json', '-e', '123'], '["int",123]' end def test_emit_ruby_empty assert_prints ['--emit-ruby', '-e', ''], "\n" end def test_emit_json_empty assert_prints ['--emit-json', '-e', ''], "\n" end def test_stdin_input assert_prints ['--emit-ruby', '-', { stdin_data: '123' }], 's(:int, 123)' end end parser-3.3.4.2/test/test_runner_rewrite.rb000066400000000000000000000030061465510415600206000ustar00rootroot00000000000000# frozen_string_literal: true require 'pathname' require 'fileutils' require 'shellwords' require 'open3' BASE_DIR = Pathname.new(__FILE__) + '..' require (BASE_DIR + 'helper').expand_path class TestRunnerRewrite < Minitest::Test def assert_rewriter_output(path, args, input: 'input.rb', output: 'output.rb', expected_output: '', expected_error: '') @ruby_rewrite = ::BASE_DIR.expand_path + '../bin/ruby-rewrite' @test_dir = ::BASE_DIR + path @fixtures_dir = @test_dir + 'fixtures' Dir.mktmpdir("parser", ::BASE_DIR.expand_path.to_s) do |tmp_dir| tmp_dir = Pathname.new(tmp_dir) sample_file = tmp_dir + "#{path}.rb" sample_file_expanded = sample_file.expand_path expected_file = @fixtures_dir + output FileUtils.cp(@fixtures_dir + input, sample_file_expanded) stdout, stderr, _exit_code = Dir.chdir @test_dir do Open3.capture3 %Q{ #{Shellwords.escape(@ruby_rewrite.to_s)} #{args} \ #{Shellwords.escape(sample_file_expanded.to_s)} } end assert_equal expected_output.chomp, stdout.chomp assert_match expected_error.chomp, stderr.chomp assert_equal File.read(expected_file.expand_path), File.read(sample_file) end end def test_rewriter_bug_163 assert_rewriter_output('bug_163', '--modify -l rewriter.rb', expected_error: Parser::Rewriter::DEPRECATION_WARNING ) end def test_tree_rewriter assert_rewriter_output('using_tree_rewriter', '-l using_tree_rewriter.rb --modify') end end parser-3.3.4.2/test/test_source_buffer.rb000066400000000000000000000112471465510415600203650ustar00rootroot00000000000000# frozen_string_literal: true require 'helper' class TestSourceBuffer < Minitest::Test def setup @buffer = Parser::Source::Buffer.new('(string)') end def test_initialize buffer = Parser::Source::Buffer.new(nil) assert_equal '', buffer.name buffer = Parser::Source::Buffer.new(Pathname('a/b')) assert_equal 'a/b', buffer.name buffer = Parser::Source::Buffer.new('(string)') assert_equal '(string)', buffer.name assert_equal 1, buffer.first_line buffer = Parser::Source::Buffer.new('(string)', 5) assert_equal 5, buffer.first_line buffer = Parser::Source::Buffer.new('(string)', source: '2+2') assert_equal '2+2', buffer.source end def test_source_setter @buffer.source = 'foo' assert_equal 'foo', @buffer.source assert @buffer.source.frozen? 
end def test_source_double_setter @buffer.source = 'foo' assert_raises(ArgumentError) do @buffer.source = 'bar' end end def test_source_setter_encoding_error error = assert_raises EncodingError do @buffer.source = [ '# encoding: utf-8', "# \xf9" ].join("\n") end assert_match(/invalid byte sequence in UTF\-8/, error.message) end def test_read tempfile = Tempfile.new('parser') tempfile.write('foobar') tempfile.flush buffer = Parser::Source::Buffer.new(tempfile.path) buffer.read assert_equal 'foobar', buffer.source assert buffer.source.frozen? end def test_uninitialized assert_raises RuntimeError do @buffer.source end end def test_decompose_position @buffer.source = "1\nfoo\nbar" assert_equal [1, 0], @buffer.decompose_position(0) assert_equal [1, 1], @buffer.decompose_position(1) assert_equal [2, 0], @buffer.decompose_position(2) assert_equal [3, 1], @buffer.decompose_position(7) assert_equal [3, 36], @buffer.decompose_position(42) assert_equal [0, -52], @buffer.decompose_position(-42) end def test_decompose_position_mapped @buffer = Parser::Source::Buffer.new('(string)', 5) @buffer.source = "1\nfoo\nbar" assert_equal [5, 0], @buffer.decompose_position(0) assert_equal [6, 0], @buffer.decompose_position(2) end def test_line @buffer.source = "1\nfoo\nbar" assert_equal '1', @buffer.source_line(1) assert_equal 'foo', @buffer.source_line(2) end def test_line_mutate @buffer.source = "1\nfoo\nbar" assert_equal '1', @buffer.source_line(1) @buffer.source_line(1)[0] = '2' assert_equal '1', @buffer.source_line(1) end def test_line_mapped @buffer = Parser::Source::Buffer.new('(string)', 5) @buffer.source = "1\nfoo\nbar" assert_equal '1', @buffer.source_line(5) assert_equal 'foo', @buffer.source_line(6) end def test_line_range @buffer = Parser::Source::Buffer.new('(string)', 5) @buffer.source = "abc\ndef\nghi\n" assert_raises IndexError do @buffer.line_range(4) end assert_equal 'abc', @buffer.line_range(5).source assert_equal 'def', @buffer.line_range(6).source assert_equal 'ghi', @buffer.line_range(7).source assert_equal '', @buffer.line_range(8).source assert_raises IndexError do @buffer.line_range(9) end end def test_source_range @buffer = Parser::Source::Buffer.new('(string)', 5) assert_raises RuntimeError do @buffer.source_range end @buffer.source = "abc\ndef\nghi\n" assert_equal Parser::Source::Range.new(@buffer, 0, @buffer.source.size), @buffer.source_range end def test_last_line @buffer.source = "1\nfoo\nbar" assert_equal 3, @buffer.last_line @buffer = Parser::Source::Buffer.new('(string)', 5) @buffer.source = "" assert_equal 5, @buffer.last_line @buffer = Parser::Source::Buffer.new('(string)', 5) @buffer.source = "abc\n" assert_equal 6, @buffer.last_line end def test_source_lines @buffer.source = "1\nfoo\nbar\n" assert_equal ['1', 'foo', 'bar', ''], @buffer.source_lines assert @buffer.source_lines.frozen? assert @buffer.source_lines.all?(&:frozen?) 
@buffer = Parser::Source::Buffer.new('(string)', 5) @buffer.source = "foo\nbar" assert_equal ['foo', 'bar'], @buffer.source_lines end def test_freeze @buffer.source = "1\nfoo\nbar\n" @buffer.freeze @buffer.source_lines @buffer.source_range assert_equal 'foo', @buffer.line_range(2).source end def test_ractor @buffer.source = "hello\n:world\nstrange\nodd" ::Ractor.make_shareable(@buffer) assert ::Ractor.shareable?(@buffer) assert_equal ':world', @buffer.line_range(2).source end if defined?(::Ractor) def test_inspect assert_equal '#', @buffer.inspect end end parser-3.3.4.2/test/test_source_comment.rb000066400000000000000000000014501465510415600205510ustar00rootroot00000000000000# frozen_string_literal: true require 'helper' class TestSourceComment < Minitest::Test def setup @buf = Parser::Source::Buffer.new('(string)', source: "# foo\n=begin foo\nbar\n=end baz\n") end def range(s, e) Parser::Source::Range.new(@buf, s, e) end def test_initialize comment = Parser::Source::Comment.new(range(0, 5)) assert comment.frozen? end def test_text comment = Parser::Source::Comment.new(range(0, 5)) assert_equal '# foo', comment.text end def test_inline comment = Parser::Source::Comment.new(range(0, 5)) assert_equal :inline, comment.type assert comment.inline? end def test_document comment = Parser::Source::Comment.new(range(6, 25)) assert_equal :document, comment.type assert comment.document? end end parser-3.3.4.2/test/test_source_comment_associator.rb000066400000000000000000000236141465510415600230060ustar00rootroot00000000000000# frozen_string_literal: true require 'helper' require 'parser/ruby18' class TestSourceCommentAssociator < Minitest::Test def parse_with_comments(code) parser = Parser::Ruby18.new buffer = Parser::Source::Buffer.new('(comments)') buffer.source = code parser.parse_with_comments(buffer) end def associate(code) ast, comments = parse_with_comments(code) associations = Parser::Source::Comment.associate(ast, comments) [ ast, associations ] end def associate_locations(code) ast, comments = parse_with_comments(code) associations = Parser::Source::Comment.associate_locations(ast, comments) [ ast, associations ] end def associate_by_identity(code) ast, comments = parse_with_comments(code) associations = Parser::Source::Comment.associate_by_identity(ast, comments) [ ast, associations ] end def test_associate ast, associations = associate(<<-END) #!/usr/bin/env ruby # coding: utf-8 # class preceding # another class preceding class Foo # class keyword line # method foo preceding def foo puts 'foo' end # method foo decorating # method bar preceding def bar # expression preceding 1 + # 1 decorating 2 # method bar sparse end # method bar decorating # class sparse end # class decorating END klass_node = ast klass_name_node = klass_node.children[0] foo_node = klass_node.children[2].children[0] # def foo bar_node = klass_node.children[2].children[1] # def bar expr_node = bar_node.children[2] # 1 + 2 one_node = expr_node.children[0] # 1 assert_equal 6, associations.size assert_equal [ '# class preceding', '# another class preceding', '# class sparse', '# class decorating' ], associations[klass_node].map(&:text) assert_equal [ '# class keyword line' ], associations[klass_name_node].map(&:text) assert_equal [ '# method foo preceding', '# method foo decorating' ], associations[foo_node].map(&:text) assert_equal [ '# method bar preceding', '# method bar sparse', '# method bar decorating' ], associations[bar_node].map(&:text) assert_equal [ '# expression preceding' ], associations[expr_node].map(&:text) 
assert_equal [ '# 1 decorating' ], associations[one_node].map(&:text) end def setup_dupe_statement(method = :associate) @ast, @associations = send(method, <<-END) class Foo def bar f1 # comment on 1st call to f1 f2 f1 # comment on 2nd call to f1 end end END _klass_node = @ast @method_node = @ast.children[2] @body = @method_node.children[2] @f1_1_node = @body.children[0] @f1_2_node = @body.children[2] end # The bug below is fixed by using associate_locations def test_associate_dupe_statement setup_dupe_statement assert_equal 1, @associations.size assert_equal ['# comment on 1st call to f1', '# comment on 2nd call to f1'], @associations[@f1_1_node].map(&:text) assert_equal ['# comment on 1st call to f1', '# comment on 2nd call to f1'], @associations[@f1_2_node].map(&:text) end def test_associate_by_identity_dupe_statement setup_dupe_statement(:associate_by_identity) assert_equal 2, @associations.size assert_equal ['# comment on 1st call to f1'], @associations[@f1_1_node].map(&:text) assert_equal ['# comment on 2nd call to f1'], @associations[@f1_2_node].map(&:text) end def test_associate_locations ast, associations = associate_locations(<<-END) #!/usr/bin/env ruby # coding: utf-8 # class preceding # another class preceding class Foo # class keyword line # method foo preceding def foo puts 'foo' end # method foo decorating # method bar preceding def bar # expression preceding 1 + # 1 decorating 2 # method bar sparse end # method bar decorating # class sparse end # class decorating END klass_node = ast klass_name_node = klass_node.children[0] foo_node = klass_node.children[2].children[0] # def foo bar_node = klass_node.children[2].children[1] # def bar expr_node = bar_node.children[2] # 1 + 2 one_node = expr_node.children[0] # 1 assert_equal 6, associations.size assert_equal [ '# class preceding', '# another class preceding', '# class sparse', '# class decorating' ], associations[klass_node.loc].map(&:text) assert_equal [ '# class keyword line' ], associations[klass_name_node.loc].map(&:text) assert_equal [ '# method foo preceding', '# method foo decorating' ], associations[foo_node.loc].map(&:text) assert_equal [ '# method bar preceding', '# method bar sparse', '# method bar decorating' ], associations[bar_node.loc].map(&:text) assert_equal [ '# expression preceding' ], associations[expr_node.loc].map(&:text) assert_equal [ '# 1 decorating' ], associations[one_node.loc].map(&:text) end def test_associate_locations_dupe_statement ast, associations = associate_locations(<<-END) class Foo def bar f1 # comment on 1st call to f1 f2 f1 # comment on 2nd call to f1 end end END _klass_node = ast method_node = ast.children[2] body = method_node.children[2] f1_1_node = body.children[0] f1_2_node = body.children[2] assert_equal 2, associations.size assert_equal ['# comment on 1st call to f1'], associations[f1_1_node.loc].map(&:text) assert_equal ['# comment on 2nd call to f1'], associations[f1_2_node.loc].map(&:text) end def test_associate_no_body ast, associations = associate(<<-END) # foo class Foo end END assert_equal 1, associations.size assert_equal ['# foo'], associations[ast].map(&:text) end def test_associate_empty_tree _ast, associations = associate("") assert_equal 0, associations.size end def test_associate_shebang_only _ast, associations = associate(<<-END) #!ruby class Foo end END assert_equal 0, associations.size end def test_associate_frozen_string_literal _ast, associations = associate(<<-END) # frozen_string_literal: true class Foo end END assert_equal 0, associations.size end def 
  def test_associate_frozen_string_literal_dash_star_dash
    _ast, associations = associate(<<-END)
      # -*- frozen_string_literal: true -*-
      class Foo
      end
    END

    assert_equal 0, associations.size
  end

  def test_associate_frozen_string_literal_no_space_after_colon
    _ast, associations = associate(<<-END)
      # frozen_string_literal:true
      class Foo
      end
    END

    assert_equal 0, associations.size
  end

  def test_associate_warn_indent
    _ast, associations = associate(<<-END)
      # warn_indent: true
      class Foo
      end
    END

    assert_equal 0, associations.size
  end

  def test_associate_warn_indent_dash_star_dash
    _ast, associations = associate(<<-END)
      # -*- warn_indent: true -*-
      class Foo
      end
    END

    assert_equal 0, associations.size
  end

  def test_associate_warn_past_scope
    _ast, associations = associate(<<-END)
      # warn_past_scope: true
      class Foo
      end
    END

    assert_equal 0, associations.size
  end

  def test_associate_warn_past_scope_dash_star_dash
    _ast, associations = associate(<<-END)
      # -*- warn_past_scope: true -*-
      class Foo
      end
    END

    assert_equal 0, associations.size
  end

  def test_associate_multiple
    _ast, associations = associate(<<-END)
      # frozen_string_literal: true; warn_indent: true
      class Foo
      end
    END

    assert_equal 0, associations.size
  end

  def test_associate_multiple_dash_star_dash
    _ast, associations = associate(<<-END)
      # -*- frozen_string_literal: true; warn_indent: true -*-
      class Foo
      end
    END

    assert_equal 0, associations.size
  end

  def test_associate_no_comments
    _ast, associations = associate(<<-END)
      class Foo
      end
    END

    assert_equal 0, associations.size
  end

  def test_associate_comments_after_root_node
    _ast, associations = associate(<<-END)
      class Foo
      end
      # not associated
    END

    assert_equal 0, associations.size
  end

  def test_associate_stray_comment
    ast, associations = associate(<<-END)
      def foo
        # foo
      end
    END

    assert_equal 1, associations.size
    assert_equal ['# foo'], associations[ast].map(&:text)
  end

  def test_associate___ENCODING__
    ast, associations = associate(<<-END)
      # foo
      __ENCODING__
    END

    assert_equal 1, associations.size
    assert_equal ['# foo'], associations[ast].map(&:text)
  end

  def test_associate_inside_heredoc ast, associations = associate(<<-END) <", source: "1") ast = parser_for_ruby_version('1.8').parse(buf) assert_equal [:expression, :operator], ast.loc.to_hash.keys.sort_by(&:to_s) end end

parser-3.3.4.2/test/test_source_range.rb000066400000000000000000000124061465510415600202060ustar00rootroot00000000000000
# frozen_string_literal: true

require 'helper'

class TestSourceRange < Minitest::Test
  def setup
    @buf = Parser::Source::Buffer.new('(string)', source: "foobar\nbaz")

    @sr1_3 = Parser::Source::Range.new(@buf, 1, 3)
    @sr2_2 = Parser::Source::Range.new(@buf, 2, 2)
    @sr3_3 = Parser::Source::Range.new(@buf, 3, 3)
    @sr2_6 = Parser::Source::Range.new(@buf, 2, 6)
    @sr5_8 = Parser::Source::Range.new(@buf, 5, 8)
    @sr5_7 = Parser::Source::Range.new(@buf, 5, 7)
    @sr6_7 = Parser::Source::Range.new(@buf, 6, 7)
  end

  def test_initialize
    assert_equal 1, @sr1_3.begin_pos
    assert_equal 3, @sr1_3.end_pos
    assert @sr1_3.frozen?
  end

  def test_size
    assert_equal 4, @sr2_6.size
  end

  def test_bad_size
    assert_raises ArgumentError do
      Parser::Source::Range.new(@buf, 2, 1)
    end
  end

  def test_join
    sr = @sr1_3.join(@sr5_8)

    assert_equal 1, sr.begin_pos
    assert_equal 8, sr.end_pos
  end

  def test_intersect
    assert_equal 2, @sr1_3.intersect(@sr2_6).begin_pos
    assert_equal 3, @sr1_3.intersect(@sr2_6).end_pos
    assert_equal 5, @sr2_6.intersect(@sr5_8).begin_pos
    assert_equal 6, @sr2_6.intersect(@sr5_8).end_pos
    assert @sr1_3.intersect(@sr5_8) == nil
    assert_equal 2, @sr1_3.intersect(@sr2_2).begin_pos
    assert_equal 2, @sr1_3.intersect(@sr2_2).end_pos
    assert_equal 2, @sr2_2.intersect(@sr2_2).begin_pos
    assert_equal 2, @sr2_2.intersect(@sr2_2).end_pos
  end

  def test_overlaps
    assert !@sr1_3.overlaps?(@sr5_8)
    assert @sr1_3.overlaps?(@sr2_6)
    assert @sr2_6.overlaps?(@sr5_8)
    assert @sr1_3.overlaps?(@sr2_2)
    assert !@sr2_6.overlaps?(@sr2_2)
    assert @sr2_2.overlaps?(@sr2_2)
  end

  def check_relationship(relationship, sr1, sr2, reflexive_relationship = relationship)
    # Double check equality
    assert_equal true, sr1 == sr1.dup
    assert_equal true, sr1 != sr2

    # Check relationships and reflexivity
    assert_equal true, sr1.send(relationship, sr2)
    assert_equal true, sr2.send(reflexive_relationship, sr1)

    # Check it's not true for itself
    assert_equal false, sr1.send(relationship, sr1)

    # Check other relationships return false
    others = %i[disjoint? crossing? contains? contained?] - [relationship, reflexive_relationship]
    others.each do |other_rel|
      assert_equal false, sr1.send(other_rel, sr2), other_rel
      assert_equal false, sr2.send(other_rel, sr1), other_rel
    end
  end

  def test_disjoint
    check_relationship(:disjoint?, @sr1_3, @sr5_8)
    check_relationship(:disjoint?, @sr2_2, @sr2_6)
    check_relationship(:disjoint?, @sr2_2, @sr3_3)
    check_relationship(:disjoint?, @sr2_6, @sr6_7)
  end

  def test_crossing
    check_relationship(:crossing?, @sr1_3, @sr2_6)
  end

  def test_containment
    check_relationship(:contained?, @sr2_2, @sr1_3, :contains?)
    check_relationship(:contained?, @sr5_7, @sr5_8, :contains?)
    check_relationship(:contained?, @sr6_7, @sr5_8, :contains?)
  end

  def test_order
    assert_equal 0, @sr1_3 <=> @sr1_3
    assert_equal(-1, @sr1_3 <=> @sr5_8)
    assert_equal(-1, @sr2_2 <=> @sr2_6)
    assert_equal(+1, @sr2_6 <=> @sr2_2)
    assert_equal(-1, @sr1_3 <=> @sr2_6)
    assert_equal(+1, @sr2_2 <=> @sr1_3)
    assert_equal(-1, @sr1_3 <=> @sr2_2)
    assert_equal(-1, @sr5_7 <=> @sr5_8)
    assert_nil @sr1_3 <=> Parser::Source::Range.new(@buf.dup, 1, 3)
    assert_nil @sr1_3 <=> 4
  end

  def test_empty
    assert !@sr1_3.empty?
    assert @sr2_2.empty?
  end

  def test_line
    sr = Parser::Source::Range.new(@buf, 7, 8)
    assert_equal 2, sr.line
  end

  def test_source_line
    sr = Parser::Source::Range.new(@buf, 7, 8)
    assert_equal 'baz', sr.source_line
  end

  def test_columns
    sr = Parser::Source::Range.new(@buf, 7, 8)
    assert_equal 0, sr.begin.column
    assert_equal 1, sr.end.column
    assert_equal 0...1, sr.column_range
  end

  def test_begin_end
    sr_beg = @sr2_6.begin
    assert_equal 2, sr_beg.begin_pos
    assert_equal 2, sr_beg.end_pos

    sr_end = @sr2_6.end
    assert_equal 6, sr_end.begin_pos
    assert_equal 6, sr_end.end_pos
  end

  def test_source
    sr = Parser::Source::Range.new(@buf, 0, 3)
    assert_equal 'foo', sr.source

    sr_multi = Parser::Source::Range.new(@buf, 0, 10)
    assert_equal "foobar\nbaz", sr_multi.source
  end

  def test_is?
    sr = Parser::Source::Range.new(@buf, 0, 3)
    assert sr.is?('foo')
    refute sr.is?('bar')
  end

  def test_to_range
    sr = Parser::Source::Range.new(@buf, 10, 20)
    assert_equal (10...20), sr.to_range
  end

  def test_to_s
    sr = Parser::Source::Range.new(@buf, 8, 9)
    assert_equal '(string):2:2', sr.to_s
  end

  def test_with
    sr2 = @sr1_3.with(begin_pos: 2)
    sr3 = @sr1_3.with(end_pos: 4)

    assert_equal 2, sr2.begin_pos
    assert_equal 3, sr2.end_pos
    assert_equal 1, sr3.begin_pos
    assert_equal 4, sr3.end_pos
  end

  def test_eql_and_hash
    assert_equal false, @sr1_3.eql?(@sr3_3)
    assert @sr1_3.hash != @sr3_3.hash

    also_1_3 = @sr3_3.with(begin_pos: 1)
    assert_equal true, @sr1_3.eql?(also_1_3)
    assert_equal @sr1_3.hash, also_1_3.hash

    buf2 = Parser::Source::Buffer.new('(string)', source: "foobar\nbaz")
    from_other_buf = Parser::Source::Range.new(buf2, 1, 3)
    assert_equal false, @sr1_3.eql?(from_other_buf)
    assert @sr1_3.hash != from_other_buf.hash
  end
end

parser-3.3.4.2/test/test_source_rewriter.rb000066400000000000000000000331211465510415600207520ustar00rootroot00000000000000
# frozen_string_literal: true

require 'helper'

class TestSourceRewriter < Minitest::Test
  def setup
    @buf = Parser::Source::Buffer.new('(rewriter)', source: 'foo bar baz')

    Parser::Source::Rewriter.warned_of_deprecation = true
    @rewriter = Parser::Source::Rewriter.new(@buf)
  end

  def range(from, len)
    Parser::Source::Range.new(@buf, from, from + len)
  end

  def test_remove
    assert_equal 'foo baz',
      @rewriter.
        remove(range(4, 3)).
        process
  end

  def test_insert_before
    assert_equal 'foo quux bar baz',
      @rewriter.
        insert_before(range(4, 3), 'quux ').
        process
  end

  def test_insert_after
    assert_equal 'foo bar quux baz',
      @rewriter.
        insert_after(range(4, 3), ' quux').
        process
  end

  def test_replace
    assert_equal 'foo quux baz',
      @rewriter.
        replace(range(4, 3), 'quux').
        process
  end

  def test_composing_asc
    assert_equal 'foo---bar---baz',
      @rewriter.
        replace(range(3, 1), '---').
        replace(range(7, 1), '---').
        process
  end

  def test_composing_desc
    assert_equal 'foo---bar---baz',
      @rewriter.
        replace(range(7, 1), '---').
        replace(range(3, 1), '---').
        process
  end

  #
  # Merging/clobbering of overlapping edits
  #

  def test_insertion_just_before_replace
    assert_equal 'foostrawberry jam---bar baz',
      @rewriter.
        replace(range(3, 1), '---').
        insert_before(range(3, 1), 'strawberry jam').
        process
  end

  def test_insertion_just_after_replace
    assert_equal 'foo---strawberry jam baz',
      @rewriter.
        replace(range(3, 4), '---').
        insert_after(range(3, 4), 'strawberry jam').
        process
  end

  def test_insertion_just_before_remove
    assert_equal 'foostrawberry jambar baz',
      @rewriter.
        remove(range(3, 1)).
        insert_before(range(3, 1), 'strawberry jam').
        process
  end

  def test_insertion_just_after_remove
    assert_equal 'foostrawberry jam baz',
      @rewriter.
        remove(range(3, 4)).
        insert_after(range(3, 4), 'strawberry jam').
        process
  end

  def test_insertion_just_before_replace_at_buffer_start
    assert_equal 'strawberry jam--- bar baz',
      @rewriter.
        replace(range(0, 3), '---').
        insert_before(range(0, 1), 'strawberry jam').
        process
  end

  def test_insertion_just_after_replace_at_buffer_end
    assert_equal 'foo bar ---strawberry jam',
      @rewriter.
        replace(range(8, 3), '---').
        insert_after(range(9, 2), 'strawberry jam').
        process
  end

  def test_insertion_just_before_remove_at_buffer_start
    assert_equal 'strawberry bar baz',
      @rewriter.
        remove(range(0, 3)).
        insert_before(range(0, 1), 'strawberry').
        process
  end

  def test_insertion_just_after_remove_at_buffer_end
    assert_equal 'foo bar strawberry',
      @rewriter.
        remove(range(8, 3)).
        insert_after(range(10, 1), 'strawberry').
        process
  end

  def test_multiple_insertions_at_same_location_clobber
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        insert_before(range(0, 11), '<').
        insert_after( range(0, 11), '>').
        insert_before(range(0, 7), '(')
    end
  end

  def test_intentional_multiple_insertions_at_same_location
    assert_equal 'foo [(bar)] baz',
      @rewriter.
        insert_before_multi(range(4, 0), '(').
        insert_after_multi(range(7, 0), ')').
        insert_before_multi(range(4, 0), '[').
        insert_after_multi(range(7, 0), ']').
        process
  end

  def test_insertion_within_replace_clobber
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        replace(range(3, 2), '<').
        insert_after(range(3, 1), '>')
    end
  end

  def test_insertion_within_remove_clobber
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        remove(range(3, 2)).
        insert_after(range(3, 1), '>')
    end
  end

  def test_replace_overlapping_insertion_clobber
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        insert_after(range(3, 1), '>').
        replace(range(3, 2), '<')
    end
  end

  def test_remove_overlapping_insertion_clobber
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        insert_after(range(3, 1), '>').
        remove(range(3, 2))
    end
  end

  def test_multi_insertion_within_replace_clobber
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        replace(range(3, 2), '<').
        insert_after_multi(range(3, 1), '>')
    end
  end

  def test_multi_insertion_within_remove_clobber
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        remove(range(3, 2)).
        insert_after_multi(range(3, 1), '>')
    end
  end

  def test_replace_overlapping_multi_insertion_clobber
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        insert_after_multi(range(3, 1), '>').
        replace(range(3, 2), '<')
    end
  end

  def test_remove_overlapping_multi_insertion_clobber
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        insert_after_multi(range(3, 1), '>').
        remove(range(3, 2))
    end
  end

  def test_insertion_on_merged_insertion_clobber
    # 2 insertions at the same point clobber each other, even if the 1st one
    # was merged with an adjacent edit, and even if the same text is being
    # inserted
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        insert_before(range(3, 1), '>').
        remove(range(3, 2)).
        insert_after(range(2, 1), '>')
    end
  end

  def test_insertion_merge_with_overlapping_replace
    assert_equal 'fo abc bar baz',
      @rewriter.
        insert_before(range(3, 1), 'abc').
        replace(range(2, 2), ' abc ').
        process
  end

  def test_replace_merge_with_overlapped_insertion
    assert_equal 'fo abc bar baz',
      @rewriter.
        replace(range(2, 2), ' abc ').
        insert_before(range(3, 1), 'abc').
        process
  end

  def test_replace_same_begin_larger_than_replaced_range_matching
    assert_equal 'foo supercalifragilistic baz',
      @rewriter.
        replace(range(4, 3), 'super').
        replace(range(4, 3), 'supercalifragilistic').
        process
  end

  def test_replace_same_begin_larger_than_replaced_range_non_matching
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        replace(range(4, 3), 'super!').
        replace(range(4, 3), 'supercalifragilistic')
    end
  end

  def test_overlapping_replace_left_smaller_than_replaced_matching
    assert_equal 'superbaz',
      @rewriter.
        replace(range(0, 7), 'super').
        replace(range(2, 6), 'per').
        process
  end

  def test_overlapping_replace_left_smaller_than_replaced_non_matching
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        replace(range(0, 7), 'super').
        replace(range(2, 8), 'perk')
    end
  end

  def test_overlapping_replace_left_larger_right_smaller_matching
    assert_equal 'foods baz',
      @rewriter.
        replace(range(1, 3), 'oods ').
        replace(range(3, 6), 'ds b').
        process
  end

  def test_overlapping_replace_left_larger_right_larger_matching
    assert_equal 'foods abcdefghijklm',
      @rewriter.
        replace(range(1, 3), 'oods ').
        replace(range(3, 8), 'ds abcdefghijklm').
        process
  end

  def test_overlapping_replace_left_larger_right_smaller_non_matching
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        replace(range(1, 3), 'oods ').
        replace(range(3, 6), 'ds')
    end
  end

  def test_overlapping_replace_left_larger_right_larger_non_matching
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        replace(range(1, 3), 'oods b').
        replace(range(3, 8), 'ds abcdefghijklm')
    end
  end

  def test_subsuming_replace_both_smaller_matching
    assert_equal 'food baz',
      @rewriter.
        replace(range(0, 7), 'food').
        replace(range(3, 3), 'd').
        process
  end

  def test_subsuming_replace_both_smaller_non_matching
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        replace(range(0, 7), 'foo').
        replace(range(3, 3), 'd')
    end
  end

  def test_subsuming_replace_both_larger_matching
    assert_equal 'foo barr baz',
      @rewriter.
        replace(range(0, 7), 'foo barr').
        replace(range(3, 3), ' bar').
        process
  end

  def test_subsuming_replace_both_larger_non_matching
    silence_diagnostics

    assert_raises Parser::ClobberingError do
      @rewriter.
        replace(range(0, 7), 'foo barr').
        replace(range(3, 3), ' bar ')
    end
  end

  def test_replaced_ranges_merge_when_furthest_right_range_is_not_furthest_left
    # regression test; previously, when actions were merged, the resulting
    # replaced range could be too small sometimes
    assert_equal 'foo_***_***',
      @rewriter.
        replace(range(3, 1), '_').
        replace(range(7, 1), '_').
        replace(range(4, 7), '***_***').
        process
  end

  def test_clobber
    diagnostics = []
    @rewriter.diagnostics.consumer = lambda do |diag|
      diagnostics << diag
    end

    assert_raises Parser::ClobberingError do
      @rewriter.
        replace(range(3, 1), '---').
        remove(range(3, 1))
    end

    assert_equal 2, diagnostics.count

    assert_equal :error, diagnostics.first.level
    assert_equal 'cannot remove 1 character(s)', diagnostics.first.message
    assert_equal range(3, 1), diagnostics.first.location

    assert_equal :note, diagnostics.last.level
    assert_equal "clobbered by: replace 1 character(s) with \"---\"", diagnostics.last.message
    assert_equal range(3, 1), diagnostics.last.location
  end

  def test_clobbering_error_backward_compatibility
    silence_diagnostics
    rescued = false

    # We use begin..rescue..end here rather than #assert_raises
    # since #assert_raises expects exact error class.
    begin
      @rewriter.
        replace(range(3, 1), '---').
        remove(range(3, 1))
    rescue RuntimeError => error
      rescued = true if error.message.include?('clobber')
    end

    assert rescued
  end

  def test_crossing_delete
    assert_equal 'faz',
      @rewriter.
        remove(range(1, 4)).
        remove(range(6, 3)).
        remove(range(4, 3)).
        process
  end

  def test_overlapping_replace
    assert_equal 'flippin flyin flapjackz',
      @rewriter.
        replace(range(1, 4), 'lippin f').
        replace(range(4, 4), 'pin flyin flap').
        replace(range(7, 3), ' flyin flapjack').
        process
  end

  def test_subsuming_delete
    assert_equal 'foo',
      @rewriter.
        remove(range(6, 3)).
        remove(range(7, 2)).
        remove(range(3, 8)).
        process
  end

  def test_subsuming_replace
    assert_equal 'freebie',
      @rewriter.
        replace(range(3, 3), 'ebi').
        replace(range(1, 10), 'reebie').
        replace(range(5, 2), 'ie').
        process
  end

  def test_equivalent_delete_insert_replace
    # A deletion + insertion just before or after the deleted range is
    # identical in every way to a replacement! So logically, they shouldn't
    # conflict.
    assert_equal 'tin bar baz',
      @rewriter.
        remove(range(0, 3)).                  # ' bar baz'
        insert_before(range(0, 1), 'tin').    # 'tin bar baz'
        replace(range(0, 3), 'tin').
        process
  end

  def test_transaction_returns_self
    assert_equal @rewriter, @rewriter.transaction {}
  end

  def test_transaction_commit
    silence_diagnostics

    # Original: 'foo bar baz'

    # Rewrite as 'foo BAR baz'
    @rewriter.replace(range(4, 3), 'BAR')

    # Rewrite as '( bar )'
    @rewriter.transaction do
      @rewriter.replace(range(0, 3), '(')
      @rewriter.replace(range(8, 3), ')')
    end

    @rewriter.replace(range(3, 1), '_')
    @rewriter.replace(range(7, 1), '_')

    assert_equal '(_BAR_)', @rewriter.process
  end

  def test_transaction_rollback
    silence_diagnostics

    # Original: 'foo bar baz'

    # Rewrite as 'foo bar BAZ'
    @rewriter.replace(range(8, 3), 'BAZ')

    assert_raises Parser::ClobberingError do
      # Trying to rewrite as '( bar )', but it fails
      @rewriter.transaction do
        @rewriter.replace(range(0, 3), '(')
        @rewriter.replace(range(8, 3), ')')
      end
    end

    @rewriter.replace(range(0, 3), 'FOO')

    assert_equal 'FOO bar BAZ', @rewriter.process
  end

  def test_nested_transaction_raises_error
    error = assert_raises RuntimeError do
      @rewriter.transaction do
        @rewriter.transaction do
        end
      end
    end

    assert_match(/nested/i, error.message)
  end

  def test_process_in_transaction_raises_error
    error = assert_raises RuntimeError do
      @rewriter.transaction do
        @rewriter.process
      end
    end

    assert_match(/transaction/, error.message)
  end

  def silence_diagnostics
    @rewriter.diagnostics.consumer = proc {}
  end
end

parser-3.3.4.2/test/test_source_rewriter_action.rb000066400000000000000000000020521465510415600223060ustar00rootroot00000000000000
# frozen_string_literal: true

require 'helper'

class TestSourceRewriterAction < Minitest::Test
  def setup
    @buf = Parser::Source::Buffer.new('(rewriter_action)', source: 'foo bar baz')
  end

  def range(from, len)
    Parser::Source::Range.new(@buf, from, from + len)
  end

  def action(range, replacement)
    Parser::Source::Rewriter::Action.new(range, replacement)
  end

  def test_accessors
    action = action(range(1, 10), 'foo')

    assert action.frozen?
    assert_equal range(1, 10), action.range
    assert_equal 'foo', action.replacement
  end

  def test_to_s_replace
    action = action(range(3, 1), 'foo')
    assert_equal "replace 1 character(s) with \"foo\"", action.to_s
  end

  def test_to_s_insert
    action = action(range(3, 0), 'foo')
    assert_equal "insert \"foo\"", action.to_s
  end

  def test_to_s_remove
    action = action(range(3, 2), '')
    assert_equal 'remove 2 character(s)', action.to_s
  end

  def test_to_s_nop
    action = action(range(3, 0), '')
    assert_equal 'do nothing', action.to_s
  end
end

parser-3.3.4.2/test/test_source_tree_rewriter.rb000066400000000000000000000275361465510415600220040ustar00rootroot00000000000000
# frozen_string_literal: true

require 'helper'

class TestSourceTreeRewriter < Minitest::Test
  module Setup
    def setup
      @buf = Parser::Source::Buffer.new('(rewriter)', source: 'puts(:hello, :world)')

      @hello = range(5, 6)
      @ll = range(8, 2)
      @comma_space = range(11,2)
      @world = range(13,6)
      @whole = range(0, @buf.source.length)
    end

    def range(from, len = nil)
      from, len = from.begin, from.end - from.begin unless len
      Parser::Source::Range.new(@buf, from, from + len)
    end
  end

  include Setup

  # Returns either:
  # - yield rewriter
  # - [diagnostic, ...] (Diagnostics)
  # - Parser::ClobberingError
  #
  def build(actions, **policy)
    diagnostics = []
    diags = -> { diagnostics.flatten.map(&:strip).join("\n") }
    rewriter = Parser::Source::TreeRewriter.new(@buf, **policy)
    rewriter.diagnostics.consumer = -> diag { diagnostics << diag.render }
    actions.each do |action, range, *args|
      rewriter.public_send(action, range, *args)
    end
    if diagnostics.empty?
      yield rewriter
    else
      diags.call
    end
  rescue ::Parser::ClobberingError => _e
    [::Parser::ClobberingError, diags.call]
  end

  # Returns either:
  # - String (Normal operation)
  # - [diagnostic, ...] (Diagnostics)
  # - Parser::ClobberingError
  #
  def apply(actions, **policy)
    build(actions, **policy) { |rewriter| rewriter.process }
  end

  # Expects ordered actions to be grouped together
  def check_actions(expected, grouped_actions, **policy)
    grouped_actions.permutation do |sequence|
      # [action, [action, action]] => [action, action, action]
      # except we can't use flatten because "action" are arrays themselves
      actions = sequence.flat_map { |group| group.first.is_a?(Array) ? group : [group] }
      assert_equal(expected, apply(actions, **policy))
    end
  end

  def assert_actions_result(expected, *actions, **rest)
    if expected == :raise
      diagnostic = rest.values.first
      check_actions([::Parser::ClobberingError, diagnostic], actions)
    elsif rest.empty?
      check_actions(expected, actions)
    else
      policy, diagnostic = rest.first
      check_actions(expected, actions, policy => :accept)
      check_actions(diagnostic, actions, policy => :warn)
      diagnostic.gsub!(/warning: /, 'error: ')
      check_actions([::Parser::ClobberingError, diagnostic], actions, policy => :raise)
    end
  end

  ### Simple cases

  def test_remove
    assert_actions_result 'puts(, :world)', [:remove, @hello]
  end

  def test_insert_before
    assert_actions_result 'puts(:hello, 42, :world)', [:insert_before, @world, '42, ']
  end

  def test_insert_after
    assert_actions_result 'puts(:hello, 42, :world)', [:insert_after, @hello, ', 42']
  end

  def test_wrap
    assert_actions_result 'puts([:hello], :world)', [:wrap, @hello, '[', ']']
  end

  def test_replace
    assert_actions_result 'puts(:hi, :world)', [:replace, @hello, ':hi']
  end

  #
  # All other cases, as per doc
  #

  def test_crossing_non_deletions
    check = [
      [:wrap, '(', ')'],
      [:remove],
      [:replace, 'xx'],
    ]
    check.combination(2) do |(action, *args), (action_b, *args_b)|
      next if action == :remove && action_b == :remove
      assert_actions_result :raise,
        [[action, @hello.join(@comma_space), *args],
         [action_b, @world.join(@comma_space), *args_b]],
        diagnostic: <<-DIAGNOSTIC.chomp
(rewriter):1:12: error: the rewriting action on:
(rewriter):1: puts(:hello, :world)
(rewriter):1: ^~~~~~~~
(rewriter):1:6: error: is crossing that on:
(rewriter):1: puts(:hello, :world)
(rewriter):1: ^~~~~~~~
      DIAGNOSTIC
    end
  end

  def test_crossing_deletions
    assert_actions_result 'puts()',
      [[:remove, @hello.join(@comma_space)],
       [:remove, @world.join(@comma_space)]],
      crossing_deletions: <<-DIAGNOSTIC.chomp
(rewriter):1:12: warning: the deletion of:
(rewriter):1: puts(:hello, :world)
(rewriter):1: ^~~~~~~~
(rewriter):1:6: warning: is crossing:
(rewriter):1: puts(:hello, :world)
(rewriter):1: ^~~~~~~~
      DIAGNOSTIC
  end

  def test_multiple_actions
    assert_actions_result 'puts({:hello => [:everybody]})',
      [:replace, @comma_space, ' => '],
      [:wrap, @hello.join(@world), '{', '}'],
      [:replace, @world, ':everybody'],
      [:wrap, @world, '[', ']']
  end

  def test_wraps_same_range
    assert_actions_result 'puts([(:hello)], :world)',
      [[:wrap, @hello, '(', ')'],
       [:wrap, @hello, '[', ']']]
  end

  def test_inserts_on_empty_ranges
    assert_actions_result 'puts({x}:hello[y], :world)',
      [:insert_before, @hello.begin, '{'],
      [:replace, @hello.begin, 'x'],
      [:insert_after, @hello.begin, '}'],
      [:insert_before, @hello.end, '['],
      [:replace, @hello.end, 'y'],
      [:insert_after, @hello.end, ']']
  end

  def test_replace_same_range
    assert_actions_result 'puts(:hey, :world)',
      [[:replace, @hello, ':hi'],
       [:replace, @hello, ':hey']],
      different_replacements: <<-DIAGNOSTIC.chomp
(rewriter):1:6: warning: different replacements: :hey vs :hi
(rewriter):1: puts(:hello, :world)
(rewriter):1: ^~~~~~
      DIAGNOSTIC
  end

  def test_swallowed_insertions
    assert_actions_result 'puts(:hi)',
      [[:wrap, @hello.adjust(begin_pos: 1), '__', '__'],
       [:replace, @world.adjust(end_pos: -2), 'xx'],
       [:replace, @hello.join(@world), ':hi']],
      swallowed_insertions: <<-DIAGNOSTIC.chomp
(rewriter):1:6: warning: this replacement:
(rewriter):1: puts(:hello, :world)
(rewriter):1: ^~~~~~~~~~~~~~
(rewriter):1:7: warning: swallows some inner rewriting actions:
(rewriter):1: puts(:hello, :world)
(rewriter):1: ^~~~~ ~~~~
      DIAGNOSTIC
  end

  def test_out_of_range_ranges
    rewriter = Parser::Source::TreeRewriter.new(@buf)
    assert_raises(IndexError) { rewriter.insert_before(range(0, 100), 'hola') }
  end

  def test_empty
    rewriter = Parser::Source::TreeRewriter.new(@buf)
    assert_equal true, rewriter.empty?

    # This is a trivial wrap
    rewriter.wrap(range(2,3), '', '')
    assert_equal true, rewriter.empty?

    # This is a trivial deletion
    rewriter.remove(range(2,0))
    assert_equal true, rewriter.empty?

    rewriter.remove(range(2,3))
    assert_equal false, rewriter.empty?
  end

  # splits array into two groups, yield all such possible pairs of groups
  # each_split([1, 2, 3, 4]) yields [1, 2], [3, 4];
  # then [1, 3], [2, 4]
  # ...
  # and finally [3, 4], [1, 2]
  def each_split(array)
    n = array.size
    first_split_size = n.div(2)
    splitting = (0...n).to_set
    splitting.to_a.combination(first_split_size) do |indices|
      yield array.values_at(*indices),
            array.values_at(*(splitting - indices))
    end
  end

  # Checks that `actions+extra` give the same result when
  # made in order or from subgroups that are later merged.
  # The `extra` actions are always added at the end of the second group.
  #
  def check_all_merge_possibilities(actions, extra, **policy)
    expected = apply(actions + extra, **policy)

    each_split(actions) do |actions_1, actions_2|
      build(actions_1, **policy) do |rewriter_1|
        build(actions_2 + extra, **policy) do |rewriter_2|
          result = rewriter_1.merge(rewriter_2).process
          assert_equal(expected, result,
            "Group 1: #{actions_1.inspect}\n\n" +
            "Group 2: #{(actions_2 + extra).inspect}"
          )
        end
      end
    end
  end

  def test_merge
    check_all_merge_possibilities([
      [:wrap, @whole, '<', '>'],
      [:replace, @comma_space, ' => '],
      [:wrap, @hello, '!', '!'],
      # Following two wraps must have same value as they
      # will be applied in different orders...
      [:wrap, @hello.join(@world), '{', '}'],
      [:wrap, @hello.join(@world), '{', '}'],
      [:remove, @ll],
      [:replace, @world, ':everybody'],
      [:wrap, @world, '[', ']']
    ], [
      # ... but this one is always going to be applied last (extra)
      [:wrap, @hello.join(@world), '@', '@'],
    ])
  end

  def representation_example
    Parser::Source::TreeRewriter.new(@buf)
      .wrap(range(1...10), '(', ')')
      .insert_after(range(2...6), '!')
      .replace(range(2...4), 'foo')
      .remove(range(5...6))
  end

  def test_nested_actions
    result = representation_example.as_nested_actions

    assert_equal(
      [
        [:wrap, 1...10, '(', ')'],
        [:wrap, 2...6, '', '!'],   # aka "insert_after"
        [:replace, 2...4, 'foo'],
        [:replace, 5...6, ''],     # aka "removal"
      ],
      result.each {|arr| arr[1] = arr[1].to_range }
    )
  end

  def test_ordered_replacements
    result = representation_example.as_replacements

    assert_equal(
      [
        [ 1...1,   '('],
        [ 2...4,   'foo'],
        [ 5...6,   ''],
        [ 6...6,   '!'],
        [ 10...10, ')'],
      ],
      result.map {|r, s| [r.to_range, s]}
    )
  end
end

class TestSourceTreeRewriterImport < Minitest::Test
  include TestSourceTreeRewriter::Setup

  def setup
    super
    @buf2 = Parser::Source::Buffer.new('(rewriter 2)', source: ':hello')

    @rewriter = Parser::Source::TreeRewriter.new(@buf)
    @rewriter2 = Parser::Source::TreeRewriter.new(@buf2)

    @hello2 = range2(0, 6)
    @ll2 = range2(3, 2)
  end

  def range2(from, len)
    Parser::Source::Range.new(@buf2, from, from + len)
  end

  def test_import_with_offset
    @rewriter2.wrap(@hello2, '[', ']')
    @rewriter.wrap(@hello.join(@world), '{', '}')
    @rewriter.import!(@rewriter2, offset: @hello.begin_pos)

    assert_equal 'puts({[:hello], :world})', @rewriter.process
  end

  def test_import_with_offset_from_bigger_source
    @rewriter2.wrap(@ll2, '[', ']')
    @rewriter.wrap(@hello, '{', '}')
    @rewriter2.import!(@rewriter, offset: -@hello.begin_pos)

    assert_equal '{:he[ll]o}', @rewriter2.process
  end

  def test_import_with_offset_and_self
    @rewriter.wrap(@ll, '[', ']')
    @rewriter.import!(@rewriter, offset: +3)
    @rewriter.replace(range(8,1), '**')

    assert_equal 'puts(:he[**l]o[, ]:world)', @rewriter.process

    @rewriter.import!(@rewriter, offset: -6)

    assert_equal 'pu[**s]([:h]e[**l]o[, ]:world)', @rewriter.process
  end

  def test_import_with_invalid_offset
    @rewriter.wrap(@ll, '[', ']')

    m = @rewriter.dup.import!(@rewriter, offset: -@ll.begin_pos)
    assert_equal '[pu]ts(:he[ll]o, :world)', m.process

    off = @buf.source.size - @ll.end_pos
    m = @rewriter.dup.import!(@rewriter, offset: off)
    assert_equal 'puts(:he[ll]o, :worl[d)]', m.process

    assert_raises { @rewriter.import!(@rewriter, offset: -@ll.begin_pos - 1) }
    assert_raises { @rewriter.import!(@rewriter, offset: off + 1) }

    assert_equal 'puts(:he[ll]o, :world)', @rewriter.process # Test atomicity of import!
  end

  def test_empty_import
    assert_equal @rewriter, @rewriter.import!(@rewriter2)
    assert_equal @rewriter, @rewriter.import!(@rewriter, offset: 42)
  end

  def test_inspect
    assert_equal '#', @rewriter.inspect

    @rewriter.insert_before(@hello, '[')
    @rewriter.replace(@ll, 'xxx')
    @rewriter.remove(@comma_space)

    assert_equal '#', @rewriter.inspect

    @rewriter.insert_before(@hello, '{')
    @rewriter.remove(@world)

    assert_equal '#', @rewriter.inspect
  end
end

parser-3.3.4.2/test/test_static_environment.rb000066400000000000000000000014131465510415600214410ustar00rootroot00000000000000
# frozen_string_literal: true

require 'helper'

class TestStaticEnvironment < Minitest::Test
  def setup
    @env = Parser::StaticEnvironment.new
  end

  def test_declare
    refute @env.declared?(:foo)

    @env.declare :foo

    assert @env.declared?(:foo)
  end

  def test_extend_static
    @env.declare :foo
    @env.extend_static
    @env.declare :bar

    refute @env.declared?(:foo)
    assert @env.declared?(:bar)
  end

  def test_extend_dynamic
    @env.declare :foo
    @env.extend_dynamic
    @env.declare :bar

    assert @env.declared?(:foo)
    assert @env.declared?(:bar)
  end

  def test_unextend
    @env.declare :foo
    @env.extend_dynamic
    @env.declare :bar
    @env.unextend

    assert @env.declared?(:foo)
    refute @env.declared?(:bar)
  end
end

parser-3.3.4.2/test/using_tree_rewriter/000077500000000000000000000000001465510415600202325ustar00rootroot00000000000000
parser-3.3.4.2/test/using_tree_rewriter/fixtures/000077500000000000000000000000001465510415600221035ustar00rootroot00000000000000
parser-3.3.4.2/test/using_tree_rewriter/fixtures/input.rb000066400000000000000000000000451465510415600235660ustar00rootroot00000000000000
# frozen_string_literal: true

6 * 7
parser-3.3.4.2/test/using_tree_rewriter/fixtures/output.rb000066400000000000000000000000511465510415600237640ustar00rootroot00000000000000
# frozen_string_literal: true

([6] * 7)
parser-3.3.4.2/test/using_tree_rewriter/using_tree_rewriter.rb000066400000000000000000000003221465510415600246430ustar00rootroot00000000000000
# frozen_string_literal: true

class UsingTreeRewriter < Parser::TreeRewriter
  def on_send(node)
    wrap(node.loc.expression, '(', ')')
    wrap(node.children[0].loc.expression, '[', ']')
    super
  end
end
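
# A minimal usage sketch (editorial addition, not part of the archived gem):
# it illustrates how a Parser::TreeRewriter subclass such as UsingTreeRewriter
# above would be driven to turn the fixture input `6 * 7` into the fixture
# output `([6] * 7)`. The buffer name '(example)' is an arbitrary assumption;
# the calls themselves (Parser::CurrentRuby#parse and TreeRewriter#rewrite)
# are the gem's public API.
require 'parser/current'

buffer = Parser::Source::Buffer.new('(example)', source: '6 * 7')
ast    = Parser::CurrentRuby.new.parse(buffer)

# TreeRewriter#rewrite walks the AST, applying the wrap() edits queued in
# on_send, and returns the rewritten source string.
puts UsingTreeRewriter.new.rewrite(buffer, ast) # => "([6] * 7)"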