psych-2.2.4/.gitignore

*.swp
*.bundle
*.jar
*.class
.mvn
/.bundle/
/.yardoc
/Gemfile.lock
/_yardoc/
/coverage/
/doc/
/pkg/
/spec/reports/
/tmp/

psych-2.2.4/.travis.yml

rvm:
  - 2.1
  - 2.2
  - 2.3.0
  - ruby-head
  - jruby-9.1.6.0
matrix:
  allow_failures:
    - rvm: jruby-9.1.6.0
before_install: gem install bundler --no-document
script: rake
addons:
  apt:
    packages:
      - haveged

psych-2.2.4/CHANGELOG.rdoc

Fri Feb 6 17:47:05 2015  Aaron Patterson

  * ext/psych/lib/psych/visitors/yaml_tree.rb: register nodes when dumping
    objects with custom coders. [ruby-core:66215] [Bug #10496]
  * test/psych/test_coder.rb: test for fix

Fri Feb 6 16:58:31 2015  Aaron Patterson

  * ext/psych/lib/psych/visitors/to_ruby.rb: fix support for regular
    expressions with newlines. tenderlove/psych#222
  * test/psych/test_yaml.rb: test for change.

Thu Jan 29 02:34:27 2015  Aaron Patterson

  * ext/psych/lib/psych/visitors/to_ruby.rb: fix parsing hashes with
    instance variables when it is referenced multiple times.
  * ext/psych/lib/psych.rb: bump version
  * ext/psych/psych.gemspec: bump version
  * test/psych/test_hash.rb: test for fix

Fri Jan 9 07:13:55 2015  Aaron Patterson

  * ext/psych/lib/psych/visitors/to_ruby.rb: call `allocate` on hash
    subclasses. Fixes github.com/tenderlove/psych/issues/196
  * test/psych/test_hash.rb: test for change

Fri Jan 9 06:58:43 2015  Aaron Patterson

  * ext/psych/lib/psych/visitors/to_ruby.rb: revive hashes with ivars
  * ext/psych/lib/psych/visitors/yaml_tree.rb: dump hashes with ivars.
    Fixes github.com/psych/issues/43
  * test/psych/test_hash.rb: test for change

Sun Nov 23 13:11:24 2014  Sean Griffin

  * lib/psych/visitors/to_ruby.rb: Allow loading any BasicObject that
    defines #marshal_load, fixes #100
  * lib/psych/visitors/yaml_tree.rb: Allow dumping any BasicObject that
    defines #marshal_dump

Sat Aug 30 06:39:48 2014  Aaron Patterson

  * ext/psych/lib/psych/visitors/yaml_tree.rb: fix NameError dumping and
    loading. Fixes GH #85. Thanks @brentdax for the patch!
  * test/psych/test_exception.rb: test for fix

Sat Aug 30 06:23:40 2014  Aaron Patterson

  * ext/psych/lib/psych/scalar_scanner.rb: fix loading strings that look
    like integers but have a newline. Fixes GH #189
  * test/psych/test_string.rb: test for fix

Sat Aug 30 06:10:39 2014  Aaron Patterson

  * ext/psych/lib/psych/visitors/to_ruby.rb: merge keys with a hash should
    merge the hash in to the parent.
  * test/psych/test_merge_keys.rb: test for change. Fixes GH #202

Sat Aug 30 06:00:26 2014  Aaron Patterson

  * ext/psych/lib/psych/visitors/to_ruby.rb: quoted "<<" strings should not
    be treated as merge keys.
  * ext/psych/lib/psych/visitors/yaml_tree.rb: hashes with keys containing
    "<<" should roundtrip.
  * test/psych/test_merge_keys.rb: test for change. Fixes GH #203

Wed Aug 6 03:41:21 2014  Aaron Patterson

  * ext/psych/lib/psych/visitors/to_ruby.rb: backwards compatibility for
    hashes emitted by Syck. Github #198
  * test/psych/test_hash.rb: test for change.

Fri Jun 6 07:41:41 2014  Aaron Patterson

  * ext/psych/lib/psych/visitors/yaml_tree.rb: dump empty symbols with a
    tag so that they can be parsed on input.
[Bug #9873] [ruby-core:62825] * test/psych/test_symbol.rb: test for change Sun May 25 11:35:41 2014 Zachary Scott * test/psych/*: YAML::ENGINE was removed in [Bug #8344] 2014-03-27 SHIBATA Hiroshi * ext/psych/yaml/scanner.c: merge libyaml 0.1.6 * ext/psych/yaml/yaml_private.h: ditto Sat Mar 1 11:08:00 2014 Aaron Patterson * ext/psych/lib/psych/visitors/yaml_tree.rb: support dumping Encoding objects. * ext/psych/lib/psych/visitors/to_ruby.rb: support loading Encoding objects. * test/psych/test_encoding.rb: add test * ext/psych/lib/psych.rb: add version Wed Feb 5 10:11:36 2014 Zachary Scott * ext/psych/yaml/config.h: bump libyaml to 0.1.5 Wed Feb 5 04:16:41 2014 Aaron Patterson * ext/psych/yaml/emitter.c: merge libyaml 0.1.5 * ext/psych/yaml/loader.c: ditto * ext/psych/yaml/parser.c: ditto * ext/psych/yaml/reader.c: ditto * ext/psych/yaml/scanner.c: ditto * ext/psych/yaml/writer.c: ditto * ext/psych/yaml/yaml_private.h: ditto Thu Jan 9 09:55:20 2014 Aaron Patterson * ext/psych/lib/psych/visitors/yaml_tree.rb: dumping strings with quotes should not have changed. [ruby-core:59316] [Bug #9300] * ext/psych/lib/psych.rb: fixed missing require. * test/psych/test_string.rb: test Wed Nov 27 06:40:18 2013 Aaron Patterson * ext/psych/lib/psych/scalar_scanner.rb: fix support for negative years. * ext/psych/lib/psych/visitors/yaml_tree.rb: ditto * test/psych/test_date_time.rb: test for change. Fixes: https://github.com/tenderlove/psych/issues/168 Wed Nov 27 04:46:55 2013 Aaron Patterson * ext/psych/lib/psych/scalar_scanner.rb: fix regexp for matching TIME strings. * test/psych/test_date_time.rb: test for change. Fixes: https://github.com/tenderlove/psych/issues/171 Wed Nov 6 04:14:25 2013 Aaron Patterson * ext/psych/lib/psych/visitors/to_ruby.rb: process merge keys before reviving objects. Fixes GH psych #168 * test/psych/test_merge_keys.rb: test for change https://github.com/tenderlove/psych/issues/168 Wed Oct 30 03:25:10 2013 Aaron Patterson * ext/psych/lib/psych/visitors/yaml_tree.rb: make less garbage when testing if a string is binary. Wed Oct 30 03:08:24 2013 Aaron Patterson * ext/psych/lib/psych/visitors/yaml_tree.rb: string subclasses should not be considered to be binary. Fixes Psych / GH 166 https://github.com/tenderlove/psych/issues/166 * test/psych/test_string.rb: test for fix Fri Sep 20 23:44:07 2013 Zachary Scott * ext/psych/yaml/yaml.h: [DOC] fix typo by @GreenGeorge [Fixes GH-161] https://github.com/tenderlove/psych/pull/161 Fri Sep 6 02:37:22 2013 Aaron Patterson * ext/psych/lib/psych/visitors/yaml_tree.rb: use double quotes when strings start with special characters. [Fixes GH-157] https://github.com/tenderlove/psych/issues/157 * test/psych/test_string.rb: test for change. Thu Aug 29 02:40:45 2013 Aaron Patterson * ext/psych/lib/psych/scalar_scanner.rb: invalid floats should be treated as strings. [Fixes GH-156] https://github.com/tenderlove/psych/issues/156 * test/psych/test_string.rb: test for change Sat Jul 6 04:49:38 2013 Aaron Patterson * ext/psych/lib/psych/visitors/yaml_tree.rb: register time objects so they are referenced as ids during output. * test/psych/test_date_time.rb: corresponding test. Wed May 15 02:22:16 2013 Aaron Patterson * ext/psych/lib/psych.rb: Adding Psych.safe_load for loading a user defined, restricted subset of Ruby object types. * ext/psych/lib/psych/class_loader.rb: A class loader for encapsulating the logic for which objects are allowed to be deserialized. 
* ext/psych/lib/psych/deprecated.rb: Changes to use the class loader * ext/psych/lib/psych/exception.rb: ditto * ext/psych/lib/psych/json/stream.rb: ditto * ext/psych/lib/psych/nodes/node.rb: ditto * ext/psych/lib/psych/scalar_scanner.rb: ditto * ext/psych/lib/psych/stream.rb: ditto * ext/psych/lib/psych/streaming.rb: ditto * ext/psych/lib/psych/visitors/json_tree.rb: ditto * ext/psych/lib/psych/visitors/to_ruby.rb: ditto * ext/psych/lib/psych/visitors/yaml_tree.rb: ditto * ext/psych/psych_to_ruby.c: ditto * test/psych/helper.rb: ditto * test/psych/test_safe_load.rb: tests for restricted subset. * test/psych/test_scalar_scanner.rb: ditto * test/psych/visitors/test_to_ruby.rb: ditto * test/psych/visitors/test_yaml_tree.rb: ditto Sat Apr 6 02:54:08 2013 Aaron Patterson * ext/psych/lib/psych/exception.rb: there should be only one exception base class. Fixes tenderlove/psych #125 * ext/psych/lib/psych.rb: require the correct exception class * ext/psych/lib/psych/syntax_error.rb: ditto * ext/psych/lib/psych/visitors/to_ruby.rb: ditto Sat Apr 6 02:06:04 2013 Aaron Patterson * ext/psych/lib/psych/visitors/to_ruby.rb: correctly register self-referential strings. Fixes tenderlove/psych #135 * test/psych/test_string.rb: appropriate test. Fri Mar 1 09:15:00 2013 Zachary Scott * lib/psych.rb: specify in rdoc what object is returned in parser By Adam Stankiewicz [Github Fixes #133] Fri Mar 1 03:22:00 2013 Zachary Scott * lib/psych.rb: rdoc for Psych overview by Adam Stankiewicz [Github Fixes #134] Sun Feb 17 01:13:00 2013 Zachary Scott * lib/psych/y.rb: Document Kernel#y by Adam Stankiewicz [Github Fixes #127] Fri Feb 8 08:53:27 2013 Aaron Patterson * ext/psych/lib/psych/visitors/yaml_tree.rb: fixing string quotation when dumping Ruby strings. Thanks Ingy * test/psych/test_psych.rb: appropriate tests. * test/psych/test_yaml.rb: ditto Fri Feb 8 08:50:42 2013 Aaron Patterson * ext/psych/lib/psych/visitors/yaml_tree.rb: change output reference ids to be sequential numbers. Thu Jan 17 10:48:56 2013 Aaron Patterson * ext/psych/lib/psych/scalar_scanner.rb: use constants rather than calculating Inf and NaN. Sun Jan 13 16:40:00 2013 Zachary Scott * ext/psych/yaml/scanner.c: Typos by James Dabbs [Github Fixes #118] Sat Jan 12 08:58:47 2013 Aaron Patterson * ext/psych/lib/psych/visitors/to_ruby.rb: merge key values that contain something besides a hash should be left in tact. * test/psych/test_merge_keys.rb: test for change Thu Jan 10 04:23:07 2013 Aaron Patterson * ext/psych/lib/psych/scalar_scanner.rb: strip trailing dots from floats so that Float() will not raise an exception. * test/psych/test_numeric.rb: test to ensure "1." can be loaded * test/psych/test_string.rb: make sure "1." can round trip Sat Nov 17 12:03:41 2012 Aaron Patterson * ext/psych/lib/psych/scalar_scanner.rb: avoid raising exceptions when parsing Floats and Integers. Thanks riffraff [ruby-core:44426] * test/psych/test_numeric.rb: associated test Sat Nov 17 11:26:36 2012 Aaron Patterson * ext/psych/lib/psych/core_ext.rb: move Kernel#y so that it can manually be required as 'psych/y'. * ext/psych/lib/psych/y.rb: ditto Tue Nov 6 09:37:57 2012 NARUSE, Yui * ruby.c (load_file_internal): set default source encoding as UTF-8 instead of US-ASCII. [ruby-core:46021] [Feature #6679] * parse.y (parser_initialize): set default parser encoding as UTF-8 instead of US-ASCII. Mon Oct 29 10:22:00 2012 Aaron Patterson * ext/psych/lib/psych/handlers/recorder.rb: added a class for recording YAML parse and emit events. 
* ext/psych/lib/psych/handler.rb: adding a list of events so that handler classes can more easily be meta-programmed. * test/psych/handlers/test_recorder.rb: tests for the change. Sun Oct 28 10:12:15 2012 Aaron Patterson * ext/psych/lib/psych/visitors/yaml_tree.rb: `tree` should return the same thing on every call. * test/psych/visitors/test_yaml_tree.rb: related test. Sun Oct 28 10:05:03 2012 Aaron Patterson * ext/psych/lib/psych/visitors/yaml_tree.rb: YAML Tree object should be able to take an emitter object as it's output. * test/psych/visitors/test_yaml_tree.rb: related test. Thu Jul 19 09:33:46 2012 Aaron Patterson * ext/psych/emitter.c (initialize): allow a configuration object to be passed to the constructor so that mutation isn't required after instantiation. * ext/psych/lib/psych/handler.rb: add configuration object * ext/psych/lib/psych/visitors/emitter.rb: use configuration object if extra configuration is present. Fri May 18 01:28:21 2012 Aaron Patterson * ext/psych/parser.c (transcode_string): fix encoding index names. Thanks markizko for reporting. Wed May 16 05:11:29 2012 Aaron Patterson * ext/psych/lib/psych/visitors/to_ruby.rb: fix a bug with string subclass dumping and loading. * test/psych/test_array.rb: pertinent tests * test/psych/test_string.rb: ditto Wed May 16 01:31:21 2012 Aaron Patterson * ext/psych/lib/psych/visitors/to_ruby.rb: convert omap tagged maps to Psych::Omap objects rather than hashes. [Bug #6425] * test/psych/test_omap.rb: pertinent test. Wed May 16 01:15:45 2012 Aaron Patterson * ext/psych/lib/psych/visitors/yaml_tree.rb: keep a reference to custom coders so that GC does not impact dumped yaml reference ids. Mon Apr 30 04:43:53 2012 Aaron Patterson * ext/psych/lib/psych/json/yaml_events.rb: implicit styles should not be changeable for JSON events. Sat Apr 7 02:07:00 2012 Aaron Patterson * ext/psych/parser.c: fall back to any encoding if the external encoding is wrong. [ruby-core:44163] * test/psych/test_encoding.rb: fix test Fri Mar 9 06:29:22 2012 Aaron Patterson * ext/psych/lib/psych.rb (load, parse): stop parsing or loading after the first document has been parsed. * test/psych/test_stream.rb: pertinent tests. Fri Mar 9 06:17:05 2012 Aaron Patterson * ext/psych/lib/psych.rb (parse_stream, load_stream): if a block is given, documents will be yielded to the block as they are parsed. [ruby-core:42404] [Bug #5978] * ext/psych/lib/psych/handlers/document_stream.rb: add a handler that yields documents as they are parsed * test/psych/test_stream.rb: corresponding tests. Tue Mar 6 02:31:20 2012 Aaron Patterson * ext/psych/lib/psych/core_ext.rb: only extend Kernel if IRB is loaded in order to stop method pollution. Tue Feb 28 10:28:51 2012 Aaron Patterson * ext/psych/lib/psych.rb: default open YAML files with utf8 external encoding. [ruby-core:42967] * test/psych/test_tainted.rb: ditto Fri Feb 24 13:54:33 2012 Aaron Patterson * ext/psych/parser.c: prevent a memory leak by protecting calls to handler callbacks. * test/psych/test_parser.rb: test to demonstrate leak. Fri Feb 24 08:08:38 2012 Aaron Patterson * ext/psych/parser.c: set parser encoding based on the YAML input rather than user configuration. * test/psych/test_encoding.rb: corresponding tests. * test/psych/test_parser.rb: ditto * test/psych/test_tainted.rb: ditto Fri Feb 10 03:41:31 2012 Aaron Patterson * ext/psych/parser.c: removed external encoding setter, allow parser to be reused. * ext/psych/lib/psych/parser.rb: added external encoding setter. 
* test/psych/test_parser.rb: test parser reuse Wed Jan 18 12:49:15 2012 Aaron Patterson * ext/psych/lib/psych/visitors/to_ruby.rb: Added support for loading subclasses of String with ivars * ext/psych/lib/psych/visitors/yaml_tree.rb: Added support for dumping subclasses of String with ivars * test/psych/test_string.rb: corresponding tests Sun Dec 18 12:42:48 2011 Aaron Patterson * ext/psych/lib/psych/visitors/to_ruby.rb: BigDecimals can be restored from YAML. * ext/psych/lib/psych/visitors/yaml_tree.rb: BigDecimals can be dumped to YAML. * test/psych/test_numeric.rb: tests for BigDecimal serialization Sun Dec 18 12:03:13 2011 Aaron Patterson * ext/psych/lib/psych/scalar_scanner.rb: Strings that look like dates should be treated as strings and not dates. * test/psych/test_scalar_scanner.rb: corresponding tests. Wed Dec 7 08:04:31 2011 Aaron Patterson * ext/psych/lib/psych.rb (module Psych): parse and load methods take an optional file name that is used when raising Psych::SyntaxError exceptions * ext/psych/lib/psych/syntax_error.rb (module Psych): allow nil file names and handle nil file names in the exception message * test/psych/test_exception.rb (module Psych): Tests for changes. Wed Nov 30 09:09:37 2011 Aaron Patterson * ext/psych/parser.c (parse): parse method can take an option file name for use in exception messages. * test/psych/test_parser.rb: corresponding tests. Tue Nov 22 04:46:22 2011 Aaron Patterson * ext/psych/lib/psych.rb: remove autoload from psych * ext/psych/lib/psych/json.rb: ditto Thu Nov 17 10:36:46 2011 Aaron Patterson * ext/psych/lib/psych.rb (load_file): make sure opened yaml files are also closed. [ruby-core:41088] Wed Nov 9 04:52:16 2011 Aaron Patterson * ext/psych/lib/psych/tree_builder.rb: dump complex numbers, rationals, etc with reference ids. * ext/psych/lib/psych/visitors/yaml_tree.rb: ditto * ext/psych/lib/psych/visitors/to_ruby.rb: loading complex numbers, rationals, etc with reference ids. * test/psych/test_object_references.rb: corresponding tests Mon Nov 7 20:31:52 2011 Aaron Patterson * ext/psych/lib/psych/scalar_scanner.rb: make sure strings that look like base 60 numbers are serialized as quoted strings. * test/psych/test_string.rb: test for change. Wed Oct 5 02:50:27 2011 Aaron Patterson * ext/psych/lib/psych/syntax_error.rb: Add file, line, offset, and message attributes during parse failure. * ext/psych/parser.c: Update parser to raise exception with correct values. * test/psych/test_exception.rb: corresponding tests. Wed Oct 5 01:52:16 2011 Aaron Patterson * ext/psych/parser.c (parse): Use context_mark for indicating error line and column. Tue Oct 4 06:29:55 2011 Aaron Patterson * ext/psych/lib/psych.rb: calling `yaml` rather than `to_yaml`. * ext/psych/lib/psych/nodes/node.rb: Rename `to_yaml` to just `yaml` in order to avoid YAML::ENGINE switching from replacing this method. * test/psych/helper.rb: fix tests for method name change. * test/psych/test_document.rb: ditto * test/psych/visitors/test_emitter.rb: ditto Tue Oct 4 06:20:19 2011 Aaron Patterson * ext/psych/lib/psych/scalar_scanner.rb: Match values against the floating point spec defined in YAML to avoid erronious parses. * test/psych/test_numeric.rb: corresponding test. Tue Oct 4 05:59:24 2011 Aaron Patterson * ext/psych/lib/psych/visitors/to_ruby.rb: ToRuby visitor can be constructed with a ScalarScanner. * ext/psych/lib/psych/visitors/yaml_tree.rb: ScalarScanner can be passed to the YAMLTree visitor. 
Tue Oct 4 05:47:23 2011  Aaron Patterson

  * ext/psych/lib/psych/visitors/to_ruby.rb: Define Regexp::NOENCODING for
    1.9.2 backwards compatibility.
  * ext/psych/lib/psych/visitors/yaml_tree.rb: Fix Date string generation
    for 1.9.2 backwards compatibility.

Fri Sep 2 04:05:25 2011  Aaron Patterson

  * ext/psych/lib/psych/visitors/yaml_tree.rb: emit strings tagged as
    ascii-8bit as binary in YAML.
  * test/psych/test_string.rb: corresponding test.

Thu Aug 25 06:11:35 2011  Aaron Patterson

  * ext/psych/lib/psych/nodes/node.rb: default `to_yaml` encoding to be UTF-8.
  * test/psych/test_encoding.rb: test yaml dump encoding.

Wed Jun 22 03:20:52 2011  Aaron Patterson

  * ext/psych/lib/psych/visitors/to_ruby.rb: Fix cyclic references of
    objects. Thanks to CvX for reporting the bug and a test case.
  * test/psych/test_object.rb: test for cyclic object references.

Thu Jun 9 10:57:03 2011  Aaron Patterson

  * ext/psych/lib/psych/visitors/to_ruby.rb: Hash subclasses can be read
    from YAML files.
  * ext/psych/lib/psych/visitors/yaml_tree.rb: Hash subclasses can be
    dumped to YAML files.
  * test/psych/test_hash.rb: corresponding test.

Thu Jun 9 09:18:51 2011  Aaron Patterson

  * ext/psych/lib/psych/visitors/to_ruby.rb: Ruby modules can be loaded
    from YAML files.
  * ext/psych/lib/psych/visitors/yaml_tree.rb: Ruby modules can be dumped
    to YAML files.
  * test/psych/test_class.rb: corresponding test.

Thu Jun 9 09:05:04 2011  Aaron Patterson

  * ext/psych/lib/psych/visitors/to_ruby.rb: Ruby classes can be loaded
    from YAML files.
  * ext/psych/lib/psych/visitors/yaml_tree.rb: Ruby classes can be dumped
    to YAML files.
  * test/psych/test_class.rb: corresponding test.

Mon Jun 6 09:39:43 2011  Aaron Patterson

  * ext/psych/parser.c (parse): release event objects to plug memory leak.
    Thanks Mark J. Titorenko!

psych-2.2.4/Gemfile

source 'https://rubygems.org'

gemspec

psych-2.2.4/Mavenfile

#-*- mode: ruby -*-

jar 'org.yaml:snakeyaml:${snakeyaml.version}'
plugin :dependency, '2.8', :outputFile => 'pkg/classpath'

# vim: syntax=Ruby

psych-2.2.4/README.md

# Psych

[![Build Status](https://travis-ci.org/ruby/psych.svg?branch=master)](https://travis-ci.org/ruby/psych)
[![Build status](https://ci.appveyor.com/api/projects/status/2t6x109xfmbx209k/branch/master?svg=true)](https://ci.appveyor.com/project/ruby/psych/branch/master)

* https://github.com/ruby/psych

## Description

Psych is a YAML parser and emitter. Psych leverages
[libyaml](http://pyyaml.org/wiki/LibYAML) for its YAML parsing and emitting
capabilities. In addition to wrapping libyaml, Psych also knows how to
serialize and de-serialize most Ruby objects to and from the YAML format.

## Examples

    # Load YAML in to a Ruby object
    Psych.load('--- foo') # => 'foo'

    # Emit YAML from a Ruby object
    Psych.dump("foo")     # => "--- foo\n...\n"

## Dependencies

* libyaml

## Installation

Psych has been included with MRI since 1.9.2, and is the default YAML parser
in 1.9.3.

If you want a newer gem release of Psych, you can use rubygems:

    gem install psych

In order to use the gem release in your app, and not the stdlib version,
you'll need the following:

    gem 'psych'
    require 'psych'

Or if you use Bundler add this to your `Gemfile`:

    gem 'psych'

JRuby ships with a pure Java implementation of Psych.
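Whichever implementation you are on, the same top-level API is available. In
addition to the `Psych.load` and `Psych.dump` calls shown above, the changelog
above records the addition of `Psych.safe_load`, which deserializes only a
restricted subset of object types. A minimal sketch, assuming the positional
whitelist signature used by the 2.x series
(`safe_load(yaml, whitelist_classes = [], whitelist_symbols = [], aliases = false)`)
rather than anything documented in this README:

    require 'psych'
    require 'date'

    # Plain scalars, arrays and hashes are allowed by default
    Psych.safe_load('--- foo')                    # => "foo"

    # Other classes have to be whitelisted explicitly
    Psych.safe_load('--- 2017-03-01', [Date])     # => #<Date: 2017-03-01 ...>

    # Anything outside the whitelist raises Psych::DisallowedClass
    Psych.safe_load('--- !ruby/object:Object {}') # raises Psych::DisallowedClass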
If you're on Rubinius, Psych is available in 1.9 mode, please refer to the
Language Modes section of the
[Rubinius README](https://github.com/rubinius/rubinius#readme) for more
information on building and 1.9 mode.

## License

Copyright 2009 Aaron Patterson, et al.

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

psych-2.2.4/Rakefile

require "bundler/gem_tasks"
require "rake/testtask"

Rake::TestTask.new(:test) do |t|
  t.libs << "test"
  t.libs << "lib"
  t.test_files = FileList['test/**/test_*.rb']
  t.verbose = true
  t.warning = true
end

if RUBY_PLATFORM =~ /java/
  require 'rake/javaextensiontask'
  Rake::JavaExtensionTask.new("psych") do |ext|
    require 'maven/ruby/maven'
    # uses Mavenfile to write classpath into pkg/classpath
    # and tell maven via system properties the snakeyaml version
    # this is basically the same as running from the commandline:
    # rmvn dependency:build-classpath -Dsnakeyaml.version='use version from Psych::DEFAULT_SNAKEYAML_VERSION here'
    Maven::Ruby::Maven.new.exec('dependency:build-classpath', "-Dsnakeyaml.version=1.18", '-Dverbose=true')
    ext.source_version = '1.7'
    ext.target_version = '1.7'
    ext.classpath = File.read('pkg/classpath')
    ext.ext_dir = 'ext/java'
  end
else
  require 'rake/extensiontask'
  Rake::ExtensionTask.new("psych") do |ext|
    ext.lib_dir = File.join(*['lib', ENV['FAT_DIR']].compact)
  end
end

task :default => [:compile, :test]

psych-2.2.4/appveyor.yml

---
clone_depth: 10
install:
  - SET PATH=C:\Ruby%ruby_version%\bin;%PATH%
  - bundle install
build_script:
  - rake -rdevkit compile
test_script:
  - rake test
deploy: off
environment:
  matrix:
    - ruby_version: "21"
    - ruby_version: "21-x64"
    - ruby_version: "22"
    - ruby_version: "22-x64"
    - ruby_version: "23"
    - ruby_version: "23-x64"

psych-2.2.4/bin/console

#!/usr/bin/env ruby

require "bundler/setup"
require "psych"
require "irb"

IRB.start

psych-2.2.4/bin/setup

#!/usr/bin/env bash
set -euo pipefail
IFS=$'\n\t'
set -vx

bundle install
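The Java and C extension sources that follow implement the same low-level
Psych::Emitter / Psych::Handler event API (start_stream, start_document,
scalar, end_document, end_stream, and friends). As a hedged illustration of
what those sources wire up, here is a sketch of driving the emitter directly
from Ruby; the argument order and the encoding/style constants are read off
the emitter and parser sources below, not from any documentation in this
archive:

    require 'psych'

    emitter = Psych::Emitter.new($stdout)
    emitter.start_stream(Psych::Parser::UTF8)   # encoding constant defined by the parser ext below
    emitter.start_document([], [], true)        # no %YAML directive, no tag directives, implicit start
    emitter.scalar("foo", nil, nil, true, false, Psych::Nodes::Scalar::ANY)
    emitter.end_document(true)                  # implicit document end
    emitter.end_stream                          # writes the finished YAML stream to $stdout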
psych-2.2.4/ext/000077500000000000000000000000001305404671600134535ustar00rootroot00000000000000psych-2.2.4/ext/java/000077500000000000000000000000001305404671600143745ustar00rootroot00000000000000psych-2.2.4/ext/java/PsychEmitter.java000066400000000000000000000316571305404671600176730ustar00rootroot00000000000000/***** BEGIN LICENSE BLOCK ***** * Version: EPL 1.0/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Eclipse Public * License Version 1.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of * the License at http://www.eclipse.org/legal/epl-v10.html * * Software distributed under the License is distributed on an "AS * IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or * implied. See the License for the specific language governing * rights and limitations under the License. * * Copyright (C) 2010 Charles O Nutter * * Alternatively, the contents of this file may be used under the terms of * either of the GNU General Public License Version 2 or later (the "GPL"), * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the EPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the EPL, the GPL or the LGPL. ***** END LICENSE BLOCK *****/ package org.jruby.ext.psych; import java.io.IOException; import java.io.OutputStreamWriter; import java.nio.charset.Charset; import java.util.HashMap; import java.util.Map; import org.jcodings.Encoding; import org.jruby.Ruby; import org.jruby.RubyArray; import org.jruby.RubyClass; import org.jruby.RubyFixnum; import org.jruby.RubyModule; import org.jruby.RubyObject; import org.jruby.RubyString; import org.jruby.anno.JRubyMethod; import org.jruby.runtime.ObjectAllocator; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.IOOutputStream; import org.yaml.snakeyaml.DumperOptions; import org.yaml.snakeyaml.emitter.Emitter; import org.yaml.snakeyaml.emitter.EmitterException; import org.yaml.snakeyaml.error.Mark; import org.yaml.snakeyaml.events.AliasEvent; import org.yaml.snakeyaml.events.DocumentEndEvent; import org.yaml.snakeyaml.events.DocumentStartEvent; import org.yaml.snakeyaml.events.Event; import org.yaml.snakeyaml.events.ImplicitTuple; import org.yaml.snakeyaml.events.MappingEndEvent; import org.yaml.snakeyaml.events.MappingStartEvent; import org.yaml.snakeyaml.events.ScalarEvent; import org.yaml.snakeyaml.events.SequenceEndEvent; import org.yaml.snakeyaml.events.SequenceStartEvent; import org.yaml.snakeyaml.events.StreamEndEvent; import org.yaml.snakeyaml.events.StreamStartEvent; import static org.jruby.runtime.Visibility.*; public class PsychEmitter extends RubyObject { public static void initPsychEmitter(Ruby runtime, RubyModule psych) { RubyClass psychHandler = runtime.defineClassUnder("Handler", runtime.getObject(), runtime.getObject().getAllocator(), psych); RubyClass psychEmitter = runtime.defineClassUnder("Emitter", psychHandler, new ObjectAllocator() { public IRubyObject allocate(Ruby 
runtime, RubyClass klazz) { return new PsychEmitter(runtime, klazz); } }, psych); psychEmitter.defineAnnotatedMethods(PsychEmitter.class); } public PsychEmitter(Ruby runtime, RubyClass klass) { super(runtime, klass); } @JRubyMethod(visibility = PRIVATE) public IRubyObject initialize(ThreadContext context, IRubyObject io) { options = new DumperOptions(); options.setIndent(2); this.io = io; return context.nil; } @JRubyMethod(visibility = PRIVATE) public IRubyObject initialize(ThreadContext context, IRubyObject io, IRubyObject rbOptions) { IRubyObject width = rbOptions.callMethod(context, "line_width"); IRubyObject canonical = rbOptions.callMethod(context, "canonical"); IRubyObject level = rbOptions.callMethod(context, "indentation"); options = new DumperOptions(); options.setCanonical(canonical.isTrue()); options.setIndent((int)level.convertToInteger().getLongValue()); line_width_set(context, width); this.io = io; return context.nil; } @JRubyMethod public IRubyObject start_stream(ThreadContext context, IRubyObject encoding) { if (!(encoding instanceof RubyFixnum)) { throw context.runtime.newTypeError(encoding, context.runtime.getFixnum()); } initEmitter(context, encoding); StreamStartEvent event = new StreamStartEvent(NULL_MARK, NULL_MARK); emit(context, event); return this; } @JRubyMethod public IRubyObject end_stream(ThreadContext context) { StreamEndEvent event = new StreamEndEvent(NULL_MARK, NULL_MARK); emit(context, event); return this; } @JRubyMethod public IRubyObject start_document(ThreadContext context, IRubyObject _version, IRubyObject tags, IRubyObject implicit) { DumperOptions.Version version = null; boolean implicitBool = implicit.isTrue(); Map tagsMap = null; RubyArray versionAry = _version.convertToArray(); if (versionAry.size() == 2) { int versionInt0 = (int)versionAry.eltInternal(0).convertToInteger().getLongValue(); int versionInt1 = (int)versionAry.eltInternal(1).convertToInteger().getLongValue(); if (versionInt0 == 1) { if (versionInt1 == 0) { version = DumperOptions.Version.V1_0; } else if (versionInt1 == 1) { version = DumperOptions.Version.V1_1; } } if (version == null) { throw context.runtime.newArgumentError("invalid YAML version: " + versionAry); } } RubyArray tagsAry = tags.convertToArray(); if (tagsAry.size() > 0) { tagsMap = new HashMap(tagsAry.size()); for (int i = 0; i < tagsAry.size(); i++) { RubyArray tagsTuple = tagsAry.eltInternal(i).convertToArray(); if (tagsTuple.size() != 2) { throw context.runtime.newRuntimeError("tags tuple must be of length 2"); } IRubyObject key = tagsTuple.eltInternal(0); IRubyObject value = tagsTuple.eltInternal(1); tagsMap.put( key.asJavaString(), value.asJavaString()); } } DocumentStartEvent event = new DocumentStartEvent(NULL_MARK, NULL_MARK, !implicitBool, version, tagsMap); emit(context, event); return this; } @JRubyMethod public IRubyObject end_document(ThreadContext context, IRubyObject implicit) { DocumentEndEvent event = new DocumentEndEvent(NULL_MARK, NULL_MARK, !implicit.isTrue()); emit(context, event); return this; } @JRubyMethod(required = 6) public IRubyObject scalar(ThreadContext context, IRubyObject[] args) { IRubyObject value = args[0]; IRubyObject anchor = args[1]; IRubyObject tag = args[2]; IRubyObject plain = args[3]; IRubyObject quoted = args[4]; IRubyObject style = args[5]; if (!(value instanceof RubyString)) { throw context.runtime.newTypeError(value, context.runtime.getString()); } ScalarEvent event = new ScalarEvent( anchor.isNil() ? null : anchor.asJavaString(), tag.isNil() ? 
null : tag.asJavaString(), new ImplicitTuple(plain.isTrue(), quoted.isTrue()), value.asJavaString(), NULL_MARK, NULL_MARK, SCALAR_STYLES[(int)style.convertToInteger().getLongValue()]); emit(context, event); return this; } @JRubyMethod(required = 4) public IRubyObject start_sequence(ThreadContext context, IRubyObject[] args) { IRubyObject anchor = args[0]; IRubyObject tag = args[1]; IRubyObject implicit = args[2]; IRubyObject style = args[3]; final int SEQUENCE_BLOCK = 1; // see psych/nodes/sequence.rb SequenceStartEvent event = new SequenceStartEvent( anchor.isNil() ? null : anchor.asJavaString(), tag.isNil() ? null : tag.asJavaString(), implicit.isTrue(), NULL_MARK, NULL_MARK, SEQUENCE_BLOCK != style.convertToInteger().getLongValue()); emit(context, event); return this; } @JRubyMethod public IRubyObject end_sequence(ThreadContext context) { SequenceEndEvent event = new SequenceEndEvent(NULL_MARK, NULL_MARK); emit(context, event); return this; } @JRubyMethod(required = 4) public IRubyObject start_mapping(ThreadContext context, IRubyObject[] args) { IRubyObject anchor = args[0]; IRubyObject tag = args[1]; IRubyObject implicit = args[2]; IRubyObject style = args[3]; final int MAPPING_BLOCK = 1; // see psych/nodes/mapping.rb MappingStartEvent event = new MappingStartEvent( anchor.isNil() ? null : anchor.asJavaString(), tag.isNil() ? null : tag.asJavaString(), implicit.isTrue(), NULL_MARK, NULL_MARK, MAPPING_BLOCK != style.convertToInteger().getLongValue()); emit(context, event); return this; } @JRubyMethod public IRubyObject end_mapping(ThreadContext context) { MappingEndEvent event = new MappingEndEvent(NULL_MARK, NULL_MARK); emit(context, event); return this; } @JRubyMethod public IRubyObject alias(ThreadContext context, IRubyObject anchor) { AliasEvent event = new AliasEvent(anchor.asJavaString(), NULL_MARK, NULL_MARK); emit(context, event); return this; } @JRubyMethod(name = "canonical=") public IRubyObject canonical_set(ThreadContext context, IRubyObject canonical) { // TODO: unclear if this affects a running emitter options.setCanonical(canonical.isTrue()); return canonical; } @JRubyMethod public IRubyObject canonical(ThreadContext context) { // TODO: unclear if this affects a running emitter return context.runtime.newBoolean(options.isCanonical()); } @JRubyMethod(name = "indentation=") public IRubyObject indentation_set(ThreadContext context, IRubyObject level) { // TODO: unclear if this affects a running emitter options.setIndent((int)level.convertToInteger().getLongValue()); return level; } @JRubyMethod public IRubyObject indentation(ThreadContext context) { // TODO: unclear if this affects a running emitter return context.runtime.newFixnum(options.getIndent()); } @JRubyMethod(name = "line_width=") public IRubyObject line_width_set(ThreadContext context, IRubyObject width) { int newWidth = (int)width.convertToInteger().getLongValue(); if (newWidth <= 0) newWidth = Integer.MAX_VALUE; options.setWidth(newWidth); return width; } @JRubyMethod public IRubyObject line_width(ThreadContext context) { return context.runtime.newFixnum(options.getWidth()); } private void emit(ThreadContext context, Event event) { try { if (emitter == null) throw context.runtime.newRuntimeError("uninitialized emitter"); emitter.emit(event); } catch (IOException ioe) { throw context.runtime.newIOErrorFromException(ioe); } catch (EmitterException ee) { throw context.runtime.newRuntimeError(ee.toString()); } } private void initEmitter(ThreadContext context, IRubyObject _encoding) { if (emitter != null) throw 
context.runtime.newRuntimeError("already initialized emitter"); Encoding encoding = PsychLibrary.YAMLEncoding.values()[(int)_encoding.convertToInteger().getLongValue()].encoding; Charset charset = context.runtime.getEncodingService().charsetForEncoding(encoding); emitter = new Emitter(new OutputStreamWriter(new IOOutputStream(io, encoding), charset), options); } Emitter emitter; DumperOptions options = new DumperOptions(); IRubyObject io; private static final Mark NULL_MARK = new Mark(null, 0, 0, 0, null, 0); // Map style constants from Psych values (ANY = 0 ... FOLDED = 5) // to SnakeYaml values; see psych/nodes/scalar.rb. private static final Character[] SCALAR_STYLES = new Character[] { null, // ANY; we'll choose plain null, // PLAIN '\'', // SINGLE_QUOTED '"', // DOUBLE_QUOTED '|', // LITERAL '>', // FOLDED }; } psych-2.2.4/ext/java/PsychLibrary.java000066400000000000000000000077431305404671600176650ustar00rootroot00000000000000/***** BEGIN LICENSE BLOCK ***** * Version: EPL 1.0/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Eclipse Public * License Version 1.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of * the License at http://www.eclipse.org/legal/epl-v10.html * * Software distributed under the License is distributed on an "AS * IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or * implied. See the License for the specific language governing * rights and limitations under the License. * * Copyright (C) 2010 Charles O Nutter * * Alternatively, the contents of this file may be used under the terms of * either of the GNU General Public License Version 2 or later (the "GPL"), * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the EPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the EPL, the GPL or the LGPL. 
***** END LICENSE BLOCK *****/ package org.jruby.ext.psych; import java.io.InputStream; import java.io.IOException; import java.util.Properties; import org.jcodings.Encoding; import org.jcodings.specific.UTF16BEEncoding; import org.jcodings.specific.UTF16LEEncoding; import org.jcodings.specific.UTF8Encoding; import org.jruby.Ruby; import org.jruby.RubyArray; import org.jruby.RubyModule; import org.jruby.RubyString; import org.jruby.internal.runtime.methods.JavaMethod.JavaMethodZero; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.Visibility; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.runtime.load.Library; public class PsychLibrary implements Library { public void load(final Ruby runtime, boolean wrap) { RubyModule psych = runtime.defineModule("Psych"); // load version from properties packed with the jar Properties props = new Properties(); try( InputStream is = runtime.getJRubyClassLoader().getResourceAsStream("META-INF/maven/org.yaml/snakeyaml/pom.properties") ) { props.load(is); } catch( IOException e ) { // ignored } RubyString version = runtime.newString(props.getProperty("version", "0.0") + ".0"); version.setFrozen(true); psych.setConstant("SNAKEYAML_VERSION", version); String[] versionParts = version.toString().split("\\."); final RubyArray versionElements = runtime.newArray(runtime.newFixnum(Integer.parseInt(versionParts[0])), runtime.newFixnum(Integer.parseInt(versionParts[1])), runtime.newFixnum(Integer.parseInt(versionParts[2]))); versionElements.setFrozen(true); psych.getSingletonClass().addMethod("libyaml_version", new JavaMethodZero(psych, Visibility.PUBLIC) { @Override public IRubyObject call(ThreadContext context, IRubyObject self, RubyModule clazz, String name) { return versionElements; } }); PsychParser.initPsychParser(runtime, psych); PsychEmitter.initPsychEmitter(runtime, psych); PsychToRuby.initPsychToRuby(runtime, psych); PsychYamlTree.initPsychYamlTree(runtime, psych); } public enum YAMLEncoding { YAML_ANY_ENCODING(UTF8Encoding.INSTANCE), YAML_UTF8_ENCODING(UTF8Encoding.INSTANCE), YAML_UTF16LE_ENCODING(UTF16LEEncoding.INSTANCE), YAML_UTF16BE_ENCODING(UTF16BEEncoding.INSTANCE); YAMLEncoding(Encoding encoding) { this.encoding = encoding; } public final Encoding encoding; } } psych-2.2.4/ext/java/PsychParser.java000066400000000000000000000402721305404671600175070ustar00rootroot00000000000000/***** BEGIN LICENSE BLOCK ***** * Version: EPL 1.0/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Eclipse Public * License Version 1.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of * the License at http://www.eclipse.org/legal/epl-v10.html * * Software distributed under the License is distributed on an "AS * IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or * implied. See the License for the specific language governing * rights and limitations under the License. * * Copyright (C) 2010 Charles O Nutter * * Alternatively, the contents of this file may be used under the terms of * either of the GNU General Public License Version 2 or later (the "GPL"), * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. 
If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the EPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the EPL, the GPL or the LGPL. ***** END LICENSE BLOCK *****/ package org.jruby.ext.psych; import java.io.ByteArrayInputStream; import java.io.InputStreamReader; import java.nio.charset.Charset; import java.util.Map; import org.jcodings.Encoding; import org.jcodings.specific.UTF8Encoding; import org.jcodings.unicode.UnicodeEncoding; import org.jruby.Ruby; import org.jruby.RubyArray; import org.jruby.RubyClass; import org.jruby.RubyEncoding; import org.jruby.RubyIO; import org.jruby.RubyKernel; import org.jruby.RubyModule; import org.jruby.RubyObject; import org.jruby.RubyString; import org.jruby.anno.JRubyMethod; import static org.jruby.ext.psych.PsychLibrary.YAMLEncoding.*; import org.jruby.runtime.Block; import org.jruby.runtime.Helpers; import org.jruby.runtime.ObjectAllocator; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.IOInputStream; import org.jruby.util.io.EncodingUtils; import org.jruby.util.log.Logger; import org.jruby.util.log.LoggerFactory; import org.yaml.snakeyaml.DumperOptions; import org.yaml.snakeyaml.error.Mark; import org.yaml.snakeyaml.error.MarkedYAMLException; import org.yaml.snakeyaml.events.AliasEvent; import org.yaml.snakeyaml.events.DocumentEndEvent; import org.yaml.snakeyaml.events.DocumentStartEvent; import org.yaml.snakeyaml.events.Event; import org.yaml.snakeyaml.events.Event.ID; import org.yaml.snakeyaml.events.MappingStartEvent; import org.yaml.snakeyaml.events.ScalarEvent; import org.yaml.snakeyaml.events.SequenceStartEvent; import org.yaml.snakeyaml.parser.Parser; import org.yaml.snakeyaml.parser.ParserException; import org.yaml.snakeyaml.parser.ParserImpl; import org.yaml.snakeyaml.reader.ReaderException; import org.yaml.snakeyaml.reader.StreamReader; import org.yaml.snakeyaml.scanner.ScannerException; import static org.jruby.runtime.Helpers.invoke; import org.jruby.util.ByteList; public class PsychParser extends RubyObject { private static final Logger LOG = LoggerFactory.getLogger(PsychParser.class); public static void initPsychParser(Ruby runtime, RubyModule psych) { RubyClass psychParser = runtime.defineClassUnder("Parser", runtime.getObject(), new ObjectAllocator() { public IRubyObject allocate(Ruby runtime, RubyClass klazz) { return new PsychParser(runtime, klazz); } }, psych); RubyKernel.require(runtime.getNil(), runtime.newString("psych/syntax_error"), Block.NULL_BLOCK); psychParser.defineConstant("ANY", runtime.newFixnum(YAML_ANY_ENCODING.ordinal())); psychParser.defineConstant("UTF8", runtime.newFixnum(YAML_UTF8_ENCODING.ordinal())); psychParser.defineConstant("UTF16LE", runtime.newFixnum(YAML_UTF16LE_ENCODING.ordinal())); psychParser.defineConstant("UTF16BE", runtime.newFixnum(YAML_UTF16BE_ENCODING.ordinal())); psychParser.defineAnnotatedMethods(PsychParser.class); } public PsychParser(Ruby runtime, RubyClass klass) { super(runtime, klass); } @JRubyMethod public IRubyObject parse(ThreadContext context, IRubyObject yaml) { Ruby runtime = context.runtime; return parse(context, yaml, runtime.getNil()); } private IRubyObject 
stringOrNilFor(Ruby runtime, String value, boolean tainted) { if (value == null) return runtime.getNil(); // No need to taint nil return stringFor(runtime, value, tainted); } private RubyString stringFor(Ruby runtime, String value, boolean tainted) { Encoding encoding = runtime.getDefaultInternalEncoding(); if (encoding == null) { encoding = UTF8Encoding.INSTANCE; } Charset charset = RubyEncoding.UTF8; if (encoding.getCharset() != null) { charset = encoding.getCharset(); } ByteList bytes = new ByteList(value.getBytes(charset), encoding); RubyString string = RubyString.newString(runtime, bytes); string.setTaint(tainted); return string; } private StreamReader readerFor(ThreadContext context, IRubyObject yaml) { Ruby runtime = context.runtime; if (yaml instanceof RubyString) { ByteList byteList = ((RubyString)yaml).getByteList(); Encoding enc = byteList.getEncoding(); // if not unicode, transcode to UTF8 if (!(enc instanceof UnicodeEncoding)) { byteList = EncodingUtils.strConvEnc(context, byteList, enc, UTF8Encoding.INSTANCE); enc = UTF8Encoding.INSTANCE; } ByteArrayInputStream bais = new ByteArrayInputStream(byteList.getUnsafeBytes(), byteList.getBegin(), byteList.getRealSize()); Charset charset = enc.getCharset(); assert charset != null : "charset for encoding " + enc + " should not be null"; InputStreamReader isr = new InputStreamReader(bais, charset); return new StreamReader(isr); } // fall back on IOInputStream, using default charset if (yaml.respondsTo("read")) { Encoding enc = (yaml instanceof RubyIO) ? ((RubyIO)yaml).getReadEncoding() : UTF8Encoding.INSTANCE; Charset charset = enc.getCharset(); return new StreamReader(new InputStreamReader(new IOInputStream(yaml), charset)); } else { throw runtime.newTypeError(yaml, runtime.getIO()); } } @JRubyMethod public IRubyObject parse(ThreadContext context, IRubyObject yaml, IRubyObject path) { Ruby runtime = context.runtime; boolean tainted = yaml.isTaint() || yaml instanceof RubyIO; try { parser = new ParserImpl(readerFor(context, yaml)); if (path.isNil() && yaml.respondsTo("path")) { path = yaml.callMethod(context, "path"); } IRubyObject handler = getInstanceVariable("@handler"); while (true) { event = parser.getEvent(); // FIXME: Event should expose a getID, so it can be switched if (event.is(ID.StreamStart)) { invoke(context, handler, "start_stream", runtime.newFixnum(YAML_ANY_ENCODING.ordinal())); } else if (event.is(ID.DocumentStart)) { handleDocumentStart(context, (DocumentStartEvent) event, tainted, handler); } else if (event.is(ID.DocumentEnd)) { IRubyObject notExplicit = runtime.newBoolean(!((DocumentEndEvent) event).getExplicit()); invoke(context, handler, "end_document", notExplicit); } else if (event.is(ID.Alias)) { IRubyObject alias = stringOrNilFor(runtime, ((AliasEvent)event).getAnchor(), tainted); invoke(context, handler, "alias", alias); } else if (event.is(ID.Scalar)) { handleScalar(context, (ScalarEvent) event, tainted, handler); } else if (event.is(ID.SequenceStart)) { handleSequenceStart(context,(SequenceStartEvent) event, tainted, handler); } else if (event.is(ID.SequenceEnd)) { invoke(context, handler, "end_sequence"); } else if (event.is(ID.MappingStart)) { handleMappingStart(context, (MappingStartEvent) event, tainted, handler); } else if (event.is(ID.MappingEnd)) { invoke(context, handler, "end_mapping"); } else if (event.is(ID.StreamEnd)) { invoke(context, handler, "end_stream"); break; } } } catch (ParserException pe) { parser = null; raiseParserException(context, yaml, pe, path); } catch (ScannerException se) { 
parser = null; StringBuilder message = new StringBuilder("syntax error"); if (se.getProblemMark() != null) { message.append(se.getProblemMark().toString()); } raiseParserException(context, yaml, se, path); } catch (ReaderException re) { parser = null; raiseParserException(context, yaml, re, path); } catch (Throwable t) { Helpers.throwException(t); return this; } return this; } private void handleDocumentStart(ThreadContext context, DocumentStartEvent dse, boolean tainted, IRubyObject handler) { Ruby runtime = context.runtime; DumperOptions.Version _version = dse.getVersion(); IRubyObject version = _version == null ? RubyArray.newArray(runtime) : RubyArray.newArray(runtime, runtime.newFixnum(_version.major()), runtime.newFixnum(_version.minor())); Map tagsMap = dse.getTags(); RubyArray tags = RubyArray.newArray(runtime); if (tagsMap != null && tagsMap.size() > 0) { for (Map.Entry tag : tagsMap.entrySet()) { IRubyObject key = stringFor(runtime, tag.getKey(), tainted); IRubyObject value = stringFor(runtime, tag.getValue(), tainted); tags.append(RubyArray.newArray(runtime, key, value)); } } IRubyObject notExplicit = runtime.newBoolean(!dse.getExplicit()); invoke(context, handler, "start_document", version, tags, notExplicit); } private void handleMappingStart(ThreadContext context, MappingStartEvent mse, boolean tainted, IRubyObject handler) { Ruby runtime = context.runtime; IRubyObject anchor = stringOrNilFor(runtime, mse.getAnchor(), tainted); IRubyObject tag = stringOrNilFor(runtime, mse.getTag(), tainted); IRubyObject implicit = runtime.newBoolean(mse.getImplicit()); IRubyObject style = runtime.newFixnum(translateFlowStyle(mse.getFlowStyle())); invoke(context, handler, "start_mapping", anchor, tag, implicit, style); } private void handleScalar(ThreadContext context, ScalarEvent se, boolean tainted, IRubyObject handler) { Ruby runtime = context.runtime; IRubyObject anchor = stringOrNilFor(runtime, se.getAnchor(), tainted); IRubyObject tag = stringOrNilFor(runtime, se.getTag(), tainted); IRubyObject plain_implicit = runtime.newBoolean(se.getImplicit().canOmitTagInPlainScalar()); IRubyObject quoted_implicit = runtime.newBoolean(se.getImplicit().canOmitTagInNonPlainScalar()); IRubyObject style = runtime.newFixnum(translateStyle(se.getStyle())); IRubyObject val = stringFor(runtime, se.getValue(), tainted); invoke(context, handler, "scalar", val, anchor, tag, plain_implicit, quoted_implicit, style); } private void handleSequenceStart(ThreadContext context, SequenceStartEvent sse, boolean tainted, IRubyObject handler) { Ruby runtime = context.runtime; IRubyObject anchor = stringOrNilFor(runtime, sse.getAnchor(), tainted); IRubyObject tag = stringOrNilFor(runtime, sse.getTag(), tainted); IRubyObject implicit = runtime.newBoolean(sse.getImplicit()); IRubyObject style = runtime.newFixnum(translateFlowStyle(sse.getFlowStyle())); invoke(context, handler, "start_sequence", anchor, tag, implicit, style); } private static void raiseParserException(ThreadContext context, IRubyObject yaml, ReaderException re, IRubyObject rbPath) { Ruby runtime; RubyClass se; IRubyObject exception; runtime = context.runtime; se = (RubyClass)runtime.getModule("Psych").getConstant("SyntaxError"); exception = se.newInstance(context, new IRubyObject[] { rbPath, runtime.newFixnum(0), runtime.newFixnum(0), runtime.newFixnum(re.getPosition()), (null == re.getName() ? runtime.getNil() : runtime.newString(re.getName())), (null == re.toString() ? 
runtime.getNil() : runtime.newString(re.toString())) }, Block.NULL_BLOCK); RubyKernel.raise(context, runtime.getKernel(), new IRubyObject[] { exception }, Block.NULL_BLOCK); } private static void raiseParserException(ThreadContext context, IRubyObject yaml, MarkedYAMLException mye, IRubyObject rbPath) { Ruby runtime; Mark mark; RubyClass se; IRubyObject exception; runtime = context.runtime; se = (RubyClass)runtime.getModule("Psych").getConstant("SyntaxError"); mark = mye.getProblemMark(); exception = se.newInstance(context, new IRubyObject[] { rbPath, runtime.newFixnum(mark.getLine() + 1), runtime.newFixnum(mark.getColumn() + 1), runtime.newFixnum(mark.getIndex()), (null == mye.getProblem() ? runtime.getNil() : runtime.newString(mye.getProblem())), (null == mye.getContext() ? runtime.getNil() : runtime.newString(mye.getContext())) }, Block.NULL_BLOCK); RubyKernel.raise(context, runtime.getKernel(), new IRubyObject[] { exception }, Block.NULL_BLOCK); } private static int translateStyle(Character style) { if (style == null) return 0; // any switch (style) { case 0: return 1; // plain case '\'': return 2; // single-quoted case '"': return 3; // double-quoted case '|': return 4; // literal case '>': return 5; // folded default: return 0; // any } } private static int translateFlowStyle(Boolean flowStyle) { if (flowStyle == null) return 0; // any if (flowStyle) return 2; return 1; } @JRubyMethod public IRubyObject mark(ThreadContext context) { Ruby runtime = context.runtime; Event event = null; if (parser != null) { event = parser.peekEvent(); if (event == null) event = this.event; } if (event == null) { return ((RubyClass)context.runtime.getClassFromPath("Psych::Parser::Mark")).newInstance( context, runtime.newFixnum(0), runtime.newFixnum(0), runtime.newFixnum(0), Block.NULL_BLOCK ); } Mark mark = event.getStartMark(); return ((RubyClass)context.runtime.getClassFromPath("Psych::Parser::Mark")).newInstance( context, runtime.newFixnum(mark.getIndex()), runtime.newFixnum(mark.getLine()), runtime.newFixnum(mark.getColumn()), Block.NULL_BLOCK ); } private Parser parser; private Event event; } psych-2.2.4/ext/java/PsychToRuby.java000066400000000000000000000072051305404671600174760ustar00rootroot00000000000000/***** BEGIN LICENSE BLOCK ***** * Version: EPL 1.0/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Eclipse Public * License Version 1.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of * the License at http://www.eclipse.org/legal/epl-v10.html * * Software distributed under the License is distributed on an "AS * IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or * implied. See the License for the specific language governing * rights and limitations under the License. * * Copyright (C) 2010 Charles O Nutter * * Alternatively, the contents of this file may be used under the terms of * either of the GNU General Public License Version 2 or later (the "GPL"), * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the EPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. 
If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the EPL, the GPL or the LGPL. ***** END LICENSE BLOCK *****/ package org.jruby.ext.psych; import org.jruby.Ruby; import org.jruby.RubyClass; import org.jruby.RubyModule; import org.jruby.RubyObject; import org.jruby.RubyException; import org.jruby.anno.JRubyMethod; import org.jruby.exceptions.RaiseException; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.builtin.IRubyObject; import static org.jruby.runtime.Visibility.*; public class PsychToRuby { public static void initPsychToRuby(Ruby runtime, RubyModule psych) { RubyClass classLoader = runtime.defineClassUnder("ClassLoader", runtime.getObject(), RubyObject.OBJECT_ALLOCATOR, psych); RubyModule visitors = runtime.defineModuleUnder("Visitors", psych); RubyClass visitor = runtime.defineClassUnder("Visitor", runtime.getObject(), runtime.getObject().getAllocator(), visitors); RubyClass psychToRuby = runtime.defineClassUnder("ToRuby", visitor, RubyObject.OBJECT_ALLOCATOR, visitors); psychToRuby.defineAnnotatedMethods(ToRuby.class); classLoader.defineAnnotatedMethods(ClassLoader.class); } public static class ToRuby { @JRubyMethod(visibility = PRIVATE) public static IRubyObject build_exception(ThreadContext context, IRubyObject self, IRubyObject klass, IRubyObject message) { if (klass instanceof RubyClass) { IRubyObject exception = ((RubyClass)klass).allocate(); ((RubyException)exception).setMessage(message); return exception; } else { throw context.runtime.newTypeError(klass, context.runtime.getClassClass()); } } } public static class ClassLoader { @JRubyMethod(visibility = PRIVATE) public static IRubyObject path2class(ThreadContext context, IRubyObject self, IRubyObject path) { try { return context.runtime.getClassFromPath(path.asJavaString()); } catch (RaiseException re) { if (re.getException().getMetaClass() == context.runtime.getNameError()) { throw context.runtime.newArgumentError("undefined class/module " + path); } throw re; } } } } psych-2.2.4/ext/java/PsychYamlTree.java000066400000000000000000000050451305404671600177740ustar00rootroot00000000000000/***** BEGIN LICENSE BLOCK ***** * Version: EPL 1.0/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Eclipse Public * License Version 1.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of * the License at http://www.eclipse.org/legal/epl-v10.html * * Software distributed under the License is distributed on an "AS * IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or * implied. See the License for the specific language governing * rights and limitations under the License. * * Copyright (C) 2010 Charles O Nutter * * Alternatively, the contents of this file may be used under the terms of * either of the GNU General Public License Version 2 or later (the "GPL"), * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the EPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. 
If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the EPL, the GPL or the LGPL. ***** END LICENSE BLOCK *****/ package org.jruby.ext.psych; import org.jruby.Ruby; import org.jruby.RubyClass; import org.jruby.RubyModule; import org.jruby.RubyObject; import org.jruby.anno.JRubyMethod; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.builtin.IRubyObject; import static org.jruby.runtime.Visibility.*; public class PsychYamlTree { public static void initPsychYamlTree(Ruby runtime, RubyModule psych) { RubyModule visitors = (RubyModule)psych.getConstant("Visitors"); RubyClass visitor = (RubyClass)visitors.getConstant("Visitor"); RubyClass psychYamlTree = runtime.defineClassUnder("YAMLTree", visitor, RubyObject.OBJECT_ALLOCATOR, visitors); psychYamlTree.defineAnnotatedMethods(PsychYamlTree.class); } @JRubyMethod(visibility = PRIVATE) public static IRubyObject private_iv_get(ThreadContext context, IRubyObject self, IRubyObject target, IRubyObject prop) { IRubyObject obj = (IRubyObject)target.getInternalVariables().getInternalVariable(prop.asJavaString()); if (obj == null) obj = context.nil; return obj; } } psych-2.2.4/ext/psych/000077500000000000000000000000001305404671600146015ustar00rootroot00000000000000psych-2.2.4/ext/psych/.gitignore000066400000000000000000000001611305404671600165670ustar00rootroot00000000000000/api.c /config.h /dumper.c /emitter.c /loader.c /parser.c /reader.c /scanner.c /writer.c /yaml.h /yaml_private.h psych-2.2.4/ext/psych/depend000066400000000000000000000001361305404671600157630ustar00rootroot00000000000000$(OBJS): $(HDRS) $(ruby_headers) \ $(hdrdir)/ruby/encoding.h \ $(hdrdir)/ruby/oniguruma.h psych-2.2.4/ext/psych/extconf.rb000066400000000000000000000015451305404671600166010ustar00rootroot00000000000000# -*- coding: us-ascii -*- # frozen_string_literal: false require 'mkmf' require 'fileutils' # :stopdoc: dir_config 'libyaml' if enable_config("bundled-libyaml", false) || !(find_header('yaml.h') && find_library('yaml', 'yaml_get_version')) # Embed libyaml since we could not find it. 
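  # (Editor's note, hedged: because of the enable_config call above, the
  # bundled copy can also be requested explicitly at build time, e.g.
  # `ruby extconf.rb --enable-bundled-libyaml` or, when installing the gem,
  # `gem install psych -- --enable-bundled-libyaml`.)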
$VPATH << "$(srcdir)/yaml" $INCFLAGS << " -I$(srcdir)/yaml" $srcs = Dir.glob("#{$srcdir}/{,yaml/}*.c").map {|n| File.basename(n)} if have_macro("_WIN32") $CPPFLAGS << " -DYAML_DECLARE_STATIC -DHAVE_CONFIG_H" end have_header 'dlfcn.h' have_header 'inttypes.h' have_header 'memory.h' have_header 'stdint.h' have_header 'stdlib.h' have_header 'strings.h' have_header 'string.h' have_header 'sys/stat.h' have_header 'sys/types.h' have_header 'unistd.h' find_header 'yaml.h' have_header 'config.h' end create_makefile 'psych' # :startdoc: psych-2.2.4/ext/psych/psych.c000066400000000000000000000012721305404671600160750ustar00rootroot00000000000000#include /* call-seq: Psych.libyaml_version * * Returns the version of libyaml being used */ static VALUE libyaml_version(VALUE module) { int major, minor, patch; VALUE list[3]; yaml_get_version(&major, &minor, &patch); list[0] = INT2NUM((long)major); list[1] = INT2NUM((long)minor); list[2] = INT2NUM((long)patch); return rb_ary_new4((long)3, list); } VALUE mPsych; void Init_psych(void) { mPsych = rb_define_module("Psych"); rb_define_singleton_method(mPsych, "libyaml_version", libyaml_version, 0); Init_psych_parser(); Init_psych_emitter(); Init_psych_to_ruby(); Init_psych_yaml_tree(); } /* vim: set noet sws=4 sw=4: */ psych-2.2.4/ext/psych/psych.h000066400000000000000000000003561305404671600161040ustar00rootroot00000000000000#ifndef PSYCH_H #define PSYCH_H #include #include #include #include #include #include #include extern VALUE mPsych; #endif psych-2.2.4/ext/psych/psych_emitter.c000066400000000000000000000330331305404671600176260ustar00rootroot00000000000000#include #if !defined(RARRAY_CONST_PTR) #define RARRAY_CONST_PTR(s) (const VALUE *)RARRAY_PTR(s) #endif #if !defined(RARRAY_AREF) #define RARRAY_AREF(a, i) RARRAY_CONST_PTR(a)[i] #endif VALUE cPsychEmitter; static ID id_io; static ID id_write; static ID id_line_width; static ID id_indentation; static ID id_canonical; static void emit(yaml_emitter_t * emitter, yaml_event_t * event) { if(!yaml_emitter_emit(emitter, event)) rb_raise(rb_eRuntimeError, "%s", emitter->problem); } static int writer(void *ctx, unsigned char *buffer, size_t size) { VALUE self = (VALUE)ctx, io = rb_attr_get(self, id_io); VALUE str = rb_enc_str_new((const char *)buffer, (long)size, rb_utf8_encoding()); VALUE wrote = rb_funcall(io, id_write, 1, str); return (int)NUM2INT(wrote); } static void dealloc(void * ptr) { yaml_emitter_t * emitter; emitter = (yaml_emitter_t *)ptr; yaml_emitter_delete(emitter); xfree(emitter); } #if 0 static size_t memsize(const void *ptr) { const yaml_emitter_t *emitter = ptr; /* TODO: calculate emitter's size */ return 0; } #endif static const rb_data_type_t psych_emitter_type = { "Psych/emitter", {0, dealloc, 0,}, 0, 0, #ifdef RUBY_TYPED_FREE_IMMEDIATELY RUBY_TYPED_FREE_IMMEDIATELY, #endif }; static VALUE allocate(VALUE klass) { yaml_emitter_t * emitter; VALUE obj = TypedData_Make_Struct(klass, yaml_emitter_t, &psych_emitter_type, emitter); yaml_emitter_initialize(emitter); yaml_emitter_set_unicode(emitter, 1); yaml_emitter_set_indent(emitter, 2); return obj; } /* call-seq: Psych::Emitter.new(io, options = Psych::Emitter::OPTIONS) * * Create a new Psych::Emitter that writes to +io+. 
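 *
 * A hedged usage sketch added by the editor (not part of the upstream RDoc);
 * it relies only on methods and constants defined in this extension, using
 * Psych::Parser::UTF8 for the stream encoding and 0 ("any") for the scalar
 * style:
 *
 *   emitter = Psych::Emitter.new $stdout
 *   emitter.start_stream Psych::Parser::UTF8
 *   emitter.start_document [], [], true
 *   emitter.scalar "hello world", nil, nil, true, false, 0
 *   emitter.end_document true
 *   emitter.end_stream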
*/ static VALUE initialize(int argc, VALUE *argv, VALUE self) { yaml_emitter_t * emitter; VALUE io, options; VALUE line_width; VALUE indent; VALUE canonical; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); if (rb_scan_args(argc, argv, "11", &io, &options) == 2) { line_width = rb_funcall(options, id_line_width, 0); indent = rb_funcall(options, id_indentation, 0); canonical = rb_funcall(options, id_canonical, 0); yaml_emitter_set_width(emitter, NUM2INT(line_width)); yaml_emitter_set_indent(emitter, NUM2INT(indent)); yaml_emitter_set_canonical(emitter, Qtrue == canonical ? 1 : 0); } rb_ivar_set(self, id_io, io); yaml_emitter_set_output(emitter, writer, (void *)self); return self; } /* call-seq: emitter.start_stream(encoding) * * Start a stream emission with +encoding+ * * See Psych::Handler#start_stream */ static VALUE start_stream(VALUE self, VALUE encoding) { yaml_emitter_t * emitter; yaml_event_t event; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); Check_Type(encoding, T_FIXNUM); yaml_stream_start_event_initialize(&event, (yaml_encoding_t)NUM2INT(encoding)); emit(emitter, &event); return self; } /* call-seq: emitter.end_stream * * End a stream emission * * See Psych::Handler#end_stream */ static VALUE end_stream(VALUE self) { yaml_emitter_t * emitter; yaml_event_t event; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); yaml_stream_end_event_initialize(&event); emit(emitter, &event); return self; } /* call-seq: emitter.start_document(version, tags, implicit) * * Start a document emission with YAML +version+, +tags+, and an +implicit+ * start. * * See Psych::Handler#start_document */ static VALUE start_document(VALUE self, VALUE version, VALUE tags, VALUE imp) { yaml_emitter_t * emitter; yaml_tag_directive_t * head = NULL; yaml_tag_directive_t * tail = NULL; yaml_event_t event; yaml_version_directive_t version_directive; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); Check_Type(version, T_ARRAY); if(RARRAY_LEN(version) > 0) { VALUE major = rb_ary_entry(version, (long)0); VALUE minor = rb_ary_entry(version, (long)1); version_directive.major = NUM2INT(major); version_directive.minor = NUM2INT(minor); } if(RTEST(tags)) { long i = 0; long len; rb_encoding * encoding = rb_utf8_encoding(); Check_Type(tags, T_ARRAY); len = RARRAY_LEN(tags); head = xcalloc((size_t)len, sizeof(yaml_tag_directive_t)); tail = head; for(i = 0; i < len && i < RARRAY_LEN(tags); i++) { VALUE tuple = RARRAY_AREF(tags, i); VALUE name; VALUE value; Check_Type(tuple, T_ARRAY); if(RARRAY_LEN(tuple) < 2) { xfree(head); rb_raise(rb_eRuntimeError, "tag tuple must be of length 2"); } name = RARRAY_AREF(tuple, 0); value = RARRAY_AREF(tuple, 1); StringValue(name); StringValue(value); name = rb_str_export_to_enc(name, encoding); value = rb_str_export_to_enc(value, encoding); tail->handle = (yaml_char_t *)RSTRING_PTR(name); tail->prefix = (yaml_char_t *)RSTRING_PTR(value); tail++; } } yaml_document_start_event_initialize( &event, (RARRAY_LEN(version) > 0) ? &version_directive : NULL, head, tail, imp ? 1 : 0 ); emit(emitter, &event); if(head) xfree(head); return self; } /* call-seq: emitter.end_document(implicit) * * End a document emission with an +implicit+ ending. * * See Psych::Handler#end_document */ static VALUE end_document(VALUE self, VALUE imp) { yaml_emitter_t * emitter; yaml_event_t event; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); yaml_document_end_event_initialize(&event, imp ? 
1 : 0); emit(emitter, &event); return self; } /* call-seq: emitter.scalar(value, anchor, tag, plain, quoted, style) * * Emit a scalar with +value+, +anchor+, +tag+, and a +plain+ or +quoted+ * string type with +style+. * * See Psych::Handler#scalar */ static VALUE scalar( VALUE self, VALUE value, VALUE anchor, VALUE tag, VALUE plain, VALUE quoted, VALUE style ) { yaml_emitter_t * emitter; yaml_event_t event; rb_encoding *encoding; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); Check_Type(value, T_STRING); encoding = rb_utf8_encoding(); value = rb_str_export_to_enc(value, encoding); if(!NIL_P(anchor)) { Check_Type(anchor, T_STRING); anchor = rb_str_export_to_enc(anchor, encoding); } if(!NIL_P(tag)) { Check_Type(tag, T_STRING); tag = rb_str_export_to_enc(tag, encoding); } yaml_scalar_event_initialize( &event, (yaml_char_t *)(NIL_P(anchor) ? NULL : StringValuePtr(anchor)), (yaml_char_t *)(NIL_P(tag) ? NULL : StringValuePtr(tag)), (yaml_char_t*)StringValuePtr(value), (int)RSTRING_LEN(value), plain ? 1 : 0, quoted ? 1 : 0, (yaml_scalar_style_t)NUM2INT(style) ); emit(emitter, &event); return self; } /* call-seq: emitter.start_sequence(anchor, tag, implicit, style) * * Start emitting a sequence with +anchor+, a +tag+, +implicit+ sequence * start and end, along with +style+. * * See Psych::Handler#start_sequence */ static VALUE start_sequence( VALUE self, VALUE anchor, VALUE tag, VALUE implicit, VALUE style ) { yaml_emitter_t * emitter; yaml_event_t event; rb_encoding * encoding = rb_utf8_encoding(); if(!NIL_P(anchor)) { Check_Type(anchor, T_STRING); anchor = rb_str_export_to_enc(anchor, encoding); } if(!NIL_P(tag)) { Check_Type(tag, T_STRING); tag = rb_str_export_to_enc(tag, encoding); } TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); yaml_sequence_start_event_initialize( &event, (yaml_char_t *)(NIL_P(anchor) ? NULL : StringValuePtr(anchor)), (yaml_char_t *)(NIL_P(tag) ? NULL : StringValuePtr(tag)), implicit ? 1 : 0, (yaml_sequence_style_t)NUM2INT(style) ); emit(emitter, &event); return self; } /* call-seq: emitter.end_sequence * * End sequence emission. * * See Psych::Handler#end_sequence */ static VALUE end_sequence(VALUE self) { yaml_emitter_t * emitter; yaml_event_t event; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); yaml_sequence_end_event_initialize(&event); emit(emitter, &event); return self; } /* call-seq: emitter.start_mapping(anchor, tag, implicit, style) * * Start emitting a YAML map with +anchor+, +tag+, an +implicit+ start * and end, and +style+. * * See Psych::Handler#start_mapping */ static VALUE start_mapping( VALUE self, VALUE anchor, VALUE tag, VALUE implicit, VALUE style ) { yaml_emitter_t * emitter; yaml_event_t event; rb_encoding *encoding; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); encoding = rb_utf8_encoding(); if(!NIL_P(anchor)) { Check_Type(anchor, T_STRING); anchor = rb_str_export_to_enc(anchor, encoding); } if(!NIL_P(tag)) { Check_Type(tag, T_STRING); tag = rb_str_export_to_enc(tag, encoding); } yaml_mapping_start_event_initialize( &event, (yaml_char_t *)(NIL_P(anchor) ? NULL : StringValuePtr(anchor)), (yaml_char_t *)(NIL_P(tag) ? NULL : StringValuePtr(tag)), implicit ? 1 : 0, (yaml_mapping_style_t)NUM2INT(style) ); emit(emitter, &event); return self; } /* call-seq: emitter.end_mapping * * Emit the end of a mapping. 
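 *
 * A hedged sketch from the editor (not upstream RDoc): together with
 * #start_mapping and #scalar, a single key/value pair is emitted as
 *
 *   emitter.start_mapping nil, nil, true, 1   # 1 = block mapping style
 *   emitter.scalar "key",   nil, nil, true, false, 0
 *   emitter.scalar "value", nil, nil, true, false, 0
 *   emitter.end_mapping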
* * See Psych::Handler#end_mapping */ static VALUE end_mapping(VALUE self) { yaml_emitter_t * emitter; yaml_event_t event; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); yaml_mapping_end_event_initialize(&event); emit(emitter, &event); return self; } /* call-seq: emitter.alias(anchor) * * Emit an alias with +anchor+. * * See Psych::Handler#alias */ static VALUE alias(VALUE self, VALUE anchor) { yaml_emitter_t * emitter; yaml_event_t event; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); if(!NIL_P(anchor)) { Check_Type(anchor, T_STRING); anchor = rb_str_export_to_enc(anchor, rb_utf8_encoding()); } yaml_alias_event_initialize( &event, (yaml_char_t *)(NIL_P(anchor) ? NULL : StringValuePtr(anchor)) ); emit(emitter, &event); return self; } /* call-seq: emitter.canonical = true * * Set the output style to canonical, or not. */ static VALUE set_canonical(VALUE self, VALUE style) { yaml_emitter_t * emitter; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); yaml_emitter_set_canonical(emitter, Qtrue == style ? 1 : 0); return style; } /* call-seq: emitter.canonical * * Get the output style, canonical or not. */ static VALUE canonical(VALUE self) { yaml_emitter_t * emitter; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); return (emitter->canonical == 0) ? Qfalse : Qtrue; } /* call-seq: emitter.indentation = level * * Set the indentation level to +level+. The level must be less than 10 and * greater than 1. */ static VALUE set_indentation(VALUE self, VALUE level) { yaml_emitter_t * emitter; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); yaml_emitter_set_indent(emitter, NUM2INT(level)); return level; } /* call-seq: emitter.indentation * * Get the indentation level. */ static VALUE indentation(VALUE self) { yaml_emitter_t * emitter; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); return INT2NUM(emitter->best_indent); } /* call-seq: emitter.line_width * * Get the preferred line width. */ static VALUE line_width(VALUE self) { yaml_emitter_t * emitter; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); return INT2NUM(emitter->best_width); } /* call-seq: emitter.line_width = width * * Set the preferred line with to +width+. 
*/ static VALUE set_line_width(VALUE self, VALUE width) { yaml_emitter_t * emitter; TypedData_Get_Struct(self, yaml_emitter_t, &psych_emitter_type, emitter); yaml_emitter_set_width(emitter, NUM2INT(width)); return width; } void Init_psych_emitter(void) { VALUE psych = rb_define_module("Psych"); VALUE handler = rb_define_class_under(psych, "Handler", rb_cObject); cPsychEmitter = rb_define_class_under(psych, "Emitter", handler); rb_define_alloc_func(cPsychEmitter, allocate); rb_define_method(cPsychEmitter, "initialize", initialize, -1); rb_define_method(cPsychEmitter, "start_stream", start_stream, 1); rb_define_method(cPsychEmitter, "end_stream", end_stream, 0); rb_define_method(cPsychEmitter, "start_document", start_document, 3); rb_define_method(cPsychEmitter, "end_document", end_document, 1); rb_define_method(cPsychEmitter, "scalar", scalar, 6); rb_define_method(cPsychEmitter, "start_sequence", start_sequence, 4); rb_define_method(cPsychEmitter, "end_sequence", end_sequence, 0); rb_define_method(cPsychEmitter, "start_mapping", start_mapping, 4); rb_define_method(cPsychEmitter, "end_mapping", end_mapping, 0); rb_define_method(cPsychEmitter, "alias", alias, 1); rb_define_method(cPsychEmitter, "canonical", canonical, 0); rb_define_method(cPsychEmitter, "canonical=", set_canonical, 1); rb_define_method(cPsychEmitter, "indentation", indentation, 0); rb_define_method(cPsychEmitter, "indentation=", set_indentation, 1); rb_define_method(cPsychEmitter, "line_width", line_width, 0); rb_define_method(cPsychEmitter, "line_width=", set_line_width, 1); id_io = rb_intern("io"); id_write = rb_intern("write"); id_line_width = rb_intern("line_width"); id_indentation = rb_intern("indentation"); id_canonical = rb_intern("canonical"); } /* vim: set noet sws=4 sw=4: */ psych-2.2.4/ext/psych/psych_emitter.h000066400000000000000000000001541305404671600176310ustar00rootroot00000000000000#ifndef PSYCH_EMITTER_H #define PSYCH_EMITTER_H #include void Init_psych_emitter(void); #endif psych-2.2.4/ext/psych/psych_parser.c000066400000000000000000000354051305404671600174560ustar00rootroot00000000000000#include VALUE cPsychParser; VALUE ePsychSyntaxError; static ID id_read; static ID id_path; static ID id_empty; static ID id_start_stream; static ID id_end_stream; static ID id_start_document; static ID id_end_document; static ID id_alias; static ID id_scalar; static ID id_start_sequence; static ID id_end_sequence; static ID id_start_mapping; static ID id_end_mapping; #define PSYCH_TRANSCODE(_str, _yaml_enc, _internal_enc) \ do { \ rb_enc_associate_index((_str), (_yaml_enc)); \ if(_internal_enc) \ (_str) = rb_str_export_to_enc((_str), (_internal_enc)); \ } while (0) static int io_reader(void * data, unsigned char *buf, size_t size, size_t *read) { VALUE io = (VALUE)data; VALUE string = rb_funcall(io, id_read, 1, INT2NUM(size)); *read = 0; if(! 
NIL_P(string)) { void * str = (void *)StringValuePtr(string); *read = (size_t)RSTRING_LEN(string); memcpy(buf, str, *read); } return 1; } static void dealloc(void * ptr) { yaml_parser_t * parser; parser = (yaml_parser_t *)ptr; yaml_parser_delete(parser); xfree(parser); } #if 0 static size_t memsize(const void *ptr) { const yaml_parser_t *parser = ptr; /* TODO: calculate parser's size */ return 0; } #endif static const rb_data_type_t psych_parser_type = { "Psych/parser", {0, dealloc, 0,}, 0, 0, #ifdef RUBY_TYPED_FREE_IMMEDIATELY RUBY_TYPED_FREE_IMMEDIATELY, #endif }; static VALUE allocate(VALUE klass) { yaml_parser_t * parser; VALUE obj = TypedData_Make_Struct(klass, yaml_parser_t, &psych_parser_type, parser); yaml_parser_initialize(parser); return obj; } static VALUE make_exception(yaml_parser_t * parser, VALUE path) { size_t line, column; line = parser->context_mark.line + 1; column = parser->context_mark.column + 1; return rb_funcall(ePsychSyntaxError, rb_intern("new"), 6, path, INT2NUM(line), INT2NUM(column), INT2NUM(parser->problem_offset), parser->problem ? rb_usascii_str_new2(parser->problem) : Qnil, parser->context ? rb_usascii_str_new2(parser->context) : Qnil); } static VALUE transcode_string(VALUE src, int * parser_encoding) { int utf8 = rb_utf8_encindex(); int utf16le = rb_enc_find_index("UTF-16LE"); int utf16be = rb_enc_find_index("UTF-16BE"); int source_encoding = rb_enc_get_index(src); if (source_encoding == utf8) { *parser_encoding = YAML_UTF8_ENCODING; return src; } if (source_encoding == utf16le) { *parser_encoding = YAML_UTF16LE_ENCODING; return src; } if (source_encoding == utf16be) { *parser_encoding = YAML_UTF16BE_ENCODING; return src; } src = rb_str_export_to_enc(src, rb_utf8_encoding()); RB_GC_GUARD(src); *parser_encoding = YAML_UTF8_ENCODING; return src; } static VALUE transcode_io(VALUE src, int * parser_encoding) { VALUE io_external_encoding; int io_external_enc_index; io_external_encoding = rb_funcall(src, rb_intern("external_encoding"), 0); /* if no encoding is returned, assume ascii8bit. */ if (NIL_P(io_external_encoding)) { io_external_enc_index = rb_ascii8bit_encindex(); } else { io_external_enc_index = rb_to_encoding_index(io_external_encoding); } /* Treat US-ASCII as utf_8 */ if (io_external_enc_index == rb_usascii_encindex()) { *parser_encoding = YAML_UTF8_ENCODING; return src; } if (io_external_enc_index == rb_utf8_encindex()) { *parser_encoding = YAML_UTF8_ENCODING; return src; } if (io_external_enc_index == rb_enc_find_index("UTF-16LE")) { *parser_encoding = YAML_UTF16LE_ENCODING; return src; } if (io_external_enc_index == rb_enc_find_index("UTF-16BE")) { *parser_encoding = YAML_UTF16BE_ENCODING; return src; } /* Just guess on ASCII-8BIT */ if (io_external_enc_index == rb_ascii8bit_encindex()) { *parser_encoding = YAML_ANY_ENCODING; return src; } /* If the external encoding is something we don't know how to handle, * fall back to YAML_ANY_ENCODING. 
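 *
 * (Editorial summary of the dispatch above, mirroring the code rather than
 * adding behaviour: US-ASCII and UTF-8 map to YAML_UTF8_ENCODING,
 * UTF-16LE/UTF-16BE map to the matching libyaml constants, and ASCII-8BIT or
 * any other external encoding falls back to YAML_ANY_ENCODING so libyaml can
 * detect the encoding itself.)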
*/ *parser_encoding = YAML_ANY_ENCODING; return src; } static VALUE protected_start_stream(VALUE pointer) { VALUE *args = (VALUE *)pointer; return rb_funcall(args[0], id_start_stream, 1, args[1]); } static VALUE protected_start_document(VALUE pointer) { VALUE *args = (VALUE *)pointer; return rb_funcall3(args[0], id_start_document, 3, args + 1); } static VALUE protected_end_document(VALUE pointer) { VALUE *args = (VALUE *)pointer; return rb_funcall(args[0], id_end_document, 1, args[1]); } static VALUE protected_alias(VALUE pointer) { VALUE *args = (VALUE *)pointer; return rb_funcall(args[0], id_alias, 1, args[1]); } static VALUE protected_scalar(VALUE pointer) { VALUE *args = (VALUE *)pointer; return rb_funcall3(args[0], id_scalar, 6, args + 1); } static VALUE protected_start_sequence(VALUE pointer) { VALUE *args = (VALUE *)pointer; return rb_funcall3(args[0], id_start_sequence, 4, args + 1); } static VALUE protected_end_sequence(VALUE handler) { return rb_funcall(handler, id_end_sequence, 0); } static VALUE protected_start_mapping(VALUE pointer) { VALUE *args = (VALUE *)pointer; return rb_funcall3(args[0], id_start_mapping, 4, args + 1); } static VALUE protected_end_mapping(VALUE handler) { return rb_funcall(handler, id_end_mapping, 0); } static VALUE protected_empty(VALUE handler) { return rb_funcall(handler, id_empty, 0); } static VALUE protected_end_stream(VALUE handler) { return rb_funcall(handler, id_end_stream, 0); } /* * call-seq: * parser.parse(yaml) * * Parse the YAML document contained in +yaml+. Events will be called on * the handler set on the parser instance. * * See Psych::Parser and Psych::Parser#handler */ static VALUE parse(int argc, VALUE *argv, VALUE self) { VALUE yaml, path; yaml_parser_t * parser; yaml_event_t event; int done = 0; int tainted = 0; int state = 0; int parser_encoding = YAML_ANY_ENCODING; int encoding = rb_utf8_encindex(); rb_encoding * internal_enc = rb_default_internal_encoding(); VALUE handler = rb_iv_get(self, "@handler"); if (rb_scan_args(argc, argv, "11", &yaml, &path) == 1) { if(rb_respond_to(yaml, id_path)) path = rb_funcall(yaml, id_path, 0); else path = rb_str_new2(""); } TypedData_Get_Struct(self, yaml_parser_t, &psych_parser_type, parser); yaml_parser_delete(parser); yaml_parser_initialize(parser); if (OBJ_TAINTED(yaml)) tainted = 1; if (rb_respond_to(yaml, id_read)) { yaml = transcode_io(yaml, &parser_encoding); yaml_parser_set_encoding(parser, parser_encoding); yaml_parser_set_input(parser, io_reader, (void *)yaml); if (RTEST(rb_obj_is_kind_of(yaml, rb_cIO))) tainted = 1; } else { StringValue(yaml); yaml = transcode_string(yaml, &parser_encoding); yaml_parser_set_encoding(parser, parser_encoding); yaml_parser_set_input_string( parser, (const unsigned char *)RSTRING_PTR(yaml), (size_t)RSTRING_LEN(yaml) ); } while(!done) { if(!yaml_parser_parse(parser, &event)) { VALUE exception; exception = make_exception(parser, path); yaml_parser_delete(parser); yaml_parser_initialize(parser); rb_exc_raise(exception); } switch(event.type) { case YAML_STREAM_START_EVENT: { VALUE args[2]; args[0] = handler; args[1] = INT2NUM((long)event.data.stream_start.encoding); rb_protect(protected_start_stream, (VALUE)args, &state); } break; case YAML_DOCUMENT_START_EVENT: { VALUE args[4]; /* Get a list of tag directives (if any) */ VALUE tag_directives = rb_ary_new(); /* Grab the document version */ VALUE version = event.data.document_start.version_directive ? 
rb_ary_new3( (long)2, INT2NUM((long)event.data.document_start.version_directive->major), INT2NUM((long)event.data.document_start.version_directive->minor) ) : rb_ary_new(); if(event.data.document_start.tag_directives.start) { yaml_tag_directive_t *start = event.data.document_start.tag_directives.start; yaml_tag_directive_t *end = event.data.document_start.tag_directives.end; for(; start != end; start++) { VALUE handle = Qnil; VALUE prefix = Qnil; if(start->handle) { handle = rb_str_new2((const char *)start->handle); if (tainted) OBJ_TAINT(handle); PSYCH_TRANSCODE(handle, encoding, internal_enc); } if(start->prefix) { prefix = rb_str_new2((const char *)start->prefix); if (tainted) OBJ_TAINT(prefix); PSYCH_TRANSCODE(prefix, encoding, internal_enc); } rb_ary_push(tag_directives, rb_ary_new3((long)2, handle, prefix)); } } args[0] = handler; args[1] = version; args[2] = tag_directives; args[3] = event.data.document_start.implicit == 1 ? Qtrue : Qfalse; rb_protect(protected_start_document, (VALUE)args, &state); } break; case YAML_DOCUMENT_END_EVENT: { VALUE args[2]; args[0] = handler; args[1] = event.data.document_end.implicit == 1 ? Qtrue : Qfalse; rb_protect(protected_end_document, (VALUE)args, &state); } break; case YAML_ALIAS_EVENT: { VALUE args[2]; VALUE alias = Qnil; if(event.data.alias.anchor) { alias = rb_str_new2((const char *)event.data.alias.anchor); if (tainted) OBJ_TAINT(alias); PSYCH_TRANSCODE(alias, encoding, internal_enc); } args[0] = handler; args[1] = alias; rb_protect(protected_alias, (VALUE)args, &state); } break; case YAML_SCALAR_EVENT: { VALUE args[7]; VALUE anchor = Qnil; VALUE tag = Qnil; VALUE plain_implicit, quoted_implicit, style; VALUE val = rb_str_new( (const char *)event.data.scalar.value, (long)event.data.scalar.length ); if (tainted) OBJ_TAINT(val); PSYCH_TRANSCODE(val, encoding, internal_enc); if(event.data.scalar.anchor) { anchor = rb_str_new2((const char *)event.data.scalar.anchor); if (tainted) OBJ_TAINT(anchor); PSYCH_TRANSCODE(anchor, encoding, internal_enc); } if(event.data.scalar.tag) { tag = rb_str_new2((const char *)event.data.scalar.tag); if (tainted) OBJ_TAINT(tag); PSYCH_TRANSCODE(tag, encoding, internal_enc); } plain_implicit = event.data.scalar.plain_implicit == 0 ? Qfalse : Qtrue; quoted_implicit = event.data.scalar.quoted_implicit == 0 ? Qfalse : Qtrue; style = INT2NUM((long)event.data.scalar.style); args[0] = handler; args[1] = val; args[2] = anchor; args[3] = tag; args[4] = plain_implicit; args[5] = quoted_implicit; args[6] = style; rb_protect(protected_scalar, (VALUE)args, &state); } break; case YAML_SEQUENCE_START_EVENT: { VALUE args[5]; VALUE anchor = Qnil; VALUE tag = Qnil; VALUE implicit, style; if(event.data.sequence_start.anchor) { anchor = rb_str_new2((const char *)event.data.sequence_start.anchor); if (tainted) OBJ_TAINT(anchor); PSYCH_TRANSCODE(anchor, encoding, internal_enc); } tag = Qnil; if(event.data.sequence_start.tag) { tag = rb_str_new2((const char *)event.data.sequence_start.tag); if (tainted) OBJ_TAINT(tag); PSYCH_TRANSCODE(tag, encoding, internal_enc); } implicit = event.data.sequence_start.implicit == 0 ? 
Qfalse : Qtrue; style = INT2NUM((long)event.data.sequence_start.style); args[0] = handler; args[1] = anchor; args[2] = tag; args[3] = implicit; args[4] = style; rb_protect(protected_start_sequence, (VALUE)args, &state); } break; case YAML_SEQUENCE_END_EVENT: rb_protect(protected_end_sequence, handler, &state); break; case YAML_MAPPING_START_EVENT: { VALUE args[5]; VALUE anchor = Qnil; VALUE tag = Qnil; VALUE implicit, style; if(event.data.mapping_start.anchor) { anchor = rb_str_new2((const char *)event.data.mapping_start.anchor); if (tainted) OBJ_TAINT(anchor); PSYCH_TRANSCODE(anchor, encoding, internal_enc); } if(event.data.mapping_start.tag) { tag = rb_str_new2((const char *)event.data.mapping_start.tag); if (tainted) OBJ_TAINT(tag); PSYCH_TRANSCODE(tag, encoding, internal_enc); } implicit = event.data.mapping_start.implicit == 0 ? Qfalse : Qtrue; style = INT2NUM((long)event.data.mapping_start.style); args[0] = handler; args[1] = anchor; args[2] = tag; args[3] = implicit; args[4] = style; rb_protect(protected_start_mapping, (VALUE)args, &state); } break; case YAML_MAPPING_END_EVENT: rb_protect(protected_end_mapping, handler, &state); break; case YAML_NO_EVENT: rb_protect(protected_empty, handler, &state); break; case YAML_STREAM_END_EVENT: rb_protect(protected_end_stream, handler, &state); done = 1; break; } yaml_event_delete(&event); if (state) rb_jump_tag(state); } return self; } /* * call-seq: * parser.mark # => # * * Returns a Psych::Parser::Mark object that contains line, column, and index * information. */ static VALUE mark(VALUE self) { VALUE mark_klass; VALUE args[3]; yaml_parser_t * parser; TypedData_Get_Struct(self, yaml_parser_t, &psych_parser_type, parser); mark_klass = rb_const_get_at(cPsychParser, rb_intern("Mark")); args[0] = INT2NUM(parser->mark.index); args[1] = INT2NUM(parser->mark.line); args[2] = INT2NUM(parser->mark.column); return rb_class_new_instance(3, args, mark_klass); } void Init_psych_parser(void) { #if 0 mPsych = rb_define_module("Psych"); #endif cPsychParser = rb_define_class_under(mPsych, "Parser", rb_cObject); rb_define_alloc_func(cPsychParser, allocate); /* Any encoding: Let the parser choose the encoding */ rb_define_const(cPsychParser, "ANY", INT2NUM(YAML_ANY_ENCODING)); /* UTF-8 Encoding */ rb_define_const(cPsychParser, "UTF8", INT2NUM(YAML_UTF8_ENCODING)); /* UTF-16-LE Encoding with BOM */ rb_define_const(cPsychParser, "UTF16LE", INT2NUM(YAML_UTF16LE_ENCODING)); /* UTF-16-BE Encoding with BOM */ rb_define_const(cPsychParser, "UTF16BE", INT2NUM(YAML_UTF16BE_ENCODING)); rb_require("psych/syntax_error"); ePsychSyntaxError = rb_const_get(mPsych, rb_intern("SyntaxError")); rb_define_method(cPsychParser, "parse", parse, -1); rb_define_method(cPsychParser, "mark", mark, 0); id_read = rb_intern("read"); id_path = rb_intern("path"); id_empty = rb_intern("empty"); id_start_stream = rb_intern("start_stream"); id_end_stream = rb_intern("end_stream"); id_start_document = rb_intern("start_document"); id_end_document = rb_intern("end_document"); id_alias = rb_intern("alias"); id_scalar = rb_intern("scalar"); id_start_sequence = rb_intern("start_sequence"); id_end_sequence = rb_intern("end_sequence"); id_start_mapping = rb_intern("start_mapping"); id_end_mapping = rb_intern("end_mapping"); } /* vim: set noet sws=4 sw=4: */ psych-2.2.4/ext/psych/psych_parser.h000066400000000000000000000001251305404671600174520ustar00rootroot00000000000000#ifndef PSYCH_PARSER_H #define PSYCH_PARSER_H void Init_psych_parser(void); #endif 
psych-2.2.4/ext/psych/psych_to_ruby.c000066400000000000000000000020521305404671600176350ustar00rootroot00000000000000#include VALUE cPsychVisitorsToRuby; /* call-seq: vis.build_exception(klass, message) * * Create an exception with class +klass+ and +message+ */ static VALUE build_exception(VALUE self, VALUE klass, VALUE mesg) { VALUE e = rb_obj_alloc(klass); rb_iv_set(e, "mesg", mesg); return e; } /* call-seq: vis.path2class(path) * * Convert +path+ string to a class */ static VALUE path2class(VALUE self, VALUE path) { return rb_path_to_class(path); } void Init_psych_to_ruby(void) { VALUE psych = rb_define_module("Psych"); VALUE class_loader = rb_define_class_under(psych, "ClassLoader", rb_cObject); VALUE visitors = rb_define_module_under(psych, "Visitors"); VALUE visitor = rb_define_class_under(visitors, "Visitor", rb_cObject); cPsychVisitorsToRuby = rb_define_class_under(visitors, "ToRuby", visitor); rb_define_private_method(cPsychVisitorsToRuby, "build_exception", build_exception, 2); rb_define_private_method(class_loader, "path2class", path2class, 1); } /* vim: set noet sws=4 sw=4: */ psych-2.2.4/ext/psych/psych_to_ruby.h000066400000000000000000000001541305404671600176430ustar00rootroot00000000000000#ifndef PSYCH_TO_RUBY_H #define PSYCH_TO_RUBY_H #include void Init_psych_to_ruby(void); #endif psych-2.2.4/ext/psych/psych_yaml_tree.c000066400000000000000000000013341305404671600201350ustar00rootroot00000000000000#include VALUE cPsychVisitorsYamlTree; /* * call-seq: private_iv_get(target, prop) * * Get the private instance variable +prop+ from +target+ */ static VALUE private_iv_get(VALUE self, VALUE target, VALUE prop) { return rb_attr_get(target, rb_intern(StringValuePtr(prop))); } void Init_psych_yaml_tree(void) { VALUE psych = rb_define_module("Psych"); VALUE visitors = rb_define_module_under(psych, "Visitors"); VALUE visitor = rb_define_class_under(visitors, "Visitor", rb_cObject); cPsychVisitorsYamlTree = rb_define_class_under(visitors, "YAMLTree", visitor); rb_define_private_method(cPsychVisitorsYamlTree, "private_iv_get", private_iv_get, 2); } /* vim: set noet sws=4 sw=4: */ psych-2.2.4/ext/psych/psych_yaml_tree.h000066400000000000000000000001621305404671600201400ustar00rootroot00000000000000#ifndef PSYCH_YAML_TREE_H #define PSYCH_YAML_TREE_H #include void Init_psych_yaml_tree(void); #endif psych-2.2.4/ext/psych/yaml/000077500000000000000000000000001305404671600155435ustar00rootroot00000000000000psych-2.2.4/ext/psych/yaml/LICENSE000066400000000000000000000020421305404671600165460ustar00rootroot00000000000000Copyright (c) 2006 Kirill Simonov Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. psych-2.2.4/ext/psych/yaml/api.c000066400000000000000000001071411305404671600164640ustar00rootroot00000000000000 #include "yaml_private.h" /* * Get the library version. */ YAML_DECLARE(const char *) yaml_get_version_string(void) { return YAML_VERSION_STRING; } /* * Get the library version numbers. */ YAML_DECLARE(void) yaml_get_version(int *major, int *minor, int *patch) { *major = YAML_VERSION_MAJOR; *minor = YAML_VERSION_MINOR; *patch = YAML_VERSION_PATCH; } /* * Allocate a dynamic memory block. */ YAML_DECLARE(void *) yaml_malloc(size_t size) { return malloc(size ? size : 1); } /* * Reallocate a dynamic memory block. */ YAML_DECLARE(void *) yaml_realloc(void *ptr, size_t size) { return ptr ? realloc(ptr, size ? size : 1) : malloc(size ? size : 1); } /* * Free a dynamic memory block. */ YAML_DECLARE(void) yaml_free(void *ptr) { if (ptr) free(ptr); } /* * Duplicate a string. */ YAML_DECLARE(yaml_char_t *) yaml_strdup(const yaml_char_t *str) { if (!str) return NULL; return (yaml_char_t *)strdup((char *)str); } /* * Extend a string. */ YAML_DECLARE(int) yaml_string_extend(yaml_char_t **start, yaml_char_t **pointer, yaml_char_t **end) { yaml_char_t *new_start = yaml_realloc(*start, (*end - *start)*2); if (!new_start) return 0; memset(new_start + (*end - *start), 0, *end - *start); *pointer = new_start + (*pointer - *start); *end = new_start + (*end - *start)*2; *start = new_start; return 1; } /* * Append a string B to a string A. */ YAML_DECLARE(int) yaml_string_join( yaml_char_t **a_start, yaml_char_t **a_pointer, yaml_char_t **a_end, yaml_char_t **b_start, yaml_char_t **b_pointer, yaml_char_t **b_end) { if (*b_start == *b_pointer) return 1; while (*a_end - *a_pointer <= *b_pointer - *b_start) { if (!yaml_string_extend(a_start, a_pointer, a_end)) return 0; } memcpy(*a_pointer, *b_start, *b_pointer - *b_start); *a_pointer += *b_pointer - *b_start; return 1; } /* * Extend a stack. */ YAML_DECLARE(int) yaml_stack_extend(void **start, void **top, void **end) { void *new_start = yaml_realloc(*start, ((char *)*end - (char *)*start)*2); if (!new_start) return 0; *top = (char *)new_start + ((char *)*top - (char *)*start); *end = (char *)new_start + ((char *)*end - (char *)*start)*2; *start = new_start; return 1; } /* * Extend or move a queue. */ YAML_DECLARE(int) yaml_queue_extend(void **start, void **head, void **tail, void **end) { /* Check if we need to resize the queue. */ if (*start == *head && *tail == *end) { void *new_start = yaml_realloc(*start, ((char *)*end - (char *)*start)*2); if (!new_start) return 0; *head = (char *)new_start + ((char *)*head - (char *)*start); *tail = (char *)new_start + ((char *)*tail - (char *)*start); *end = (char *)new_start + ((char *)*end - (char *)*start)*2; *start = new_start; } /* Check if we need to move the queue at the beginning of the buffer. */ if (*tail == *end) { if (*head != *tail) { memmove(*start, *head, (char *)*tail - (char *)*head); } *tail = (char *)*tail - (char *)*head + (char *)*start; *head = *start; } return 1; } /* * Create a new parser object. */ YAML_DECLARE(int) yaml_parser_initialize(yaml_parser_t *parser) { assert(parser); /* Non-NULL parser object expected. 
*/ memset(parser, 0, sizeof(yaml_parser_t)); if (!BUFFER_INIT(parser, parser->raw_buffer, INPUT_RAW_BUFFER_SIZE)) goto error; if (!BUFFER_INIT(parser, parser->buffer, INPUT_BUFFER_SIZE)) goto error; if (!QUEUE_INIT(parser, parser->tokens, INITIAL_QUEUE_SIZE)) goto error; if (!STACK_INIT(parser, parser->indents, INITIAL_STACK_SIZE)) goto error; if (!STACK_INIT(parser, parser->simple_keys, INITIAL_STACK_SIZE)) goto error; if (!STACK_INIT(parser, parser->states, INITIAL_STACK_SIZE)) goto error; if (!STACK_INIT(parser, parser->marks, INITIAL_STACK_SIZE)) goto error; if (!STACK_INIT(parser, parser->tag_directives, INITIAL_STACK_SIZE)) goto error; return 1; error: BUFFER_DEL(parser, parser->raw_buffer); BUFFER_DEL(parser, parser->buffer); QUEUE_DEL(parser, parser->tokens); STACK_DEL(parser, parser->indents); STACK_DEL(parser, parser->simple_keys); STACK_DEL(parser, parser->states); STACK_DEL(parser, parser->marks); STACK_DEL(parser, parser->tag_directives); return 0; } /* * Destroy a parser object. */ YAML_DECLARE(void) yaml_parser_delete(yaml_parser_t *parser) { assert(parser); /* Non-NULL parser object expected. */ BUFFER_DEL(parser, parser->raw_buffer); BUFFER_DEL(parser, parser->buffer); while (!QUEUE_EMPTY(parser, parser->tokens)) { yaml_token_delete(&DEQUEUE(parser, parser->tokens)); } QUEUE_DEL(parser, parser->tokens); STACK_DEL(parser, parser->indents); STACK_DEL(parser, parser->simple_keys); STACK_DEL(parser, parser->states); STACK_DEL(parser, parser->marks); while (!STACK_EMPTY(parser, parser->tag_directives)) { yaml_tag_directive_t tag_directive = POP(parser, parser->tag_directives); yaml_free(tag_directive.handle); yaml_free(tag_directive.prefix); } STACK_DEL(parser, parser->tag_directives); memset(parser, 0, sizeof(yaml_parser_t)); } /* * String read handler. */ static int yaml_string_read_handler(void *data, unsigned char *buffer, size_t size, size_t *size_read) { yaml_parser_t *parser = data; if (parser->input.string.current == parser->input.string.end) { *size_read = 0; return 1; } if (size > (size_t)(parser->input.string.end - parser->input.string.current)) { size = parser->input.string.end - parser->input.string.current; } memcpy(buffer, parser->input.string.current, size); parser->input.string.current += size; *size_read = size; return 1; } /* * File read handler. */ static int yaml_file_read_handler(void *data, unsigned char *buffer, size_t size, size_t *size_read) { yaml_parser_t *parser = data; *size_read = fread(buffer, 1, size, parser->input.file); return !ferror(parser->input.file); } /* * Set a string input. */ YAML_DECLARE(void) yaml_parser_set_input_string(yaml_parser_t *parser, const unsigned char *input, size_t size) { assert(parser); /* Non-NULL parser object expected. */ assert(!parser->read_handler); /* You can set the source only once. */ assert(input); /* Non-NULL input string expected. */ parser->read_handler = yaml_string_read_handler; parser->read_handler_data = parser; parser->input.string.start = input; parser->input.string.current = input; parser->input.string.end = input+size; } /* * Set a file input. */ YAML_DECLARE(void) yaml_parser_set_input_file(yaml_parser_t *parser, FILE *file) { assert(parser); /* Non-NULL parser object expected. */ assert(!parser->read_handler); /* You can set the source only once. */ assert(file); /* Non-NULL file object expected. */ parser->read_handler = yaml_file_read_handler; parser->read_handler_data = parser; parser->input.file = file; } /* * Set a generic input. 
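 *
 * (Hedged editorial note, not upstream documentation.)  The handler must
 * follow the yaml_read_handler_t contract: fill the supplied buffer with at
 * most `size` bytes, store the number of bytes actually produced in
 * *size_read (0 meaning end of stream), and return 1 on success or 0 on a
 * read error -- yaml_string_read_handler() and yaml_file_read_handler()
 * above are the two built-in examples of that shape.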
*/ YAML_DECLARE(void) yaml_parser_set_input(yaml_parser_t *parser, yaml_read_handler_t *handler, void *data) { assert(parser); /* Non-NULL parser object expected. */ assert(!parser->read_handler); /* You can set the source only once. */ assert(handler); /* Non-NULL read handler expected. */ parser->read_handler = handler; parser->read_handler_data = data; } /* * Set the source encoding. */ YAML_DECLARE(void) yaml_parser_set_encoding(yaml_parser_t *parser, yaml_encoding_t encoding) { assert(parser); /* Non-NULL parser object expected. */ assert(!parser->encoding); /* Encoding is already set or detected. */ parser->encoding = encoding; } /* * Create a new emitter object. */ YAML_DECLARE(int) yaml_emitter_initialize(yaml_emitter_t *emitter) { assert(emitter); /* Non-NULL emitter object expected. */ memset(emitter, 0, sizeof(yaml_emitter_t)); if (!BUFFER_INIT(emitter, emitter->buffer, OUTPUT_BUFFER_SIZE)) goto error; if (!BUFFER_INIT(emitter, emitter->raw_buffer, OUTPUT_RAW_BUFFER_SIZE)) goto error; if (!STACK_INIT(emitter, emitter->states, INITIAL_STACK_SIZE)) goto error; if (!QUEUE_INIT(emitter, emitter->events, INITIAL_QUEUE_SIZE)) goto error; if (!STACK_INIT(emitter, emitter->indents, INITIAL_STACK_SIZE)) goto error; if (!STACK_INIT(emitter, emitter->tag_directives, INITIAL_STACK_SIZE)) goto error; return 1; error: BUFFER_DEL(emitter, emitter->buffer); BUFFER_DEL(emitter, emitter->raw_buffer); STACK_DEL(emitter, emitter->states); QUEUE_DEL(emitter, emitter->events); STACK_DEL(emitter, emitter->indents); STACK_DEL(emitter, emitter->tag_directives); return 0; } /* * Destroy an emitter object. */ YAML_DECLARE(void) yaml_emitter_delete(yaml_emitter_t *emitter) { assert(emitter); /* Non-NULL emitter object expected. */ BUFFER_DEL(emitter, emitter->buffer); BUFFER_DEL(emitter, emitter->raw_buffer); STACK_DEL(emitter, emitter->states); while (!QUEUE_EMPTY(emitter, emitter->events)) { yaml_event_delete(&DEQUEUE(emitter, emitter->events)); } QUEUE_DEL(emitter, emitter->events); STACK_DEL(emitter, emitter->indents); while (!STACK_EMPTY(empty, emitter->tag_directives)) { yaml_tag_directive_t tag_directive = POP(emitter, emitter->tag_directives); yaml_free(tag_directive.handle); yaml_free(tag_directive.prefix); } STACK_DEL(emitter, emitter->tag_directives); yaml_free(emitter->anchors); memset(emitter, 0, sizeof(yaml_emitter_t)); } /* * String write handler. */ static int yaml_string_write_handler(void *data, unsigned char *buffer, size_t size) { yaml_emitter_t *emitter = data; if (emitter->output.string.size - *emitter->output.string.size_written < size) { memcpy(emitter->output.string.buffer + *emitter->output.string.size_written, buffer, emitter->output.string.size - *emitter->output.string.size_written); *emitter->output.string.size_written = emitter->output.string.size; return 0; } memcpy(emitter->output.string.buffer + *emitter->output.string.size_written, buffer, size); *emitter->output.string.size_written += size; return 1; } /* * File write handler. */ static int yaml_file_write_handler(void *data, unsigned char *buffer, size_t size) { yaml_emitter_t *emitter = data; return (fwrite(buffer, 1, size, emitter->output.file) == size); } /* * Set a string output. */ YAML_DECLARE(void) yaml_emitter_set_output_string(yaml_emitter_t *emitter, unsigned char *output, size_t size, size_t *size_written) { assert(emitter); /* Non-NULL emitter object expected. */ assert(!emitter->write_handler); /* You can set the output only once. */ assert(output); /* Non-NULL output string expected. 
*/ emitter->write_handler = yaml_string_write_handler; emitter->write_handler_data = emitter; emitter->output.string.buffer = output; emitter->output.string.size = size; emitter->output.string.size_written = size_written; *size_written = 0; } /* * Set a file output. */ YAML_DECLARE(void) yaml_emitter_set_output_file(yaml_emitter_t *emitter, FILE *file) { assert(emitter); /* Non-NULL emitter object expected. */ assert(!emitter->write_handler); /* You can set the output only once. */ assert(file); /* Non-NULL file object expected. */ emitter->write_handler = yaml_file_write_handler; emitter->write_handler_data = emitter; emitter->output.file = file; } /* * Set a generic output handler. */ YAML_DECLARE(void) yaml_emitter_set_output(yaml_emitter_t *emitter, yaml_write_handler_t *handler, void *data) { assert(emitter); /* Non-NULL emitter object expected. */ assert(!emitter->write_handler); /* You can set the output only once. */ assert(handler); /* Non-NULL handler object expected. */ emitter->write_handler = handler; emitter->write_handler_data = data; } /* * Set the output encoding. */ YAML_DECLARE(void) yaml_emitter_set_encoding(yaml_emitter_t *emitter, yaml_encoding_t encoding) { assert(emitter); /* Non-NULL emitter object expected. */ assert(!emitter->encoding); /* You can set encoding only once. */ emitter->encoding = encoding; } /* * Set the canonical output style. */ YAML_DECLARE(void) yaml_emitter_set_canonical(yaml_emitter_t *emitter, int canonical) { assert(emitter); /* Non-NULL emitter object expected. */ emitter->canonical = (canonical != 0); } /* * Set the indentation increment. */ YAML_DECLARE(void) yaml_emitter_set_indent(yaml_emitter_t *emitter, int indent) { assert(emitter); /* Non-NULL emitter object expected. */ emitter->best_indent = (1 < indent && indent < 10) ? indent : 2; } /* * Set the preferred line width. */ YAML_DECLARE(void) yaml_emitter_set_width(yaml_emitter_t *emitter, int width) { assert(emitter); /* Non-NULL emitter object expected. */ emitter->best_width = (width >= 0) ? width : -1; } /* * Set if unescaped non-ASCII characters are allowed. */ YAML_DECLARE(void) yaml_emitter_set_unicode(yaml_emitter_t *emitter, int unicode) { assert(emitter); /* Non-NULL emitter object expected. */ emitter->unicode = (unicode != 0); } /* * Set the preferred line break character. */ YAML_DECLARE(void) yaml_emitter_set_break(yaml_emitter_t *emitter, yaml_break_t line_break) { assert(emitter); /* Non-NULL emitter object expected. */ emitter->line_break = line_break; } /* * Destroy a token object. */ YAML_DECLARE(void) yaml_token_delete(yaml_token_t *token) { assert(token); /* Non-NULL token object expected. */ switch (token->type) { case YAML_TAG_DIRECTIVE_TOKEN: yaml_free(token->data.tag_directive.handle); yaml_free(token->data.tag_directive.prefix); break; case YAML_ALIAS_TOKEN: yaml_free(token->data.alias.value); break; case YAML_ANCHOR_TOKEN: yaml_free(token->data.anchor.value); break; case YAML_TAG_TOKEN: yaml_free(token->data.tag.handle); yaml_free(token->data.tag.suffix); break; case YAML_SCALAR_TOKEN: yaml_free(token->data.scalar.value); break; default: break; } memset(token, 0, sizeof(yaml_token_t)); } /* * Check if a string is a valid UTF-8 sequence. * * Check 'reader.c' for more details on UTF-8 encoding. 
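 *
 * (Editorial summary added for orientation; it mirrors the checks performed
 * below.)  The leading octet decides the sequence width and the minimum
 * code point that width may encode:
 *
 *     0xxxxxxx  -> 1 octet,  any value
 *     110xxxxx  -> 2 octets, value >= 0x80
 *     1110xxxx  -> 3 octets, value >= 0x800
 *     11110xxx  -> 4 octets, value >= 0x10000
 *
 * and every continuation octet must match 10xxxxxx; anything else makes the
 * function return 0.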
*/ static int yaml_check_utf8(yaml_char_t *start, size_t length) { yaml_char_t *end = start+length; yaml_char_t *pointer = start; while (pointer < end) { unsigned char octet; unsigned int width; unsigned int value; size_t k; octet = pointer[0]; width = (octet & 0x80) == 0x00 ? 1 : (octet & 0xE0) == 0xC0 ? 2 : (octet & 0xF0) == 0xE0 ? 3 : (octet & 0xF8) == 0xF0 ? 4 : 0; value = (octet & 0x80) == 0x00 ? octet & 0x7F : (octet & 0xE0) == 0xC0 ? octet & 0x1F : (octet & 0xF0) == 0xE0 ? octet & 0x0F : (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0; if (!width) return 0; if (pointer+width > end) return 0; for (k = 1; k < width; k ++) { octet = pointer[k]; if ((octet & 0xC0) != 0x80) return 0; value = (value << 6) + (octet & 0x3F); } if (!((width == 1) || (width == 2 && value >= 0x80) || (width == 3 && value >= 0x800) || (width == 4 && value >= 0x10000))) return 0; pointer += width; } return 1; } /* * Create STREAM-START. */ YAML_DECLARE(int) yaml_stream_start_event_initialize(yaml_event_t *event, yaml_encoding_t encoding) { yaml_mark_t mark = { 0, 0, 0 }; assert(event); /* Non-NULL event object is expected. */ STREAM_START_EVENT_INIT(*event, encoding, mark, mark); return 1; } /* * Create STREAM-END. */ YAML_DECLARE(int) yaml_stream_end_event_initialize(yaml_event_t *event) { yaml_mark_t mark = { 0, 0, 0 }; assert(event); /* Non-NULL event object is expected. */ STREAM_END_EVENT_INIT(*event, mark, mark); return 1; } /* * Create DOCUMENT-START. */ YAML_DECLARE(int) yaml_document_start_event_initialize(yaml_event_t *event, yaml_version_directive_t *version_directive, yaml_tag_directive_t *tag_directives_start, yaml_tag_directive_t *tag_directives_end, int implicit) { struct { yaml_error_type_t error; } context; yaml_mark_t mark = { 0, 0, 0 }; yaml_version_directive_t *version_directive_copy = NULL; struct { yaml_tag_directive_t *start; yaml_tag_directive_t *end; yaml_tag_directive_t *top; } tag_directives_copy = { NULL, NULL, NULL }; yaml_tag_directive_t value = { NULL, NULL }; assert(event); /* Non-NULL event object is expected. */ assert((tag_directives_start && tag_directives_end) || (tag_directives_start == tag_directives_end)); /* Valid tag directives are expected. 
*/ if (version_directive) { version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t)); if (!version_directive_copy) goto error; version_directive_copy->major = version_directive->major; version_directive_copy->minor = version_directive->minor; } if (tag_directives_start != tag_directives_end) { yaml_tag_directive_t *tag_directive; if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE)) goto error; for (tag_directive = tag_directives_start; tag_directive != tag_directives_end; tag_directive ++) { assert(tag_directive->handle); assert(tag_directive->prefix); if (!yaml_check_utf8(tag_directive->handle, strlen((char *)tag_directive->handle))) goto error; if (!yaml_check_utf8(tag_directive->prefix, strlen((char *)tag_directive->prefix))) goto error; value.handle = yaml_strdup(tag_directive->handle); value.prefix = yaml_strdup(tag_directive->prefix); if (!value.handle || !value.prefix) goto error; if (!PUSH(&context, tag_directives_copy, value)) goto error; value.handle = NULL; value.prefix = NULL; } } DOCUMENT_START_EVENT_INIT(*event, version_directive_copy, tag_directives_copy.start, tag_directives_copy.top, implicit, mark, mark); return 1; error: yaml_free(version_directive_copy); while (!STACK_EMPTY(context, tag_directives_copy)) { yaml_tag_directive_t value = POP(context, tag_directives_copy); yaml_free(value.handle); yaml_free(value.prefix); } STACK_DEL(context, tag_directives_copy); yaml_free(value.handle); yaml_free(value.prefix); return 0; } /* * Create DOCUMENT-END. */ YAML_DECLARE(int) yaml_document_end_event_initialize(yaml_event_t *event, int implicit) { yaml_mark_t mark = { 0, 0, 0 }; assert(event); /* Non-NULL emitter object is expected. */ DOCUMENT_END_EVENT_INIT(*event, implicit, mark, mark); return 1; } /* * Create ALIAS. */ YAML_DECLARE(int) yaml_alias_event_initialize(yaml_event_t *event, yaml_char_t *anchor) { yaml_mark_t mark = { 0, 0, 0 }; yaml_char_t *anchor_copy = NULL; assert(event); /* Non-NULL event object is expected. */ assert(anchor); /* Non-NULL anchor is expected. */ if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0; anchor_copy = yaml_strdup(anchor); if (!anchor_copy) return 0; ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark); return 1; } /* * Create SCALAR. */ YAML_DECLARE(int) yaml_scalar_event_initialize(yaml_event_t *event, yaml_char_t *anchor, yaml_char_t *tag, yaml_char_t *value, int length, int plain_implicit, int quoted_implicit, yaml_scalar_style_t style) { yaml_mark_t mark = { 0, 0, 0 }; yaml_char_t *anchor_copy = NULL; yaml_char_t *tag_copy = NULL; yaml_char_t *value_copy = NULL; assert(event); /* Non-NULL event object is expected. */ assert(value); /* Non-NULL anchor is expected. */ if (anchor) { if (!yaml_check_utf8(anchor, strlen((char *)anchor))) goto error; anchor_copy = yaml_strdup(anchor); if (!anchor_copy) goto error; } if (tag) { if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error; tag_copy = yaml_strdup(tag); if (!tag_copy) goto error; } if (length < 0) { length = strlen((char *)value); } if (!yaml_check_utf8(value, length)) goto error; value_copy = yaml_malloc(length+1); if (!value_copy) goto error; memcpy(value_copy, value, length); value_copy[length] = '\0'; SCALAR_EVENT_INIT(*event, anchor_copy, tag_copy, value_copy, length, plain_implicit, quoted_implicit, style, mark, mark); return 1; error: yaml_free(anchor_copy); yaml_free(tag_copy); yaml_free(value_copy); return 0; } /* * Create SEQUENCE-START. 
*/ YAML_DECLARE(int) yaml_sequence_start_event_initialize(yaml_event_t *event, yaml_char_t *anchor, yaml_char_t *tag, int implicit, yaml_sequence_style_t style) { yaml_mark_t mark = { 0, 0, 0 }; yaml_char_t *anchor_copy = NULL; yaml_char_t *tag_copy = NULL; assert(event); /* Non-NULL event object is expected. */ if (anchor) { if (!yaml_check_utf8(anchor, strlen((char *)anchor))) goto error; anchor_copy = yaml_strdup(anchor); if (!anchor_copy) goto error; } if (tag) { if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error; tag_copy = yaml_strdup(tag); if (!tag_copy) goto error; } SEQUENCE_START_EVENT_INIT(*event, anchor_copy, tag_copy, implicit, style, mark, mark); return 1; error: yaml_free(anchor_copy); yaml_free(tag_copy); return 0; } /* * Create SEQUENCE-END. */ YAML_DECLARE(int) yaml_sequence_end_event_initialize(yaml_event_t *event) { yaml_mark_t mark = { 0, 0, 0 }; assert(event); /* Non-NULL event object is expected. */ SEQUENCE_END_EVENT_INIT(*event, mark, mark); return 1; } /* * Create MAPPING-START. */ YAML_DECLARE(int) yaml_mapping_start_event_initialize(yaml_event_t *event, yaml_char_t *anchor, yaml_char_t *tag, int implicit, yaml_mapping_style_t style) { yaml_mark_t mark = { 0, 0, 0 }; yaml_char_t *anchor_copy = NULL; yaml_char_t *tag_copy = NULL; assert(event); /* Non-NULL event object is expected. */ if (anchor) { if (!yaml_check_utf8(anchor, strlen((char *)anchor))) goto error; anchor_copy = yaml_strdup(anchor); if (!anchor_copy) goto error; } if (tag) { if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error; tag_copy = yaml_strdup(tag); if (!tag_copy) goto error; } MAPPING_START_EVENT_INIT(*event, anchor_copy, tag_copy, implicit, style, mark, mark); return 1; error: yaml_free(anchor_copy); yaml_free(tag_copy); return 0; } /* * Create MAPPING-END. */ YAML_DECLARE(int) yaml_mapping_end_event_initialize(yaml_event_t *event) { yaml_mark_t mark = { 0, 0, 0 }; assert(event); /* Non-NULL event object is expected. */ MAPPING_END_EVENT_INIT(*event, mark, mark); return 1; } /* * Destroy an event object. */ YAML_DECLARE(void) yaml_event_delete(yaml_event_t *event) { yaml_tag_directive_t *tag_directive; assert(event); /* Non-NULL event object expected. */ switch (event->type) { case YAML_DOCUMENT_START_EVENT: yaml_free(event->data.document_start.version_directive); for (tag_directive = event->data.document_start.tag_directives.start; tag_directive != event->data.document_start.tag_directives.end; tag_directive++) { yaml_free(tag_directive->handle); yaml_free(tag_directive->prefix); } yaml_free(event->data.document_start.tag_directives.start); break; case YAML_ALIAS_EVENT: yaml_free(event->data.alias.anchor); break; case YAML_SCALAR_EVENT: yaml_free(event->data.scalar.anchor); yaml_free(event->data.scalar.tag); yaml_free(event->data.scalar.value); break; case YAML_SEQUENCE_START_EVENT: yaml_free(event->data.sequence_start.anchor); yaml_free(event->data.sequence_start.tag); break; case YAML_MAPPING_START_EVENT: yaml_free(event->data.mapping_start.anchor); yaml_free(event->data.mapping_start.tag); break; default: break; } memset(event, 0, sizeof(yaml_event_t)); } /* * Create a document object. 
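 *
 * A hedged usage sketch from the editor (not upstream documentation); the
 * ids returned by the yaml_document_add_*() helpers defined later in this
 * file are what the append functions expect, and yaml_document_delete()
 * releases everything again:
 *
 *     yaml_document_t doc;
 *     yaml_document_initialize(&doc, NULL, NULL, NULL, 1, 1);
 *     int root = yaml_document_add_mapping(&doc, NULL, YAML_BLOCK_MAPPING_STYLE);
 *     int key  = yaml_document_add_scalar(&doc, NULL,
 *                    (yaml_char_t *)"greeting", -1, YAML_PLAIN_SCALAR_STYLE);
 *     int val  = yaml_document_add_scalar(&doc, NULL,
 *                    (yaml_char_t *)"hello", -1, YAML_PLAIN_SCALAR_STYLE);
 *     yaml_document_append_mapping_pair(&doc, root, key, val);
 *     yaml_document_delete(&doc);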
*/ YAML_DECLARE(int) yaml_document_initialize(yaml_document_t *document, yaml_version_directive_t *version_directive, yaml_tag_directive_t *tag_directives_start, yaml_tag_directive_t *tag_directives_end, int start_implicit, int end_implicit) { struct { yaml_error_type_t error; } context; struct { yaml_node_t *start; yaml_node_t *end; yaml_node_t *top; } nodes = { NULL, NULL, NULL }; yaml_version_directive_t *version_directive_copy = NULL; struct { yaml_tag_directive_t *start; yaml_tag_directive_t *end; yaml_tag_directive_t *top; } tag_directives_copy = { NULL, NULL, NULL }; yaml_tag_directive_t value = { NULL, NULL }; yaml_mark_t mark = { 0, 0, 0 }; assert(document); /* Non-NULL document object is expected. */ assert((tag_directives_start && tag_directives_end) || (tag_directives_start == tag_directives_end)); /* Valid tag directives are expected. */ if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error; if (version_directive) { version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t)); if (!version_directive_copy) goto error; version_directive_copy->major = version_directive->major; version_directive_copy->minor = version_directive->minor; } if (tag_directives_start != tag_directives_end) { yaml_tag_directive_t *tag_directive; if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE)) goto error; for (tag_directive = tag_directives_start; tag_directive != tag_directives_end; tag_directive ++) { assert(tag_directive->handle); assert(tag_directive->prefix); if (!yaml_check_utf8(tag_directive->handle, strlen((char *)tag_directive->handle))) goto error; if (!yaml_check_utf8(tag_directive->prefix, strlen((char *)tag_directive->prefix))) goto error; value.handle = yaml_strdup(tag_directive->handle); value.prefix = yaml_strdup(tag_directive->prefix); if (!value.handle || !value.prefix) goto error; if (!PUSH(&context, tag_directives_copy, value)) goto error; value.handle = NULL; value.prefix = NULL; } } DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy, tag_directives_copy.start, tag_directives_copy.top, start_implicit, end_implicit, mark, mark); return 1; error: STACK_DEL(&context, nodes); yaml_free(version_directive_copy); while (!STACK_EMPTY(&context, tag_directives_copy)) { yaml_tag_directive_t value = POP(&context, tag_directives_copy); yaml_free(value.handle); yaml_free(value.prefix); } STACK_DEL(&context, tag_directives_copy); yaml_free(value.handle); yaml_free(value.prefix); return 0; } /* * Destroy a document object. */ YAML_DECLARE(void) yaml_document_delete(yaml_document_t *document) { struct { yaml_error_type_t error; } context; yaml_tag_directive_t *tag_directive; context.error = YAML_NO_ERROR; /* Eliminate a compliler warning. */ assert(document); /* Non-NULL document object is expected. */ while (!STACK_EMPTY(&context, document->nodes)) { yaml_node_t node = POP(&context, document->nodes); yaml_free(node.tag); switch (node.type) { case YAML_SCALAR_NODE: yaml_free(node.data.scalar.value); break; case YAML_SEQUENCE_NODE: STACK_DEL(&context, node.data.sequence.items); break; case YAML_MAPPING_NODE: STACK_DEL(&context, node.data.mapping.pairs); break; default: assert(0); /* Should not happen. 
*/ } } STACK_DEL(&context, document->nodes); yaml_free(document->version_directive); for (tag_directive = document->tag_directives.start; tag_directive != document->tag_directives.end; tag_directive++) { yaml_free(tag_directive->handle); yaml_free(tag_directive->prefix); } yaml_free(document->tag_directives.start); memset(document, 0, sizeof(yaml_document_t)); } /** * Get a document node. */ YAML_DECLARE(yaml_node_t *) yaml_document_get_node(yaml_document_t *document, int index) { assert(document); /* Non-NULL document object is expected. */ if (index > 0 && document->nodes.start + index <= document->nodes.top) { return document->nodes.start + index - 1; } return NULL; } /** * Get the root object. */ YAML_DECLARE(yaml_node_t *) yaml_document_get_root_node(yaml_document_t *document) { assert(document); /* Non-NULL document object is expected. */ if (document->nodes.top != document->nodes.start) { return document->nodes.start; } return NULL; } /* * Add a scalar node to a document. */ YAML_DECLARE(int) yaml_document_add_scalar(yaml_document_t *document, yaml_char_t *tag, yaml_char_t *value, int length, yaml_scalar_style_t style) { struct { yaml_error_type_t error; } context; yaml_mark_t mark = { 0, 0, 0 }; yaml_char_t *tag_copy = NULL; yaml_char_t *value_copy = NULL; yaml_node_t node; assert(document); /* Non-NULL document object is expected. */ assert(value); /* Non-NULL value is expected. */ if (!tag) { tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG; } if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error; tag_copy = yaml_strdup(tag); if (!tag_copy) goto error; if (length < 0) { length = strlen((char *)value); } if (!yaml_check_utf8(value, length)) goto error; value_copy = yaml_malloc(length+1); if (!value_copy) goto error; memcpy(value_copy, value, length); value_copy[length] = '\0'; SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark); if (!PUSH(&context, document->nodes, node)) goto error; return document->nodes.top - document->nodes.start; error: yaml_free(tag_copy); yaml_free(value_copy); return 0; } /* * Add a sequence node to a document. */ YAML_DECLARE(int) yaml_document_add_sequence(yaml_document_t *document, yaml_char_t *tag, yaml_sequence_style_t style) { struct { yaml_error_type_t error; } context; yaml_mark_t mark = { 0, 0, 0 }; yaml_char_t *tag_copy = NULL; struct { yaml_node_item_t *start; yaml_node_item_t *end; yaml_node_item_t *top; } items = { NULL, NULL, NULL }; yaml_node_t node; assert(document); /* Non-NULL document object is expected. */ if (!tag) { tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG; } if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error; tag_copy = yaml_strdup(tag); if (!tag_copy) goto error; if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error; SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end, style, mark, mark); if (!PUSH(&context, document->nodes, node)) goto error; return document->nodes.top - document->nodes.start; error: STACK_DEL(&context, items); yaml_free(tag_copy); return 0; } /* * Add a mapping node to a document. */ YAML_DECLARE(int) yaml_document_add_mapping(yaml_document_t *document, yaml_char_t *tag, yaml_mapping_style_t style) { struct { yaml_error_type_t error; } context; yaml_mark_t mark = { 0, 0, 0 }; yaml_char_t *tag_copy = NULL; struct { yaml_node_pair_t *start; yaml_node_pair_t *end; yaml_node_pair_t *top; } pairs = { NULL, NULL, NULL }; yaml_node_t node; assert(document); /* Non-NULL document object is expected. 
*/ if (!tag) { tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG; } if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error; tag_copy = yaml_strdup(tag); if (!tag_copy) goto error; if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error; MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end, style, mark, mark); if (!PUSH(&context, document->nodes, node)) goto error; return document->nodes.top - document->nodes.start; error: STACK_DEL(&context, pairs); yaml_free(tag_copy); return 0; } /* * Append an item to a sequence node. */ YAML_DECLARE(int) yaml_document_append_sequence_item(yaml_document_t *document, int sequence, int item) { struct { yaml_error_type_t error; } context; assert(document); /* Non-NULL document is required. */ assert(sequence > 0 && document->nodes.start + sequence <= document->nodes.top); /* Valid sequence id is required. */ assert(document->nodes.start[sequence-1].type == YAML_SEQUENCE_NODE); /* A sequence node is required. */ assert(item > 0 && document->nodes.start + item <= document->nodes.top); /* Valid item id is required. */ if (!PUSH(&context, document->nodes.start[sequence-1].data.sequence.items, item)) return 0; return 1; } /* * Append a pair of a key and a value to a mapping node. */ YAML_DECLARE(int) yaml_document_append_mapping_pair(yaml_document_t *document, int mapping, int key, int value) { struct { yaml_error_type_t error; } context; yaml_node_pair_t pair; assert(document); /* Non-NULL document is required. */ assert(mapping > 0 && document->nodes.start + mapping <= document->nodes.top); /* Valid mapping id is required. */ assert(document->nodes.start[mapping-1].type == YAML_MAPPING_NODE); /* A mapping node is required. */ assert(key > 0 && document->nodes.start + key <= document->nodes.top); /* Valid key id is required. */ assert(value > 0 && document->nodes.start + value <= document->nodes.top); /* Valid value id is required. */ pair.key = key; pair.value = value; if (!PUSH(&context, document->nodes.start[mapping-1].data.mapping.pairs, pair)) return 0; return 1; } psych-2.2.4/ext/psych/yaml/config.h000066400000000000000000000005631305404671600171650ustar00rootroot00000000000000#define PACKAGE_NAME "yaml" #define PACKAGE_TARNAME "yaml" #define PACKAGE_VERSION "0.1.7" #define PACKAGE_STRING "yaml 0.1.7" #define PACKAGE_BUGREPORT "https://github.com/yaml/libyaml/issues" #define PACKAGE_URL "https://github.com/yaml/libyaml" #define YAML_VERSION_MAJOR 0 #define YAML_VERSION_MINOR 1 #define YAML_VERSION_PATCH 7 #define YAML_VERSION_STRING "0.1.7" psych-2.2.4/ext/psych/yaml/dumper.c000066400000000000000000000234741305404671600172150ustar00rootroot00000000000000 #include "yaml_private.h" /* * API functions. */ YAML_DECLARE(int) yaml_emitter_open(yaml_emitter_t *emitter); YAML_DECLARE(int) yaml_emitter_close(yaml_emitter_t *emitter); YAML_DECLARE(int) yaml_emitter_dump(yaml_emitter_t *emitter, yaml_document_t *document); /* * Clean up functions. */ static void yaml_emitter_delete_document_and_anchors(yaml_emitter_t *emitter); /* * Anchor functions. */ static void yaml_emitter_anchor_node(yaml_emitter_t *emitter, int index); static yaml_char_t * yaml_emitter_generate_anchor(yaml_emitter_t *emitter, int anchor_id); /* * Serialize functions. 
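 *
 * The serializers declared below are driven by yaml_emitter_dump(). A
 * minimal, illustrative driver for that entry point might look like the
 * following; writing to stdout and the omitted error handling are
 * assumptions of the example:
 *
 *     yaml_emitter_t emitter;
 *     yaml_emitter_initialize(&emitter);
 *     yaml_emitter_set_output_file(&emitter, stdout);
 *     yaml_emitter_open(&emitter);
 *     yaml_emitter_dump(&emitter, &document);   [document built elsewhere]
 *     yaml_emitter_close(&emitter);
 *     yaml_emitter_delete(&emitter);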
*/ static int yaml_emitter_dump_node(yaml_emitter_t *emitter, int index); static int yaml_emitter_dump_alias(yaml_emitter_t *emitter, yaml_char_t *anchor); static int yaml_emitter_dump_scalar(yaml_emitter_t *emitter, yaml_node_t *node, yaml_char_t *anchor); static int yaml_emitter_dump_sequence(yaml_emitter_t *emitter, yaml_node_t *node, yaml_char_t *anchor); static int yaml_emitter_dump_mapping(yaml_emitter_t *emitter, yaml_node_t *node, yaml_char_t *anchor); /* * Issue a STREAM-START event. */ YAML_DECLARE(int) yaml_emitter_open(yaml_emitter_t *emitter) { yaml_event_t event; yaml_mark_t mark = { 0, 0, 0 }; assert(emitter); /* Non-NULL emitter object is required. */ assert(!emitter->opened); /* Emitter should not be opened yet. */ STREAM_START_EVENT_INIT(event, YAML_ANY_ENCODING, mark, mark); if (!yaml_emitter_emit(emitter, &event)) { return 0; } emitter->opened = 1; return 1; } /* * Issue a STREAM-END event. */ YAML_DECLARE(int) yaml_emitter_close(yaml_emitter_t *emitter) { yaml_event_t event; yaml_mark_t mark = { 0, 0, 0 }; assert(emitter); /* Non-NULL emitter object is required. */ assert(emitter->opened); /* Emitter should be opened. */ if (emitter->closed) return 1; STREAM_END_EVENT_INIT(event, mark, mark); if (!yaml_emitter_emit(emitter, &event)) { return 0; } emitter->closed = 1; return 1; } /* * Dump a YAML document. */ YAML_DECLARE(int) yaml_emitter_dump(yaml_emitter_t *emitter, yaml_document_t *document) { yaml_event_t event; yaml_mark_t mark = { 0, 0, 0 }; assert(emitter); /* Non-NULL emitter object is required. */ assert(document); /* Non-NULL emitter object is expected. */ emitter->document = document; if (!emitter->opened) { if (!yaml_emitter_open(emitter)) goto error; } if (STACK_EMPTY(emitter, document->nodes)) { if (!yaml_emitter_close(emitter)) goto error; yaml_emitter_delete_document_and_anchors(emitter); return 1; } assert(emitter->opened); /* Emitter should be opened. */ emitter->anchors = yaml_malloc(sizeof(*(emitter->anchors)) * (document->nodes.top - document->nodes.start)); if (!emitter->anchors) goto error; memset(emitter->anchors, 0, sizeof(*(emitter->anchors)) * (document->nodes.top - document->nodes.start)); DOCUMENT_START_EVENT_INIT(event, document->version_directive, document->tag_directives.start, document->tag_directives.end, document->start_implicit, mark, mark); if (!yaml_emitter_emit(emitter, &event)) goto error; yaml_emitter_anchor_node(emitter, 1); if (!yaml_emitter_dump_node(emitter, 1)) goto error; DOCUMENT_END_EVENT_INIT(event, document->end_implicit, mark, mark); if (!yaml_emitter_emit(emitter, &event)) goto error; yaml_emitter_delete_document_and_anchors(emitter); return 1; error: yaml_emitter_delete_document_and_anchors(emitter); return 0; } /* * Clean up the emitter object after a document is dumped. 
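 *
 * Because every yaml_emitter_dump() call ends up here, the emitter takes
 * responsibility for the document: its node contents are released whether
 * the dump succeeded or failed. In the usual pattern (illustrative) the
 * caller therefore does not delete the document again:
 *
 *     yaml_emitter_dump(&emitter, &document);
 *     [no yaml_document_delete(&document) afterwards]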
*/ static void yaml_emitter_delete_document_and_anchors(yaml_emitter_t *emitter) { int index; if (!emitter->anchors) { yaml_document_delete(emitter->document); emitter->document = NULL; return; } for (index = 0; emitter->document->nodes.start + index < emitter->document->nodes.top; index ++) { yaml_node_t node = emitter->document->nodes.start[index]; if (!emitter->anchors[index].serialized) { yaml_free(node.tag); if (node.type == YAML_SCALAR_NODE) { yaml_free(node.data.scalar.value); } } if (node.type == YAML_SEQUENCE_NODE) { STACK_DEL(emitter, node.data.sequence.items); } if (node.type == YAML_MAPPING_NODE) { STACK_DEL(emitter, node.data.mapping.pairs); } } STACK_DEL(emitter, emitter->document->nodes); yaml_free(emitter->anchors); emitter->anchors = NULL; emitter->last_anchor_id = 0; emitter->document = NULL; } /* * Check the references of a node and assign the anchor id if needed. */ static void yaml_emitter_anchor_node(yaml_emitter_t *emitter, int index) { yaml_node_t *node = emitter->document->nodes.start + index - 1; yaml_node_item_t *item; yaml_node_pair_t *pair; emitter->anchors[index-1].references ++; if (emitter->anchors[index-1].references == 1) { switch (node->type) { case YAML_SEQUENCE_NODE: for (item = node->data.sequence.items.start; item < node->data.sequence.items.top; item ++) { yaml_emitter_anchor_node(emitter, *item); } break; case YAML_MAPPING_NODE: for (pair = node->data.mapping.pairs.start; pair < node->data.mapping.pairs.top; pair ++) { yaml_emitter_anchor_node(emitter, pair->key); yaml_emitter_anchor_node(emitter, pair->value); } break; default: break; } } else if (emitter->anchors[index-1].references == 2) { emitter->anchors[index-1].anchor = (++ emitter->last_anchor_id); } } /* * Generate a textual representation for an anchor. */ #define ANCHOR_TEMPLATE "id%03d" #define ANCHOR_TEMPLATE_LENGTH 16 static yaml_char_t * yaml_emitter_generate_anchor(yaml_emitter_t *emitter, int anchor_id) { yaml_char_t *anchor = yaml_malloc(ANCHOR_TEMPLATE_LENGTH); if (!anchor) return NULL; sprintf((char *)anchor, ANCHOR_TEMPLATE, anchor_id); return anchor; } /* * Serialize a node. */ static int yaml_emitter_dump_node(yaml_emitter_t *emitter, int index) { yaml_node_t *node = emitter->document->nodes.start + index - 1; int anchor_id = emitter->anchors[index-1].anchor; yaml_char_t *anchor = NULL; if (anchor_id) { anchor = yaml_emitter_generate_anchor(emitter, anchor_id); if (!anchor) return 0; } if (emitter->anchors[index-1].serialized) { return yaml_emitter_dump_alias(emitter, anchor); } emitter->anchors[index-1].serialized = 1; switch (node->type) { case YAML_SCALAR_NODE: return yaml_emitter_dump_scalar(emitter, node, anchor); case YAML_SEQUENCE_NODE: return yaml_emitter_dump_sequence(emitter, node, anchor); case YAML_MAPPING_NODE: return yaml_emitter_dump_mapping(emitter, node, anchor); default: assert(0); /* Could not happen. */ break; } return 0; /* Could not happen. */ } /* * Serialize an alias. */ static int yaml_emitter_dump_alias(yaml_emitter_t *emitter, yaml_char_t *anchor) { yaml_event_t event; yaml_mark_t mark = { 0, 0, 0 }; ALIAS_EVENT_INIT(event, anchor, mark, mark); return yaml_emitter_emit(emitter, &event); } /* * Serialize a scalar. 
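 *
 * The SCALAR event built here marks the default tag as implicit in both its
 * plain and quoted forms. For reference, the equivalent event-level call in
 * user code (an illustrative sketch; the value, style, and `emitter` are
 * assumptions of the example) would be:
 *
 *     yaml_event_t event;
 *     yaml_scalar_event_initialize(&event, NULL, NULL,
 *             (yaml_char_t *)"hello", -1,
 *             1, 1,                       [plain_implicit, quoted_implicit]
 *             YAML_PLAIN_SCALAR_STYLE);
 *     yaml_emitter_emit(&emitter, &event);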
*/ static int yaml_emitter_dump_scalar(yaml_emitter_t *emitter, yaml_node_t *node, yaml_char_t *anchor) { yaml_event_t event; yaml_mark_t mark = { 0, 0, 0 }; int plain_implicit = (strcmp((char *)node->tag, YAML_DEFAULT_SCALAR_TAG) == 0); int quoted_implicit = (strcmp((char *)node->tag, YAML_DEFAULT_SCALAR_TAG) == 0); SCALAR_EVENT_INIT(event, anchor, node->tag, node->data.scalar.value, node->data.scalar.length, plain_implicit, quoted_implicit, node->data.scalar.style, mark, mark); return yaml_emitter_emit(emitter, &event); } /* * Serialize a sequence. */ static int yaml_emitter_dump_sequence(yaml_emitter_t *emitter, yaml_node_t *node, yaml_char_t *anchor) { yaml_event_t event; yaml_mark_t mark = { 0, 0, 0 }; int implicit = (strcmp((char *)node->tag, YAML_DEFAULT_SEQUENCE_TAG) == 0); yaml_node_item_t *item; SEQUENCE_START_EVENT_INIT(event, anchor, node->tag, implicit, node->data.sequence.style, mark, mark); if (!yaml_emitter_emit(emitter, &event)) return 0; for (item = node->data.sequence.items.start; item < node->data.sequence.items.top; item ++) { if (!yaml_emitter_dump_node(emitter, *item)) return 0; } SEQUENCE_END_EVENT_INIT(event, mark, mark); if (!yaml_emitter_emit(emitter, &event)) return 0; return 1; } /* * Serialize a mapping. */ static int yaml_emitter_dump_mapping(yaml_emitter_t *emitter, yaml_node_t *node, yaml_char_t *anchor) { yaml_event_t event; yaml_mark_t mark = { 0, 0, 0 }; int implicit = (strcmp((char *)node->tag, YAML_DEFAULT_MAPPING_TAG) == 0); yaml_node_pair_t *pair; MAPPING_START_EVENT_INIT(event, anchor, node->tag, implicit, node->data.mapping.style, mark, mark); if (!yaml_emitter_emit(emitter, &event)) return 0; for (pair = node->data.mapping.pairs.start; pair < node->data.mapping.pairs.top; pair ++) { if (!yaml_emitter_dump_node(emitter, pair->key)) return 0; if (!yaml_emitter_dump_node(emitter, pair->value)) return 0; } MAPPING_END_EVENT_INIT(event, mark, mark); if (!yaml_emitter_emit(emitter, &event)) return 0; return 1; } psych-2.2.4/ext/psych/yaml/emitter.c000066400000000000000000002002501305404671600173570ustar00rootroot00000000000000 #include "yaml_private.h" /* * Flush the buffer if needed. */ #define FLUSH(emitter) \ ((emitter->buffer.pointer+5 < emitter->buffer.end) \ || yaml_emitter_flush(emitter)) /* * Put a character to the output buffer. */ #define PUT(emitter,value) \ (FLUSH(emitter) \ && (*(emitter->buffer.pointer++) = (yaml_char_t)(value), \ emitter->column ++, \ 1)) /* * Put a line break to the output buffer. */ #define PUT_BREAK(emitter) \ (FLUSH(emitter) \ && ((emitter->line_break == YAML_CR_BREAK ? \ (*(emitter->buffer.pointer++) = (yaml_char_t) '\r') : \ emitter->line_break == YAML_LN_BREAK ? \ (*(emitter->buffer.pointer++) = (yaml_char_t) '\n') : \ emitter->line_break == YAML_CRLN_BREAK ? \ (*(emitter->buffer.pointer++) = (yaml_char_t) '\r', \ *(emitter->buffer.pointer++) = (yaml_char_t) '\n') : 0), \ emitter->column = 0, \ emitter->line ++, \ 1)) /* * Copy a character from a string into buffer. */ #define WRITE(emitter,string) \ (FLUSH(emitter) \ && (COPY(emitter->buffer,string), \ emitter->column ++, \ 1)) /* * Copy a line break character from a string into buffer. */ #define WRITE_BREAK(emitter,string) \ (FLUSH(emitter) \ && (CHECK(string,'\n') ? \ (PUT_BREAK(emitter), \ string.pointer ++, \ 1) : \ (COPY(emitter->buffer,string), \ emitter->column = 0, \ emitter->line ++, \ 1))) /* * API functions. */ YAML_DECLARE(int) yaml_emitter_emit(yaml_emitter_t *emitter, yaml_event_t *event); /* * Utility functions. 
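 *
 * The public entry point is yaml_emitter_emit(), declared above; the emitter
 * takes ownership of each event passed to it. An illustrative event-driven
 * sequence for a one-scalar document (setup of `emitter` and `event`, and
 * error checks, are omitted as assumptions of the example):
 *
 *     yaml_stream_start_event_initialize(&event, YAML_UTF8_ENCODING);
 *     yaml_emitter_emit(&emitter, &event);
 *     yaml_document_start_event_initialize(&event, NULL, NULL, NULL, 1);
 *     yaml_emitter_emit(&emitter, &event);
 *     yaml_scalar_event_initialize(&event, NULL, NULL,
 *             (yaml_char_t *)"hello world", -1, 1, 1, YAML_ANY_SCALAR_STYLE);
 *     yaml_emitter_emit(&emitter, &event);
 *     yaml_document_end_event_initialize(&event, 1);
 *     yaml_emitter_emit(&emitter, &event);
 *     yaml_stream_end_event_initialize(&event);
 *     yaml_emitter_emit(&emitter, &event);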
*/ static int yaml_emitter_set_emitter_error(yaml_emitter_t *emitter, const char *problem); static int yaml_emitter_need_more_events(yaml_emitter_t *emitter); static int yaml_emitter_append_tag_directive(yaml_emitter_t *emitter, yaml_tag_directive_t value, int allow_duplicates); static int yaml_emitter_increase_indent(yaml_emitter_t *emitter, int flow, int indentless); /* * State functions. */ static int yaml_emitter_state_machine(yaml_emitter_t *emitter, yaml_event_t *event); static int yaml_emitter_emit_stream_start(yaml_emitter_t *emitter, yaml_event_t *event); static int yaml_emitter_emit_document_start(yaml_emitter_t *emitter, yaml_event_t *event, int first); static int yaml_emitter_emit_document_content(yaml_emitter_t *emitter, yaml_event_t *event); static int yaml_emitter_emit_document_end(yaml_emitter_t *emitter, yaml_event_t *event); static int yaml_emitter_emit_flow_sequence_item(yaml_emitter_t *emitter, yaml_event_t *event, int first); static int yaml_emitter_emit_flow_mapping_key(yaml_emitter_t *emitter, yaml_event_t *event, int first); static int yaml_emitter_emit_flow_mapping_value(yaml_emitter_t *emitter, yaml_event_t *event, int simple); static int yaml_emitter_emit_block_sequence_item(yaml_emitter_t *emitter, yaml_event_t *event, int first); static int yaml_emitter_emit_block_mapping_key(yaml_emitter_t *emitter, yaml_event_t *event, int first); static int yaml_emitter_emit_block_mapping_value(yaml_emitter_t *emitter, yaml_event_t *event, int simple); static int yaml_emitter_emit_node(yaml_emitter_t *emitter, yaml_event_t *event, int root, int sequence, int mapping, int simple_key); static int yaml_emitter_emit_alias(yaml_emitter_t *emitter, yaml_event_t *event); static int yaml_emitter_emit_scalar(yaml_emitter_t *emitter, yaml_event_t *event); static int yaml_emitter_emit_sequence_start(yaml_emitter_t *emitter, yaml_event_t *event); static int yaml_emitter_emit_mapping_start(yaml_emitter_t *emitter, yaml_event_t *event); /* * Checkers. */ static int yaml_emitter_check_empty_document(yaml_emitter_t *emitter); static int yaml_emitter_check_empty_sequence(yaml_emitter_t *emitter); static int yaml_emitter_check_empty_mapping(yaml_emitter_t *emitter); static int yaml_emitter_check_simple_key(yaml_emitter_t *emitter); static int yaml_emitter_select_scalar_style(yaml_emitter_t *emitter, yaml_event_t *event); /* * Processors. */ static int yaml_emitter_process_anchor(yaml_emitter_t *emitter); static int yaml_emitter_process_tag(yaml_emitter_t *emitter); static int yaml_emitter_process_scalar(yaml_emitter_t *emitter); /* * Analyzers. */ static int yaml_emitter_analyze_version_directive(yaml_emitter_t *emitter, yaml_version_directive_t version_directive); static int yaml_emitter_analyze_tag_directive(yaml_emitter_t *emitter, yaml_tag_directive_t tag_directive); static int yaml_emitter_analyze_anchor(yaml_emitter_t *emitter, yaml_char_t *anchor, int alias); static int yaml_emitter_analyze_tag(yaml_emitter_t *emitter, yaml_char_t *tag); static int yaml_emitter_analyze_scalar(yaml_emitter_t *emitter, yaml_char_t *value, size_t length); static int yaml_emitter_analyze_event(yaml_emitter_t *emitter, yaml_event_t *event); /* * Writers. 
*/ static int yaml_emitter_write_bom(yaml_emitter_t *emitter); static int yaml_emitter_write_indent(yaml_emitter_t *emitter); static int yaml_emitter_write_indicator(yaml_emitter_t *emitter, char *indicator, int need_whitespace, int is_whitespace, int is_indention); static int yaml_emitter_write_anchor(yaml_emitter_t *emitter, yaml_char_t *value, size_t length); static int yaml_emitter_write_tag_handle(yaml_emitter_t *emitter, yaml_char_t *value, size_t length); static int yaml_emitter_write_tag_content(yaml_emitter_t *emitter, yaml_char_t *value, size_t length, int need_whitespace); static int yaml_emitter_write_plain_scalar(yaml_emitter_t *emitter, yaml_char_t *value, size_t length, int allow_breaks); static int yaml_emitter_write_single_quoted_scalar(yaml_emitter_t *emitter, yaml_char_t *value, size_t length, int allow_breaks); static int yaml_emitter_write_double_quoted_scalar(yaml_emitter_t *emitter, yaml_char_t *value, size_t length, int allow_breaks); static int yaml_emitter_write_block_scalar_hints(yaml_emitter_t *emitter, yaml_string_t string); static int yaml_emitter_write_literal_scalar(yaml_emitter_t *emitter, yaml_char_t *value, size_t length); static int yaml_emitter_write_folded_scalar(yaml_emitter_t *emitter, yaml_char_t *value, size_t length); /* * Set an emitter error and return 0. */ static int yaml_emitter_set_emitter_error(yaml_emitter_t *emitter, const char *problem) { emitter->error = YAML_EMITTER_ERROR; emitter->problem = problem; return 0; } /* * Emit an event. */ YAML_DECLARE(int) yaml_emitter_emit(yaml_emitter_t *emitter, yaml_event_t *event) { if (!ENQUEUE(emitter, emitter->events, *event)) { yaml_event_delete(event); return 0; } while (!yaml_emitter_need_more_events(emitter)) { if (!yaml_emitter_analyze_event(emitter, emitter->events.head)) return 0; if (!yaml_emitter_state_machine(emitter, emitter->events.head)) return 0; yaml_event_delete(&DEQUEUE(emitter, emitter->events)); } return 1; } /* * Check if we need to accumulate more events before emitting. * * We accumulate extra * - 1 event for DOCUMENT-START * - 2 events for SEQUENCE-START * - 3 events for MAPPING-START */ static int yaml_emitter_need_more_events(yaml_emitter_t *emitter) { int level = 0; int accumulate = 0; yaml_event_t *event; if (QUEUE_EMPTY(emitter, emitter->events)) return 1; switch (emitter->events.head->type) { case YAML_DOCUMENT_START_EVENT: accumulate = 1; break; case YAML_SEQUENCE_START_EVENT: accumulate = 2; break; case YAML_MAPPING_START_EVENT: accumulate = 3; break; default: return 0; } if (emitter->events.tail - emitter->events.head > accumulate) return 0; for (event = emitter->events.head; event != emitter->events.tail; event ++) { switch (event->type) { case YAML_STREAM_START_EVENT: case YAML_DOCUMENT_START_EVENT: case YAML_SEQUENCE_START_EVENT: case YAML_MAPPING_START_EVENT: level += 1; break; case YAML_STREAM_END_EVENT: case YAML_DOCUMENT_END_EVENT: case YAML_SEQUENCE_END_EVENT: case YAML_MAPPING_END_EVENT: level -= 1; break; default: break; } if (!level) return 0; } return 1; } /* * Append a directive to the directives stack. 
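 *
 * Tag directives reach this function through DOCUMENT-START events. An
 * illustrative sketch (the handle and prefix values are assumptions of the
 * example):
 *
 *     yaml_tag_directive_t tags[] = {
 *         { (yaml_char_t *)"!e!", (yaml_char_t *)"tag:example.com,2002:" }
 *     };
 *     yaml_document_start_event_initialize(&event, NULL, tags, tags + 1, 0);
 *     yaml_emitter_emit(&emitter, &event);
 *
 * A handle already on the stack is rejected as a duplicate %TAG directive;
 * the built-in "!" and "!!" handles are appended afterwards with
 * allow_duplicates set, so they coexist with user-supplied directives.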
*/ static int yaml_emitter_append_tag_directive(yaml_emitter_t *emitter, yaml_tag_directive_t value, int allow_duplicates) { yaml_tag_directive_t *tag_directive; yaml_tag_directive_t copy = { NULL, NULL }; for (tag_directive = emitter->tag_directives.start; tag_directive != emitter->tag_directives.top; tag_directive ++) { if (strcmp((char *)value.handle, (char *)tag_directive->handle) == 0) { if (allow_duplicates) return 1; return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive"); } } copy.handle = yaml_strdup(value.handle); copy.prefix = yaml_strdup(value.prefix); if (!copy.handle || !copy.prefix) { emitter->error = YAML_MEMORY_ERROR; goto error; } if (!PUSH(emitter, emitter->tag_directives, copy)) goto error; return 1; error: yaml_free(copy.handle); yaml_free(copy.prefix); return 0; } /* * Increase the indentation level. */ static int yaml_emitter_increase_indent(yaml_emitter_t *emitter, int flow, int indentless) { if (!PUSH(emitter, emitter->indents, emitter->indent)) return 0; if (emitter->indent < 0) { emitter->indent = flow ? emitter->best_indent : 0; } else if (!indentless) { emitter->indent += emitter->best_indent; } return 1; } /* * State dispatcher. */ static int yaml_emitter_state_machine(yaml_emitter_t *emitter, yaml_event_t *event) { switch (emitter->state) { case YAML_EMIT_STREAM_START_STATE: return yaml_emitter_emit_stream_start(emitter, event); case YAML_EMIT_FIRST_DOCUMENT_START_STATE: return yaml_emitter_emit_document_start(emitter, event, 1); case YAML_EMIT_DOCUMENT_START_STATE: return yaml_emitter_emit_document_start(emitter, event, 0); case YAML_EMIT_DOCUMENT_CONTENT_STATE: return yaml_emitter_emit_document_content(emitter, event); case YAML_EMIT_DOCUMENT_END_STATE: return yaml_emitter_emit_document_end(emitter, event); case YAML_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE: return yaml_emitter_emit_flow_sequence_item(emitter, event, 1); case YAML_EMIT_FLOW_SEQUENCE_ITEM_STATE: return yaml_emitter_emit_flow_sequence_item(emitter, event, 0); case YAML_EMIT_FLOW_MAPPING_FIRST_KEY_STATE: return yaml_emitter_emit_flow_mapping_key(emitter, event, 1); case YAML_EMIT_FLOW_MAPPING_KEY_STATE: return yaml_emitter_emit_flow_mapping_key(emitter, event, 0); case YAML_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE: return yaml_emitter_emit_flow_mapping_value(emitter, event, 1); case YAML_EMIT_FLOW_MAPPING_VALUE_STATE: return yaml_emitter_emit_flow_mapping_value(emitter, event, 0); case YAML_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE: return yaml_emitter_emit_block_sequence_item(emitter, event, 1); case YAML_EMIT_BLOCK_SEQUENCE_ITEM_STATE: return yaml_emitter_emit_block_sequence_item(emitter, event, 0); case YAML_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE: return yaml_emitter_emit_block_mapping_key(emitter, event, 1); case YAML_EMIT_BLOCK_MAPPING_KEY_STATE: return yaml_emitter_emit_block_mapping_key(emitter, event, 0); case YAML_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE: return yaml_emitter_emit_block_mapping_value(emitter, event, 1); case YAML_EMIT_BLOCK_MAPPING_VALUE_STATE: return yaml_emitter_emit_block_mapping_value(emitter, event, 0); case YAML_EMIT_END_STATE: return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END"); default: assert(1); /* Invalid state. */ } return 0; } /* * Expect STREAM-START. 
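 *
 * STREAM-START is where the emitter settings are finalized: an indent
 * outside the 2..9 range falls back to 2, a width that is unset or too small
 * falls back to 80 (a negative width means unlimited), and the line break
 * defaults to '\n'. An illustrative configuration done before emitting the
 * event (the chosen values are assumptions of the example):
 *
 *     yaml_emitter_set_indent(&emitter, 4);
 *     yaml_emitter_set_width(&emitter, 72);
 *     yaml_emitter_set_unicode(&emitter, 1);
 *     yaml_stream_start_event_initialize(&event, YAML_UTF8_ENCODING);
 *     yaml_emitter_emit(&emitter, &event);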
*/ static int yaml_emitter_emit_stream_start(yaml_emitter_t *emitter, yaml_event_t *event) { if (event->type == YAML_STREAM_START_EVENT) { if (!emitter->encoding) { emitter->encoding = event->data.stream_start.encoding; } if (!emitter->encoding) { emitter->encoding = YAML_UTF8_ENCODING; } if (emitter->best_indent < 2 || emitter->best_indent > 9) { emitter->best_indent = 2; } if (emitter->best_width >= 0 && emitter->best_width <= emitter->best_indent*2) { emitter->best_width = 80; } if (emitter->best_width < 0) { emitter->best_width = INT_MAX; } if (!emitter->line_break) { emitter->line_break = YAML_LN_BREAK; } emitter->indent = -1; emitter->line = 0; emitter->column = 0; emitter->whitespace = 1; emitter->indention = 1; if (emitter->encoding != YAML_UTF8_ENCODING) { if (!yaml_emitter_write_bom(emitter)) return 0; } emitter->state = YAML_EMIT_FIRST_DOCUMENT_START_STATE; return 1; } return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START"); } /* * Expect DOCUMENT-START or STREAM-END. */ static int yaml_emitter_emit_document_start(yaml_emitter_t *emitter, yaml_event_t *event, int first) { if (event->type == YAML_DOCUMENT_START_EVENT) { yaml_tag_directive_t default_tag_directives[] = { {(yaml_char_t *)"!", (yaml_char_t *)"!"}, {(yaml_char_t *)"!!", (yaml_char_t *)"tag:yaml.org,2002:"}, {NULL, NULL} }; yaml_tag_directive_t *tag_directive; int implicit; if (event->data.document_start.version_directive) { if (!yaml_emitter_analyze_version_directive(emitter, *event->data.document_start.version_directive)) return 0; } for (tag_directive = event->data.document_start.tag_directives.start; tag_directive != event->data.document_start.tag_directives.end; tag_directive ++) { if (!yaml_emitter_analyze_tag_directive(emitter, *tag_directive)) return 0; if (!yaml_emitter_append_tag_directive(emitter, *tag_directive, 0)) return 0; } for (tag_directive = default_tag_directives; tag_directive->handle; tag_directive ++) { if (!yaml_emitter_append_tag_directive(emitter, *tag_directive, 1)) return 0; } implicit = event->data.document_start.implicit; if (!first || emitter->canonical) { implicit = 0; } if ((event->data.document_start.version_directive || (event->data.document_start.tag_directives.start != event->data.document_start.tag_directives.end)) && emitter->open_ended) { if (!yaml_emitter_write_indicator(emitter, "...", 1, 0, 0)) return 0; if (!yaml_emitter_write_indent(emitter)) return 0; } if (event->data.document_start.version_directive) { implicit = 0; if (!yaml_emitter_write_indicator(emitter, "%YAML", 1, 0, 0)) return 0; if (!yaml_emitter_write_indicator(emitter, "1.1", 1, 0, 0)) return 0; if (!yaml_emitter_write_indent(emitter)) return 0; } if (event->data.document_start.tag_directives.start != event->data.document_start.tag_directives.end) { implicit = 0; for (tag_directive = event->data.document_start.tag_directives.start; tag_directive != event->data.document_start.tag_directives.end; tag_directive ++) { if (!yaml_emitter_write_indicator(emitter, "%TAG", 1, 0, 0)) return 0; if (!yaml_emitter_write_tag_handle(emitter, tag_directive->handle, strlen((char *)tag_directive->handle))) return 0; if (!yaml_emitter_write_tag_content(emitter, tag_directive->prefix, strlen((char *)tag_directive->prefix), 1)) return 0; if (!yaml_emitter_write_indent(emitter)) return 0; } } if (yaml_emitter_check_empty_document(emitter)) { implicit = 0; } if (!implicit) { if (!yaml_emitter_write_indent(emitter)) return 0; if (!yaml_emitter_write_indicator(emitter, "---", 1, 0, 0)) return 0; if (emitter->canonical) { 
if (!yaml_emitter_write_indent(emitter)) return 0; } } emitter->state = YAML_EMIT_DOCUMENT_CONTENT_STATE; return 1; } else if (event->type == YAML_STREAM_END_EVENT) { if (emitter->open_ended) { if (!yaml_emitter_write_indicator(emitter, "...", 1, 0, 0)) return 0; if (!yaml_emitter_write_indent(emitter)) return 0; } if (!yaml_emitter_flush(emitter)) return 0; emitter->state = YAML_EMIT_END_STATE; return 1; } return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END"); } /* * Expect the root node. */ static int yaml_emitter_emit_document_content(yaml_emitter_t *emitter, yaml_event_t *event) { if (!PUSH(emitter, emitter->states, YAML_EMIT_DOCUMENT_END_STATE)) return 0; return yaml_emitter_emit_node(emitter, event, 1, 0, 0, 0); } /* * Expect DOCUMENT-END. */ static int yaml_emitter_emit_document_end(yaml_emitter_t *emitter, yaml_event_t *event) { if (event->type == YAML_DOCUMENT_END_EVENT) { if (!yaml_emitter_write_indent(emitter)) return 0; if (!event->data.document_end.implicit) { if (!yaml_emitter_write_indicator(emitter, "...", 1, 0, 0)) return 0; if (!yaml_emitter_write_indent(emitter)) return 0; } if (!yaml_emitter_flush(emitter)) return 0; emitter->state = YAML_EMIT_DOCUMENT_START_STATE; while (!STACK_EMPTY(emitter, emitter->tag_directives)) { yaml_tag_directive_t tag_directive = POP(emitter, emitter->tag_directives); yaml_free(tag_directive.handle); yaml_free(tag_directive.prefix); } return 1; } return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END"); } /* * * Expect a flow item node. */ static int yaml_emitter_emit_flow_sequence_item(yaml_emitter_t *emitter, yaml_event_t *event, int first) { if (first) { if (!yaml_emitter_write_indicator(emitter, "[", 1, 1, 0)) return 0; if (!yaml_emitter_increase_indent(emitter, 1, 0)) return 0; emitter->flow_level ++; } if (event->type == YAML_SEQUENCE_END_EVENT) { emitter->flow_level --; emitter->indent = POP(emitter, emitter->indents); if (emitter->canonical && !first) { if (!yaml_emitter_write_indicator(emitter, ",", 0, 0, 0)) return 0; if (!yaml_emitter_write_indent(emitter)) return 0; } if (!yaml_emitter_write_indicator(emitter, "]", 0, 0, 0)) return 0; emitter->state = POP(emitter, emitter->states); return 1; } if (!first) { if (!yaml_emitter_write_indicator(emitter, ",", 0, 0, 0)) return 0; } if (emitter->canonical || emitter->column > emitter->best_width) { if (!yaml_emitter_write_indent(emitter)) return 0; } if (!PUSH(emitter, emitter->states, YAML_EMIT_FLOW_SEQUENCE_ITEM_STATE)) return 0; return yaml_emitter_emit_node(emitter, event, 0, 1, 0, 0); } /* * Expect a flow key node. 
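 *
 * In flow context a mapping is rendered inline. For instance, emitting
 * MAPPING-START with YAML_FLOW_MAPPING_STYLE followed by two scalar pairs
 * produces output along the lines of (illustrative values):
 *
 *     {name: libyaml, version: 0.1.7}
 *
 * whereas in canonical mode simple keys are not used and each entry is
 * written as an explicit "? key : value" pair on its own line.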
*/ static int yaml_emitter_emit_flow_mapping_key(yaml_emitter_t *emitter, yaml_event_t *event, int first) { if (first) { if (!yaml_emitter_write_indicator(emitter, "{", 1, 1, 0)) return 0; if (!yaml_emitter_increase_indent(emitter, 1, 0)) return 0; emitter->flow_level ++; } if (event->type == YAML_MAPPING_END_EVENT) { emitter->flow_level --; emitter->indent = POP(emitter, emitter->indents); if (emitter->canonical && !first) { if (!yaml_emitter_write_indicator(emitter, ",", 0, 0, 0)) return 0; if (!yaml_emitter_write_indent(emitter)) return 0; } if (!yaml_emitter_write_indicator(emitter, "}", 0, 0, 0)) return 0; emitter->state = POP(emitter, emitter->states); return 1; } if (!first) { if (!yaml_emitter_write_indicator(emitter, ",", 0, 0, 0)) return 0; } if (emitter->canonical || emitter->column > emitter->best_width) { if (!yaml_emitter_write_indent(emitter)) return 0; } if (!emitter->canonical && yaml_emitter_check_simple_key(emitter)) { if (!PUSH(emitter, emitter->states, YAML_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE)) return 0; return yaml_emitter_emit_node(emitter, event, 0, 0, 1, 1); } else { if (!yaml_emitter_write_indicator(emitter, "?", 1, 0, 0)) return 0; if (!PUSH(emitter, emitter->states, YAML_EMIT_FLOW_MAPPING_VALUE_STATE)) return 0; return yaml_emitter_emit_node(emitter, event, 0, 0, 1, 0); } } /* * Expect a flow value node. */ static int yaml_emitter_emit_flow_mapping_value(yaml_emitter_t *emitter, yaml_event_t *event, int simple) { if (simple) { if (!yaml_emitter_write_indicator(emitter, ":", 0, 0, 0)) return 0; } else { if (emitter->canonical || emitter->column > emitter->best_width) { if (!yaml_emitter_write_indent(emitter)) return 0; } if (!yaml_emitter_write_indicator(emitter, ":", 1, 0, 0)) return 0; } if (!PUSH(emitter, emitter->states, YAML_EMIT_FLOW_MAPPING_KEY_STATE)) return 0; return yaml_emitter_emit_node(emitter, event, 0, 0, 1, 0); } /* * Expect a block item node. */ static int yaml_emitter_emit_block_sequence_item(yaml_emitter_t *emitter, yaml_event_t *event, int first) { if (first) { if (!yaml_emitter_increase_indent(emitter, 0, (emitter->mapping_context && !emitter->indention))) return 0; } if (event->type == YAML_SEQUENCE_END_EVENT) { emitter->indent = POP(emitter, emitter->indents); emitter->state = POP(emitter, emitter->states); return 1; } if (!yaml_emitter_write_indent(emitter)) return 0; if (!yaml_emitter_write_indicator(emitter, "-", 1, 0, 1)) return 0; if (!PUSH(emitter, emitter->states, YAML_EMIT_BLOCK_SEQUENCE_ITEM_STATE)) return 0; return yaml_emitter_emit_node(emitter, event, 0, 1, 0, 0); } /* * Expect a block key node. */ static int yaml_emitter_emit_block_mapping_key(yaml_emitter_t *emitter, yaml_event_t *event, int first) { if (first) { if (!yaml_emitter_increase_indent(emitter, 0, 0)) return 0; } if (event->type == YAML_MAPPING_END_EVENT) { emitter->indent = POP(emitter, emitter->indents); emitter->state = POP(emitter, emitter->states); return 1; } if (!yaml_emitter_write_indent(emitter)) return 0; if (yaml_emitter_check_simple_key(emitter)) { if (!PUSH(emitter, emitter->states, YAML_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE)) return 0; return yaml_emitter_emit_node(emitter, event, 0, 0, 1, 1); } else { if (!yaml_emitter_write_indicator(emitter, "?", 1, 0, 1)) return 0; if (!PUSH(emitter, emitter->states, YAML_EMIT_BLOCK_MAPPING_VALUE_STATE)) return 0; return yaml_emitter_emit_node(emitter, event, 0, 0, 1, 0); } } /* * Expect a block value node. 
*/ static int yaml_emitter_emit_block_mapping_value(yaml_emitter_t *emitter, yaml_event_t *event, int simple) { if (simple) { if (!yaml_emitter_write_indicator(emitter, ":", 0, 0, 0)) return 0; } else { if (!yaml_emitter_write_indent(emitter)) return 0; if (!yaml_emitter_write_indicator(emitter, ":", 1, 0, 1)) return 0; } if (!PUSH(emitter, emitter->states, YAML_EMIT_BLOCK_MAPPING_KEY_STATE)) return 0; return yaml_emitter_emit_node(emitter, event, 0, 0, 1, 0); } /* * Expect a node. */ static int yaml_emitter_emit_node(yaml_emitter_t *emitter, yaml_event_t *event, int root, int sequence, int mapping, int simple_key) { emitter->root_context = root; emitter->sequence_context = sequence; emitter->mapping_context = mapping; emitter->simple_key_context = simple_key; switch (event->type) { case YAML_ALIAS_EVENT: return yaml_emitter_emit_alias(emitter, event); case YAML_SCALAR_EVENT: return yaml_emitter_emit_scalar(emitter, event); case YAML_SEQUENCE_START_EVENT: return yaml_emitter_emit_sequence_start(emitter, event); case YAML_MAPPING_START_EVENT: return yaml_emitter_emit_mapping_start(emitter, event); default: return yaml_emitter_set_emitter_error(emitter, "expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS"); } return 0; } /* * Expect ALIAS. */ static int yaml_emitter_emit_alias(yaml_emitter_t *emitter, yaml_event_t *event) { if (!yaml_emitter_process_anchor(emitter)) return 0; emitter->state = POP(emitter, emitter->states); return 1; } /* * Expect SCALAR. */ static int yaml_emitter_emit_scalar(yaml_emitter_t *emitter, yaml_event_t *event) { if (!yaml_emitter_select_scalar_style(emitter, event)) return 0; if (!yaml_emitter_process_anchor(emitter)) return 0; if (!yaml_emitter_process_tag(emitter)) return 0; if (!yaml_emitter_increase_indent(emitter, 1, 0)) return 0; if (!yaml_emitter_process_scalar(emitter)) return 0; emitter->indent = POP(emitter, emitter->indents); emitter->state = POP(emitter, emitter->states); return 1; } /* * Expect SEQUENCE-START. */ static int yaml_emitter_emit_sequence_start(yaml_emitter_t *emitter, yaml_event_t *event) { if (!yaml_emitter_process_anchor(emitter)) return 0; if (!yaml_emitter_process_tag(emitter)) return 0; if (emitter->flow_level || emitter->canonical || event->data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE || yaml_emitter_check_empty_sequence(emitter)) { emitter->state = YAML_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE; } else { emitter->state = YAML_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE; } return 1; } /* * Expect MAPPING-START. */ static int yaml_emitter_emit_mapping_start(yaml_emitter_t *emitter, yaml_event_t *event) { if (!yaml_emitter_process_anchor(emitter)) return 0; if (!yaml_emitter_process_tag(emitter)) return 0; if (emitter->flow_level || emitter->canonical || event->data.mapping_start.style == YAML_FLOW_MAPPING_STYLE || yaml_emitter_check_empty_mapping(emitter)) { emitter->state = YAML_EMIT_FLOW_MAPPING_FIRST_KEY_STATE; } else { emitter->state = YAML_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE; } return 1; } /* * Check if the document content is an empty scalar. */ static int yaml_emitter_check_empty_document(yaml_emitter_t *emitter) { return 0; } /* * Check if the next events represent an empty sequence. */ static int yaml_emitter_check_empty_sequence(yaml_emitter_t *emitter) { if (emitter->events.tail - emitter->events.head < 2) return 0; return (emitter->events.head[0].type == YAML_SEQUENCE_START_EVENT && emitter->events.head[1].type == YAML_SEQUENCE_END_EVENT); } /* * Check if the next events represent an empty mapping. 
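 *
 * This lookahead is why yaml_emitter_need_more_events() buffers extra
 * events: MAPPING-START immediately followed by MAPPING-END is forced into
 * flow style and rendered as "{}" (the sequence analogue renders as "[]"),
 * regardless of the requested block style. Illustrative sketch:
 *
 *     yaml_mapping_start_event_initialize(&event, NULL, NULL, 1,
 *             YAML_BLOCK_MAPPING_STYLE);
 *     yaml_emitter_emit(&emitter, &event);
 *     yaml_mapping_end_event_initialize(&event);
 *     yaml_emitter_emit(&emitter, &event);      [comes out as {}]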
*/ static int yaml_emitter_check_empty_mapping(yaml_emitter_t *emitter) { if (emitter->events.tail - emitter->events.head < 2) return 0; return (emitter->events.head[0].type == YAML_MAPPING_START_EVENT && emitter->events.head[1].type == YAML_MAPPING_END_EVENT); } /* * Check if the next node can be expressed as a simple key. */ static int yaml_emitter_check_simple_key(yaml_emitter_t *emitter) { yaml_event_t *event = emitter->events.head; size_t length = 0; switch (event->type) { case YAML_ALIAS_EVENT: length += emitter->anchor_data.anchor_length; break; case YAML_SCALAR_EVENT: if (emitter->scalar_data.multiline) return 0; length += emitter->anchor_data.anchor_length + emitter->tag_data.handle_length + emitter->tag_data.suffix_length + emitter->scalar_data.length; break; case YAML_SEQUENCE_START_EVENT: if (!yaml_emitter_check_empty_sequence(emitter)) return 0; length += emitter->anchor_data.anchor_length + emitter->tag_data.handle_length + emitter->tag_data.suffix_length; break; case YAML_MAPPING_START_EVENT: if (!yaml_emitter_check_empty_mapping(emitter)) return 0; length += emitter->anchor_data.anchor_length + emitter->tag_data.handle_length + emitter->tag_data.suffix_length; break; default: return 0; } if (length > 128) return 0; return 1; } /* * Determine an acceptable scalar style. */ static int yaml_emitter_select_scalar_style(yaml_emitter_t *emitter, yaml_event_t *event) { yaml_scalar_style_t style = event->data.scalar.style; int no_tag = (!emitter->tag_data.handle && !emitter->tag_data.suffix); if (no_tag && !event->data.scalar.plain_implicit && !event->data.scalar.quoted_implicit) { return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified"); } if (style == YAML_ANY_SCALAR_STYLE) style = YAML_PLAIN_SCALAR_STYLE; if (emitter->canonical) style = YAML_DOUBLE_QUOTED_SCALAR_STYLE; if (emitter->simple_key_context && emitter->scalar_data.multiline) style = YAML_DOUBLE_QUOTED_SCALAR_STYLE; if (style == YAML_PLAIN_SCALAR_STYLE) { if ((emitter->flow_level && !emitter->scalar_data.flow_plain_allowed) || (!emitter->flow_level && !emitter->scalar_data.block_plain_allowed)) style = YAML_SINGLE_QUOTED_SCALAR_STYLE; if (!emitter->scalar_data.length && (emitter->flow_level || emitter->simple_key_context)) style = YAML_SINGLE_QUOTED_SCALAR_STYLE; if (no_tag && !event->data.scalar.plain_implicit) style = YAML_SINGLE_QUOTED_SCALAR_STYLE; } if (style == YAML_SINGLE_QUOTED_SCALAR_STYLE) { if (!emitter->scalar_data.single_quoted_allowed) style = YAML_DOUBLE_QUOTED_SCALAR_STYLE; } if (style == YAML_LITERAL_SCALAR_STYLE || style == YAML_FOLDED_SCALAR_STYLE) { if (!emitter->scalar_data.block_allowed || emitter->flow_level || emitter->simple_key_context) style = YAML_DOUBLE_QUOTED_SCALAR_STYLE; } if (no_tag && !event->data.scalar.quoted_implicit && style != YAML_PLAIN_SCALAR_STYLE) { emitter->tag_data.handle = (yaml_char_t *)"!"; emitter->tag_data.handle_length = 1; } emitter->scalar_data.style = style; return 1; } /* * Write an achor. */ static int yaml_emitter_process_anchor(yaml_emitter_t *emitter) { if (!emitter->anchor_data.anchor) return 1; if (!yaml_emitter_write_indicator(emitter, (emitter->anchor_data.alias ? "*" : "&"), 1, 0, 0)) return 0; return yaml_emitter_write_anchor(emitter, emitter->anchor_data.anchor, emitter->anchor_data.anchor_length); } /* * Write a tag. 
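 *
 * When a tag needs to be written, its form depends on the %TAG directives in
 * effect: with the built-in "!!" handle, for example, "tag:yaml.org,2002:str"
 * comes out as "!!str", while a tag that matches no known prefix is written
 * verbatim as "!<tag:example.com,2002:thing>" (the example URI is an
 * assumption, not something libyaml defines).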
*/ static int yaml_emitter_process_tag(yaml_emitter_t *emitter) { if (!emitter->tag_data.handle && !emitter->tag_data.suffix) return 1; if (emitter->tag_data.handle) { if (!yaml_emitter_write_tag_handle(emitter, emitter->tag_data.handle, emitter->tag_data.handle_length)) return 0; if (emitter->tag_data.suffix) { if (!yaml_emitter_write_tag_content(emitter, emitter->tag_data.suffix, emitter->tag_data.suffix_length, 0)) return 0; } } else { if (!yaml_emitter_write_indicator(emitter, "!<", 1, 0, 0)) return 0; if (!yaml_emitter_write_tag_content(emitter, emitter->tag_data.suffix, emitter->tag_data.suffix_length, 0)) return 0; if (!yaml_emitter_write_indicator(emitter, ">", 0, 0, 0)) return 0; } return 1; } /* * Write a scalar. */ static int yaml_emitter_process_scalar(yaml_emitter_t *emitter) { switch (emitter->scalar_data.style) { case YAML_PLAIN_SCALAR_STYLE: return yaml_emitter_write_plain_scalar(emitter, emitter->scalar_data.value, emitter->scalar_data.length, !emitter->simple_key_context); case YAML_SINGLE_QUOTED_SCALAR_STYLE: return yaml_emitter_write_single_quoted_scalar(emitter, emitter->scalar_data.value, emitter->scalar_data.length, !emitter->simple_key_context); case YAML_DOUBLE_QUOTED_SCALAR_STYLE: return yaml_emitter_write_double_quoted_scalar(emitter, emitter->scalar_data.value, emitter->scalar_data.length, !emitter->simple_key_context); case YAML_LITERAL_SCALAR_STYLE: return yaml_emitter_write_literal_scalar(emitter, emitter->scalar_data.value, emitter->scalar_data.length); case YAML_FOLDED_SCALAR_STYLE: return yaml_emitter_write_folded_scalar(emitter, emitter->scalar_data.value, emitter->scalar_data.length); default: assert(1); /* Impossible. */ } return 0; } /* * Check if a %YAML directive is valid. */ static int yaml_emitter_analyze_version_directive(yaml_emitter_t *emitter, yaml_version_directive_t version_directive) { if (version_directive.major != 1 || version_directive.minor != 1) { return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive"); } return 1; } /* * Check if a %TAG directive is valid. */ static int yaml_emitter_analyze_tag_directive(yaml_emitter_t *emitter, yaml_tag_directive_t tag_directive) { yaml_string_t handle; yaml_string_t prefix; size_t handle_length; size_t prefix_length; handle_length = strlen((char *)tag_directive.handle); prefix_length = strlen((char *)tag_directive.prefix); STRING_ASSIGN(handle, tag_directive.handle, handle_length); STRING_ASSIGN(prefix, tag_directive.prefix, prefix_length); if (handle.start == handle.end) { return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty"); } if (handle.start[0] != '!') { return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'"); } if (handle.end[-1] != '!') { return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'"); } handle.pointer ++; while (handle.pointer < handle.end-1) { if (!IS_ALPHA(handle)) { return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only"); } MOVE(handle); } if (prefix.start == prefix.end) { return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty"); } return 1; } /* * Check if an anchor is valid. */ static int yaml_emitter_analyze_anchor(yaml_emitter_t *emitter, yaml_char_t *anchor, int alias) { size_t anchor_length; yaml_string_t string; anchor_length = strlen((char *)anchor); STRING_ASSIGN(string, anchor, anchor_length); if (string.start == string.end) { return yaml_emitter_set_emitter_error(emitter, alias ? 
"alias value must not be empty" : "anchor value must not be empty"); } while (string.pointer != string.end) { if (!IS_ALPHA(string)) { return yaml_emitter_set_emitter_error(emitter, alias ? "alias value must contain alphanumerical characters only" : "anchor value must contain alphanumerical characters only"); } MOVE(string); } emitter->anchor_data.anchor = string.start; emitter->anchor_data.anchor_length = string.end - string.start; emitter->anchor_data.alias = alias; return 1; } /* * Check if a tag is valid. */ static int yaml_emitter_analyze_tag(yaml_emitter_t *emitter, yaml_char_t *tag) { size_t tag_length; yaml_string_t string; yaml_tag_directive_t *tag_directive; tag_length = strlen((char *)tag); STRING_ASSIGN(string, tag, tag_length); if (string.start == string.end) { return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty"); } for (tag_directive = emitter->tag_directives.start; tag_directive != emitter->tag_directives.top; tag_directive ++) { size_t prefix_length = strlen((char *)tag_directive->prefix); if (prefix_length < (size_t)(string.end - string.start) && strncmp((char *)tag_directive->prefix, (char *)string.start, prefix_length) == 0) { emitter->tag_data.handle = tag_directive->handle; emitter->tag_data.handle_length = strlen((char *)tag_directive->handle); emitter->tag_data.suffix = string.start + prefix_length; emitter->tag_data.suffix_length = (string.end - string.start) - prefix_length; return 1; } } emitter->tag_data.suffix = string.start; emitter->tag_data.suffix_length = string.end - string.start; return 1; } /* * Check if a scalar is valid. */ static int yaml_emitter_analyze_scalar(yaml_emitter_t *emitter, yaml_char_t *value, size_t length) { yaml_string_t string; int block_indicators = 0; int flow_indicators = 0; int line_breaks = 0; int special_characters = 0; int leading_space = 0; int leading_break = 0; int trailing_space = 0; int trailing_break = 0; int break_space = 0; int space_break = 0; int preceded_by_whitespace = 0; int followed_by_whitespace = 0; int previous_space = 0; int previous_break = 0; STRING_ASSIGN(string, value, length); emitter->scalar_data.value = value; emitter->scalar_data.length = length; if (string.start == string.end) { emitter->scalar_data.multiline = 0; emitter->scalar_data.flow_plain_allowed = 0; emitter->scalar_data.block_plain_allowed = 1; emitter->scalar_data.single_quoted_allowed = 1; emitter->scalar_data.block_allowed = 0; return 1; } if ((CHECK_AT(string, '-', 0) && CHECK_AT(string, '-', 1) && CHECK_AT(string, '-', 2)) || (CHECK_AT(string, '.', 0) && CHECK_AT(string, '.', 1) && CHECK_AT(string, '.', 2))) { block_indicators = 1; flow_indicators = 1; } preceded_by_whitespace = 1; followed_by_whitespace = IS_BLANKZ_AT(string, WIDTH(string)); while (string.pointer != string.end) { if (string.start == string.pointer) { if (CHECK(string, '#') || CHECK(string, ',') || CHECK(string, '[') || CHECK(string, ']') || CHECK(string, '{') || CHECK(string, '}') || CHECK(string, '&') || CHECK(string, '*') || CHECK(string, '!') || CHECK(string, '|') || CHECK(string, '>') || CHECK(string, '\'') || CHECK(string, '"') || CHECK(string, '%') || CHECK(string, '@') || CHECK(string, '`')) { flow_indicators = 1; block_indicators = 1; } if (CHECK(string, '?') || CHECK(string, ':')) { flow_indicators = 1; if (followed_by_whitespace) { block_indicators = 1; } } if (CHECK(string, '-') && followed_by_whitespace) { flow_indicators = 1; block_indicators = 1; } } else { if (CHECK(string, ',') || CHECK(string, '?') || CHECK(string, '[') || 
CHECK(string, ']') || CHECK(string, '{') || CHECK(string, '}')) { flow_indicators = 1; } if (CHECK(string, ':')) { flow_indicators = 1; if (followed_by_whitespace) { block_indicators = 1; } } if (CHECK(string, '#') && preceded_by_whitespace) { flow_indicators = 1; block_indicators = 1; } } if (!IS_PRINTABLE(string) || (!IS_ASCII(string) && !emitter->unicode)) { special_characters = 1; } if (IS_BREAK(string)) { line_breaks = 1; } if (IS_SPACE(string)) { if (string.start == string.pointer) { leading_space = 1; } if (string.pointer+WIDTH(string) == string.end) { trailing_space = 1; } if (previous_break) { break_space = 1; } previous_space = 1; previous_break = 0; } else if (IS_BREAK(string)) { if (string.start == string.pointer) { leading_break = 1; } if (string.pointer+WIDTH(string) == string.end) { trailing_break = 1; } if (previous_space) { space_break = 1; } previous_space = 0; previous_break = 1; } else { previous_space = 0; previous_break = 0; } preceded_by_whitespace = IS_BLANKZ(string); MOVE(string); if (string.pointer != string.end) { followed_by_whitespace = IS_BLANKZ_AT(string, WIDTH(string)); } } emitter->scalar_data.multiline = line_breaks; emitter->scalar_data.flow_plain_allowed = 1; emitter->scalar_data.block_plain_allowed = 1; emitter->scalar_data.single_quoted_allowed = 1; emitter->scalar_data.block_allowed = 1; if (leading_space || leading_break || trailing_space || trailing_break) { emitter->scalar_data.flow_plain_allowed = 0; emitter->scalar_data.block_plain_allowed = 0; } if (trailing_space) { emitter->scalar_data.block_allowed = 0; } if (break_space) { emitter->scalar_data.flow_plain_allowed = 0; emitter->scalar_data.block_plain_allowed = 0; emitter->scalar_data.single_quoted_allowed = 0; } if (space_break || special_characters) { emitter->scalar_data.flow_plain_allowed = 0; emitter->scalar_data.block_plain_allowed = 0; emitter->scalar_data.single_quoted_allowed = 0; emitter->scalar_data.block_allowed = 0; } if (line_breaks) { emitter->scalar_data.flow_plain_allowed = 0; emitter->scalar_data.block_plain_allowed = 0; } if (flow_indicators) { emitter->scalar_data.flow_plain_allowed = 0; } if (block_indicators) { emitter->scalar_data.block_plain_allowed = 0; } return 1; } /* * Check if the event data is valid. 
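 *
 * The flags gathered above feed yaml_emitter_select_scalar_style(). As an
 * illustration (values chosen for the example): requesting plain style for
 * " spaced", which has a leading blank, is downgraded to single quotes and
 * emitted as ' spaced', while a value containing a non-printable character
 * such as a tab loses every unquoted style and is emitted double quoted
 * with the character escaped, e.g. "tab\there".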
*/ static int yaml_emitter_analyze_event(yaml_emitter_t *emitter, yaml_event_t *event) { emitter->anchor_data.anchor = NULL; emitter->anchor_data.anchor_length = 0; emitter->tag_data.handle = NULL; emitter->tag_data.handle_length = 0; emitter->tag_data.suffix = NULL; emitter->tag_data.suffix_length = 0; emitter->scalar_data.value = NULL; emitter->scalar_data.length = 0; switch (event->type) { case YAML_ALIAS_EVENT: if (!yaml_emitter_analyze_anchor(emitter, event->data.alias.anchor, 1)) return 0; return 1; case YAML_SCALAR_EVENT: if (event->data.scalar.anchor) { if (!yaml_emitter_analyze_anchor(emitter, event->data.scalar.anchor, 0)) return 0; } if (event->data.scalar.tag && (emitter->canonical || (!event->data.scalar.plain_implicit && !event->data.scalar.quoted_implicit))) { if (!yaml_emitter_analyze_tag(emitter, event->data.scalar.tag)) return 0; } if (!yaml_emitter_analyze_scalar(emitter, event->data.scalar.value, event->data.scalar.length)) return 0; return 1; case YAML_SEQUENCE_START_EVENT: if (event->data.sequence_start.anchor) { if (!yaml_emitter_analyze_anchor(emitter, event->data.sequence_start.anchor, 0)) return 0; } if (event->data.sequence_start.tag && (emitter->canonical || !event->data.sequence_start.implicit)) { if (!yaml_emitter_analyze_tag(emitter, event->data.sequence_start.tag)) return 0; } return 1; case YAML_MAPPING_START_EVENT: if (event->data.mapping_start.anchor) { if (!yaml_emitter_analyze_anchor(emitter, event->data.mapping_start.anchor, 0)) return 0; } if (event->data.mapping_start.tag && (emitter->canonical || !event->data.mapping_start.implicit)) { if (!yaml_emitter_analyze_tag(emitter, event->data.mapping_start.tag)) return 0; } return 1; default: return 1; } } /* * Write the BOM character. */ static int yaml_emitter_write_bom(yaml_emitter_t *emitter) { if (!FLUSH(emitter)) return 0; *(emitter->buffer.pointer++) = (yaml_char_t) '\xEF'; *(emitter->buffer.pointer++) = (yaml_char_t) '\xBB'; *(emitter->buffer.pointer++) = (yaml_char_t) '\xBF'; return 1; } static int yaml_emitter_write_indent(yaml_emitter_t *emitter) { int indent = (emitter->indent >= 0) ? 
emitter->indent : 0; if (!emitter->indention || emitter->column > indent || (emitter->column == indent && !emitter->whitespace)) { if (!PUT_BREAK(emitter)) return 0; } while (emitter->column < indent) { if (!PUT(emitter, ' ')) return 0; } emitter->whitespace = 1; emitter->indention = 1; return 1; } static int yaml_emitter_write_indicator(yaml_emitter_t *emitter, char *indicator, int need_whitespace, int is_whitespace, int is_indention) { size_t indicator_length; yaml_string_t string; indicator_length = strlen(indicator); STRING_ASSIGN(string, (yaml_char_t *)indicator, indicator_length); if (need_whitespace && !emitter->whitespace) { if (!PUT(emitter, ' ')) return 0; } while (string.pointer != string.end) { if (!WRITE(emitter, string)) return 0; } emitter->whitespace = is_whitespace; emitter->indention = (emitter->indention && is_indention); emitter->open_ended = 0; return 1; } static int yaml_emitter_write_anchor(yaml_emitter_t *emitter, yaml_char_t *value, size_t length) { yaml_string_t string; STRING_ASSIGN(string, value, length); while (string.pointer != string.end) { if (!WRITE(emitter, string)) return 0; } emitter->whitespace = 0; emitter->indention = 0; return 1; } static int yaml_emitter_write_tag_handle(yaml_emitter_t *emitter, yaml_char_t *value, size_t length) { yaml_string_t string; STRING_ASSIGN(string, value, length); if (!emitter->whitespace) { if (!PUT(emitter, ' ')) return 0; } while (string.pointer != string.end) { if (!WRITE(emitter, string)) return 0; } emitter->whitespace = 0; emitter->indention = 0; return 1; } static int yaml_emitter_write_tag_content(yaml_emitter_t *emitter, yaml_char_t *value, size_t length, int need_whitespace) { yaml_string_t string; STRING_ASSIGN(string, value, length); if (need_whitespace && !emitter->whitespace) { if (!PUT(emitter, ' ')) return 0; } while (string.pointer != string.end) { if (IS_ALPHA(string) || CHECK(string, ';') || CHECK(string, '/') || CHECK(string, '?') || CHECK(string, ':') || CHECK(string, '@') || CHECK(string, '&') || CHECK(string, '=') || CHECK(string, '+') || CHECK(string, '$') || CHECK(string, ',') || CHECK(string, '_') || CHECK(string, '.') || CHECK(string, '~') || CHECK(string, '*') || CHECK(string, '\'') || CHECK(string, '(') || CHECK(string, ')') || CHECK(string, '[') || CHECK(string, ']')) { if (!WRITE(emitter, string)) return 0; } else { int width = WIDTH(string); unsigned int value; while (width --) { value = *(string.pointer++); if (!PUT(emitter, '%')) return 0; if (!PUT(emitter, (value >> 4) + ((value >> 4) < 10 ? '0' : 'A' - 10))) return 0; if (!PUT(emitter, (value & 0x0F) + ((value & 0x0F) < 10 ? 
'0' : 'A' - 10))) return 0; } } } emitter->whitespace = 0; emitter->indention = 0; return 1; } static int yaml_emitter_write_plain_scalar(yaml_emitter_t *emitter, yaml_char_t *value, size_t length, int allow_breaks) { yaml_string_t string; int spaces = 0; int breaks = 0; STRING_ASSIGN(string, value, length); if (!emitter->whitespace) { if (!PUT(emitter, ' ')) return 0; } while (string.pointer != string.end) { if (IS_SPACE(string)) { if (allow_breaks && !spaces && emitter->column > emitter->best_width && !IS_SPACE_AT(string, 1)) { if (!yaml_emitter_write_indent(emitter)) return 0; MOVE(string); } else { if (!WRITE(emitter, string)) return 0; } spaces = 1; } else if (IS_BREAK(string)) { if (!breaks && CHECK(string, '\n')) { if (!PUT_BREAK(emitter)) return 0; } if (!WRITE_BREAK(emitter, string)) return 0; emitter->indention = 1; breaks = 1; } else { if (breaks) { if (!yaml_emitter_write_indent(emitter)) return 0; } if (!WRITE(emitter, string)) return 0; emitter->indention = 0; spaces = 0; breaks = 0; } } emitter->whitespace = 0; emitter->indention = 0; if (emitter->root_context) { emitter->open_ended = 1; } return 1; } static int yaml_emitter_write_single_quoted_scalar(yaml_emitter_t *emitter, yaml_char_t *value, size_t length, int allow_breaks) { yaml_string_t string; int spaces = 0; int breaks = 0; STRING_ASSIGN(string, value, length); if (!yaml_emitter_write_indicator(emitter, "'", 1, 0, 0)) return 0; while (string.pointer != string.end) { if (IS_SPACE(string)) { if (allow_breaks && !spaces && emitter->column > emitter->best_width && string.pointer != string.start && string.pointer != string.end - 1 && !IS_SPACE_AT(string, 1)) { if (!yaml_emitter_write_indent(emitter)) return 0; MOVE(string); } else { if (!WRITE(emitter, string)) return 0; } spaces = 1; } else if (IS_BREAK(string)) { if (!breaks && CHECK(string, '\n')) { if (!PUT_BREAK(emitter)) return 0; } if (!WRITE_BREAK(emitter, string)) return 0; emitter->indention = 1; breaks = 1; } else { if (breaks) { if (!yaml_emitter_write_indent(emitter)) return 0; } if (CHECK(string, '\'')) { if (!PUT(emitter, '\'')) return 0; } if (!WRITE(emitter, string)) return 0; emitter->indention = 0; spaces = 0; breaks = 0; } } if (!yaml_emitter_write_indicator(emitter, "'", 0, 0, 0)) return 0; emitter->whitespace = 0; emitter->indention = 0; return 1; } static int yaml_emitter_write_double_quoted_scalar(yaml_emitter_t *emitter, yaml_char_t *value, size_t length, int allow_breaks) { yaml_string_t string; int spaces = 0; STRING_ASSIGN(string, value, length); if (!yaml_emitter_write_indicator(emitter, "\"", 1, 0, 0)) return 0; while (string.pointer != string.end) { if (!IS_PRINTABLE(string) || (!emitter->unicode && !IS_ASCII(string)) || IS_BOM(string) || IS_BREAK(string) || CHECK(string, '"') || CHECK(string, '\\')) { unsigned char octet; unsigned int width; unsigned int value; int k; octet = string.pointer[0]; width = (octet & 0x80) == 0x00 ? 1 : (octet & 0xE0) == 0xC0 ? 2 : (octet & 0xF0) == 0xE0 ? 3 : (octet & 0xF8) == 0xF0 ? 4 : 0; value = (octet & 0x80) == 0x00 ? octet & 0x7F : (octet & 0xE0) == 0xC0 ? octet & 0x1F : (octet & 0xF0) == 0xE0 ? octet & 0x0F : (octet & 0xF8) == 0xF0 ? 
octet & 0x07 : 0; for (k = 1; k < (int)width; k ++) { octet = string.pointer[k]; value = (value << 6) + (octet & 0x3F); } string.pointer += width; if (!PUT(emitter, '\\')) return 0; switch (value) { case 0x00: if (!PUT(emitter, '0')) return 0; break; case 0x07: if (!PUT(emitter, 'a')) return 0; break; case 0x08: if (!PUT(emitter, 'b')) return 0; break; case 0x09: if (!PUT(emitter, 't')) return 0; break; case 0x0A: if (!PUT(emitter, 'n')) return 0; break; case 0x0B: if (!PUT(emitter, 'v')) return 0; break; case 0x0C: if (!PUT(emitter, 'f')) return 0; break; case 0x0D: if (!PUT(emitter, 'r')) return 0; break; case 0x1B: if (!PUT(emitter, 'e')) return 0; break; case 0x22: if (!PUT(emitter, '\"')) return 0; break; case 0x5C: if (!PUT(emitter, '\\')) return 0; break; case 0x85: if (!PUT(emitter, 'N')) return 0; break; case 0xA0: if (!PUT(emitter, '_')) return 0; break; case 0x2028: if (!PUT(emitter, 'L')) return 0; break; case 0x2029: if (!PUT(emitter, 'P')) return 0; break; default: if (value <= 0xFF) { if (!PUT(emitter, 'x')) return 0; width = 2; } else if (value <= 0xFFFF) { if (!PUT(emitter, 'u')) return 0; width = 4; } else { if (!PUT(emitter, 'U')) return 0; width = 8; } for (k = (width-1)*4; k >= 0; k -= 4) { int digit = (value >> k) & 0x0F; if (!PUT(emitter, digit + (digit < 10 ? '0' : 'A'-10))) return 0; } } spaces = 0; } else if (IS_SPACE(string)) { if (allow_breaks && !spaces && emitter->column > emitter->best_width && string.pointer != string.start && string.pointer != string.end - 1) { if (!yaml_emitter_write_indent(emitter)) return 0; if (IS_SPACE_AT(string, 1)) { if (!PUT(emitter, '\\')) return 0; } MOVE(string); } else { if (!WRITE(emitter, string)) return 0; } spaces = 1; } else { if (!WRITE(emitter, string)) return 0; spaces = 0; } } if (!yaml_emitter_write_indicator(emitter, "\"", 0, 0, 0)) return 0; emitter->whitespace = 0; emitter->indention = 0; return 1; } static int yaml_emitter_write_block_scalar_hints(yaml_emitter_t *emitter, yaml_string_t string) { char indent_hint[2]; char *chomp_hint = NULL; if (IS_SPACE(string) || IS_BREAK(string)) { indent_hint[0] = '0' + (char)emitter->best_indent; indent_hint[1] = '\0'; if (!yaml_emitter_write_indicator(emitter, indent_hint, 0, 0, 0)) return 0; } emitter->open_ended = 0; string.pointer = string.end; if (string.start == string.pointer) { chomp_hint = "-"; } else { do { string.pointer --; } while ((*string.pointer & 0xC0) == 0x80); if (!IS_BREAK(string)) { chomp_hint = "-"; } else if (string.start == string.pointer) { chomp_hint = "+"; emitter->open_ended = 1; } else { do { string.pointer --; } while ((*string.pointer & 0xC0) == 0x80); if (IS_BREAK(string)) { chomp_hint = "+"; emitter->open_ended = 1; } } } if (chomp_hint) { if (!yaml_emitter_write_indicator(emitter, chomp_hint, 0, 0, 0)) return 0; } return 1; } static int yaml_emitter_write_literal_scalar(yaml_emitter_t *emitter, yaml_char_t *value, size_t length) { yaml_string_t string; int breaks = 1; STRING_ASSIGN(string, value, length); if (!yaml_emitter_write_indicator(emitter, "|", 1, 0, 0)) return 0; if (!yaml_emitter_write_block_scalar_hints(emitter, string)) return 0; if (!PUT_BREAK(emitter)) return 0; emitter->indention = 1; emitter->whitespace = 1; while (string.pointer != string.end) { if (IS_BREAK(string)) { if (!WRITE_BREAK(emitter, string)) return 0; emitter->indention = 1; breaks = 1; } else { if (breaks) { if (!yaml_emitter_write_indent(emitter)) return 0; } if (!WRITE(emitter, string)) return 0; emitter->indention = 0; breaks = 0; } } return 1; } static int 
yaml_emitter_write_folded_scalar(yaml_emitter_t *emitter, yaml_char_t *value, size_t length) { yaml_string_t string; int breaks = 1; int leading_spaces = 1; STRING_ASSIGN(string, value, length); if (!yaml_emitter_write_indicator(emitter, ">", 1, 0, 0)) return 0; if (!yaml_emitter_write_block_scalar_hints(emitter, string)) return 0; if (!PUT_BREAK(emitter)) return 0; emitter->indention = 1; emitter->whitespace = 1; while (string.pointer != string.end) { if (IS_BREAK(string)) { if (!breaks && !leading_spaces && CHECK(string, '\n')) { int k = 0; while (IS_BREAK_AT(string, k)) { k += WIDTH_AT(string, k); } if (!IS_BLANKZ_AT(string, k)) { if (!PUT_BREAK(emitter)) return 0; } } if (!WRITE_BREAK(emitter, string)) return 0; emitter->indention = 1; breaks = 1; } else { if (breaks) { if (!yaml_emitter_write_indent(emitter)) return 0; leading_spaces = IS_BLANK(string); } if (!breaks && IS_SPACE(string) && !IS_SPACE_AT(string, 1) && emitter->column > emitter->best_width) { if (!yaml_emitter_write_indent(emitter)) return 0; MOVE(string); } else { if (!WRITE(emitter, string)) return 0; } emitter->indention = 0; breaks = 0; } } return 1; } psych-2.2.4/ext/psych/yaml/loader.c000066400000000000000000000271711305404671600171650ustar00rootroot00000000000000 #include "yaml_private.h" /* * API functions. */ YAML_DECLARE(int) yaml_parser_load(yaml_parser_t *parser, yaml_document_t *document); /* * Error handling. */ static int yaml_parser_set_composer_error(yaml_parser_t *parser, const char *problem, yaml_mark_t problem_mark); static int yaml_parser_set_composer_error_context(yaml_parser_t *parser, const char *context, yaml_mark_t context_mark, const char *problem, yaml_mark_t problem_mark); /* * Alias handling. */ static int yaml_parser_register_anchor(yaml_parser_t *parser, int index, yaml_char_t *anchor); /* * Clean up functions. */ static void yaml_parser_delete_aliases(yaml_parser_t *parser); /* * Composer functions. */ static int yaml_parser_load_document(yaml_parser_t *parser, yaml_event_t *first_event); static int yaml_parser_load_node(yaml_parser_t *parser, yaml_event_t *first_event); static int yaml_parser_load_alias(yaml_parser_t *parser, yaml_event_t *first_event); static int yaml_parser_load_scalar(yaml_parser_t *parser, yaml_event_t *first_event); static int yaml_parser_load_sequence(yaml_parser_t *parser, yaml_event_t *first_event); static int yaml_parser_load_mapping(yaml_parser_t *parser, yaml_event_t *first_event); /* * Load the next document of the stream. */ YAML_DECLARE(int) yaml_parser_load(yaml_parser_t *parser, yaml_document_t *document) { yaml_event_t event; assert(parser); /* Non-NULL parser object is expected. */ assert(document); /* Non-NULL document object is expected. */ memset(document, 0, sizeof(yaml_document_t)); if (!STACK_INIT(parser, document->nodes, INITIAL_STACK_SIZE)) goto error; if (!parser->stream_start_produced) { if (!yaml_parser_parse(parser, &event)) goto error; assert(event.type == YAML_STREAM_START_EVENT); /* STREAM-START is expected. */ } if (parser->stream_end_produced) { return 1; } if (!yaml_parser_parse(parser, &event)) goto error; if (event.type == YAML_STREAM_END_EVENT) { return 1; } if (!STACK_INIT(parser, parser->aliases, INITIAL_STACK_SIZE)) goto error; parser->document = document; if (!yaml_parser_load_document(parser, &event)) goto error; yaml_parser_delete_aliases(parser); parser->document = NULL; return 1; error: yaml_parser_delete_aliases(parser); yaml_document_delete(document); parser->document = NULL; return 0; } /* * Set composer error. 
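 *
 * [Editor's note: illustrative sketch, not part of the bundled libyaml
 * source; the input string and variable names are made up.] A typical
 * caller drives yaml_parser_load() in a loop until it gets a document with
 * no root node, which marks the end of the stream:
 *
 *     yaml_parser_t parser;
 *     yaml_document_t document;
 *     yaml_parser_initialize(&parser);
 *     yaml_parser_set_input_string(&parser,
 *             (const unsigned char *)"- 1\n- 2\n", 8);
 *     while (yaml_parser_load(&parser, &document)) {
 *         if (!yaml_document_get_root_node(&document)) {
 *             yaml_document_delete(&document);
 *             break;
 *         }
 *         yaml_document_delete(&document);
 *     }
 *     yaml_parser_delete(&parser);
 *
 * Each loaded document would normally be processed before the
 * yaml_document_delete() call. When yaml_parser_load() returns 0, the
 * fields filled in by the error helpers below (parser->error,
 * parser->problem, parser->problem_mark) describe the failure.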
*/ static int yaml_parser_set_composer_error(yaml_parser_t *parser, const char *problem, yaml_mark_t problem_mark) { parser->error = YAML_COMPOSER_ERROR; parser->problem = problem; parser->problem_mark = problem_mark; return 0; } /* * Set composer error with context. */ static int yaml_parser_set_composer_error_context(yaml_parser_t *parser, const char *context, yaml_mark_t context_mark, const char *problem, yaml_mark_t problem_mark) { parser->error = YAML_COMPOSER_ERROR; parser->context = context; parser->context_mark = context_mark; parser->problem = problem; parser->problem_mark = problem_mark; return 0; } /* * Delete the stack of aliases. */ static void yaml_parser_delete_aliases(yaml_parser_t *parser) { while (!STACK_EMPTY(parser, parser->aliases)) { yaml_free(POP(parser, parser->aliases).anchor); } STACK_DEL(parser, parser->aliases); } /* * Compose a document object. */ static int yaml_parser_load_document(yaml_parser_t *parser, yaml_event_t *first_event) { yaml_event_t event; assert(first_event->type == YAML_DOCUMENT_START_EVENT); /* DOCUMENT-START is expected. */ parser->document->version_directive = first_event->data.document_start.version_directive; parser->document->tag_directives.start = first_event->data.document_start.tag_directives.start; parser->document->tag_directives.end = first_event->data.document_start.tag_directives.end; parser->document->start_implicit = first_event->data.document_start.implicit; parser->document->start_mark = first_event->start_mark; if (!yaml_parser_parse(parser, &event)) return 0; if (!yaml_parser_load_node(parser, &event)) return 0; if (!yaml_parser_parse(parser, &event)) return 0; assert(event.type == YAML_DOCUMENT_END_EVENT); /* DOCUMENT-END is expected. */ parser->document->end_implicit = event.data.document_end.implicit; parser->document->end_mark = event.end_mark; return 1; } /* * Compose a node. */ static int yaml_parser_load_node(yaml_parser_t *parser, yaml_event_t *first_event) { switch (first_event->type) { case YAML_ALIAS_EVENT: return yaml_parser_load_alias(parser, first_event); case YAML_SCALAR_EVENT: return yaml_parser_load_scalar(parser, first_event); case YAML_SEQUENCE_START_EVENT: return yaml_parser_load_sequence(parser, first_event); case YAML_MAPPING_START_EVENT: return yaml_parser_load_mapping(parser, first_event); default: assert(0); /* Could not happen. */ return 0; } return 0; } /* * Add an anchor. */ static int yaml_parser_register_anchor(yaml_parser_t *parser, int index, yaml_char_t *anchor) { yaml_alias_data_t data; yaml_alias_data_t *alias_data; if (!anchor) return 1; data.anchor = anchor; data.index = index; data.mark = parser->document->nodes.start[index-1].start_mark; for (alias_data = parser->aliases.start; alias_data != parser->aliases.top; alias_data ++) { if (strcmp((char *)alias_data->anchor, (char *)anchor) == 0) { yaml_free(anchor); return yaml_parser_set_composer_error_context(parser, "found duplicate anchor; first occurrence", alias_data->mark, "second occurrence", data.mark); } } if (!PUSH(parser, parser->aliases, data)) { yaml_free(anchor); return 0; } return 1; } /* * Compose a node corresponding to an alias. 
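 *
 * [Editor's note: illustrative sketch, not part of the bundled libyaml
 * source; the input and variable names are made up.] An alias does not get
 * a node of its own: it resolves to the index of the previously anchored
 * node, so both references share one yaml_node_t in document->nodes:
 *
 *     yaml_parser_t parser;
 *     yaml_document_t doc;
 *     const char *in = "a: &id [1, 2]\nb: *id\n";
 *     yaml_parser_initialize(&parser);
 *     yaml_parser_set_input_string(&parser,
 *             (const unsigned char *)in, strlen(in));
 *     if (yaml_parser_load(&parser, &doc)) {
 *         yaml_node_pair_t *pairs =
 *             yaml_document_get_root_node(&doc)->data.mapping.pairs.start;
 *         assert(pairs[0].value == pairs[1].value);
 *         yaml_document_delete(&doc);
 *     }
 *     yaml_parser_delete(&parser);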
*/ static int yaml_parser_load_alias(yaml_parser_t *parser, yaml_event_t *first_event) { yaml_char_t *anchor = first_event->data.alias.anchor; yaml_alias_data_t *alias_data; for (alias_data = parser->aliases.start; alias_data != parser->aliases.top; alias_data ++) { if (strcmp((char *)alias_data->anchor, (char *)anchor) == 0) { yaml_free(anchor); return alias_data->index; } } yaml_free(anchor); return yaml_parser_set_composer_error(parser, "found undefined alias", first_event->start_mark); } /* * Compose a scalar node. */ static int yaml_parser_load_scalar(yaml_parser_t *parser, yaml_event_t *first_event) { yaml_node_t node; int index; yaml_char_t *tag = first_event->data.scalar.tag; if (!STACK_LIMIT(parser, parser->document->nodes, INT_MAX-1)) goto error; if (!tag || strcmp((char *)tag, "!") == 0) { yaml_free(tag); tag = yaml_strdup((yaml_char_t *)YAML_DEFAULT_SCALAR_TAG); if (!tag) goto error; } SCALAR_NODE_INIT(node, tag, first_event->data.scalar.value, first_event->data.scalar.length, first_event->data.scalar.style, first_event->start_mark, first_event->end_mark); if (!PUSH(parser, parser->document->nodes, node)) goto error; index = parser->document->nodes.top - parser->document->nodes.start; if (!yaml_parser_register_anchor(parser, index, first_event->data.scalar.anchor)) return 0; return index; error: yaml_free(tag); yaml_free(first_event->data.scalar.anchor); yaml_free(first_event->data.scalar.value); return 0; } /* * Compose a sequence node. */ static int yaml_parser_load_sequence(yaml_parser_t *parser, yaml_event_t *first_event) { yaml_event_t event; yaml_node_t node; struct { yaml_node_item_t *start; yaml_node_item_t *end; yaml_node_item_t *top; } items = { NULL, NULL, NULL }; int index, item_index; yaml_char_t *tag = first_event->data.sequence_start.tag; if (!STACK_LIMIT(parser, parser->document->nodes, INT_MAX-1)) goto error; if (!tag || strcmp((char *)tag, "!") == 0) { yaml_free(tag); tag = yaml_strdup((yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG); if (!tag) goto error; } if (!STACK_INIT(parser, items, INITIAL_STACK_SIZE)) goto error; SEQUENCE_NODE_INIT(node, tag, items.start, items.end, first_event->data.sequence_start.style, first_event->start_mark, first_event->end_mark); if (!PUSH(parser, parser->document->nodes, node)) goto error; index = parser->document->nodes.top - parser->document->nodes.start; if (!yaml_parser_register_anchor(parser, index, first_event->data.sequence_start.anchor)) return 0; if (!yaml_parser_parse(parser, &event)) return 0; while (event.type != YAML_SEQUENCE_END_EVENT) { if (!STACK_LIMIT(parser, parser->document->nodes.start[index-1].data.sequence.items, INT_MAX-1)) return 0; item_index = yaml_parser_load_node(parser, &event); if (!item_index) return 0; if (!PUSH(parser, parser->document->nodes.start[index-1].data.sequence.items, item_index)) return 0; if (!yaml_parser_parse(parser, &event)) return 0; } parser->document->nodes.start[index-1].end_mark = event.end_mark; return index; error: yaml_free(tag); yaml_free(first_event->data.sequence_start.anchor); return 0; } /* * Compose a mapping node. 
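 *
 * [Editor's note: illustrative sketch, not part of the bundled libyaml
 * source; assumes doc was populated by yaml_parser_load() as sketched
 * earlier.] The indices returned by these loaders are 1-based offsets into
 * document->nodes, so a caller walks a loaded mapping by converting each
 * pair back into nodes with yaml_document_get_node():
 *
 *     yaml_node_t *map = yaml_document_get_root_node(&doc);
 *     yaml_node_pair_t *p;
 *     if (map && map->type == YAML_MAPPING_NODE) {
 *         for (p = map->data.mapping.pairs.start;
 *              p != map->data.mapping.pairs.top; p++) {
 *             yaml_node_t *key = yaml_document_get_node(&doc, p->key);
 *             yaml_node_t *value = yaml_document_get_node(&doc, p->value);
 *             if (key && key->type == YAML_SCALAR_NODE)
 *                 printf("%s\n", (char *)key->data.scalar.value);
 *             (void)value;
 *         }
 *     }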
*/ static int yaml_parser_load_mapping(yaml_parser_t *parser, yaml_event_t *first_event) { yaml_event_t event; yaml_node_t node; struct { yaml_node_pair_t *start; yaml_node_pair_t *end; yaml_node_pair_t *top; } pairs = { NULL, NULL, NULL }; int index; yaml_node_pair_t pair; yaml_char_t *tag = first_event->data.mapping_start.tag; if (!STACK_LIMIT(parser, parser->document->nodes, INT_MAX-1)) goto error; if (!tag || strcmp((char *)tag, "!") == 0) { yaml_free(tag); tag = yaml_strdup((yaml_char_t *)YAML_DEFAULT_MAPPING_TAG); if (!tag) goto error; } if (!STACK_INIT(parser, pairs, INITIAL_STACK_SIZE)) goto error; MAPPING_NODE_INIT(node, tag, pairs.start, pairs.end, first_event->data.mapping_start.style, first_event->start_mark, first_event->end_mark); if (!PUSH(parser, parser->document->nodes, node)) goto error; index = parser->document->nodes.top - parser->document->nodes.start; if (!yaml_parser_register_anchor(parser, index, first_event->data.mapping_start.anchor)) return 0; if (!yaml_parser_parse(parser, &event)) return 0; while (event.type != YAML_MAPPING_END_EVENT) { if (!STACK_LIMIT(parser, parser->document->nodes.start[index-1].data.mapping.pairs, INT_MAX-1)) return 0; pair.key = yaml_parser_load_node(parser, &event); if (!pair.key) return 0; if (!yaml_parser_parse(parser, &event)) return 0; pair.value = yaml_parser_load_node(parser, &event); if (!pair.value) return 0; if (!PUSH(parser, parser->document->nodes.start[index-1].data.mapping.pairs, pair)) return 0; if (!yaml_parser_parse(parser, &event)) return 0; } parser->document->nodes.start[index-1].end_mark = event.end_mark; return index; error: yaml_free(tag); yaml_free(first_event->data.mapping_start.anchor); return 0; } psych-2.2.4/ext/psych/yaml/parser.c000066400000000000000000001302071305404671600172060ustar00rootroot00000000000000 /* * The parser implements the following grammar: * * stream ::= STREAM-START implicit_document? explicit_document* STREAM-END * implicit_document ::= block_node DOCUMENT-END* * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* * block_node_or_indentless_sequence ::= * ALIAS * | properties (block_content | indentless_block_sequence)? * | block_content * | indentless_block_sequence * block_node ::= ALIAS * | properties block_content? * | block_content * flow_node ::= ALIAS * | properties flow_content? * | flow_content * properties ::= TAG ANCHOR? | ANCHOR TAG? * block_content ::= block_collection | flow_collection | SCALAR * flow_content ::= flow_collection | SCALAR * block_collection ::= block_sequence | block_mapping * flow_collection ::= flow_sequence | flow_mapping * block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END * indentless_sequence ::= (BLOCK-ENTRY block_node?)+ * block_mapping ::= BLOCK-MAPPING_START * ((KEY block_node_or_indentless_sequence?)? * (VALUE block_node_or_indentless_sequence?)?)* * BLOCK-END * flow_sequence ::= FLOW-SEQUENCE-START * (flow_sequence_entry FLOW-ENTRY)* * flow_sequence_entry? * FLOW-SEQUENCE-END * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? * flow_mapping ::= FLOW-MAPPING-START * (flow_mapping_entry FLOW-ENTRY)* * flow_mapping_entry? * FLOW-MAPPING-END * flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? */ #include "yaml_private.h" /* * Peek the next token in the token queue. */ #define PEEK_TOKEN(parser) \ ((parser->token_available || yaml_parser_fetch_more_tokens(parser)) ? 
\ parser->tokens.head : NULL) /* * Remove the next token from the queue (must be called after PEEK_TOKEN). */ #define SKIP_TOKEN(parser) \ (parser->token_available = 0, \ parser->tokens_parsed ++, \ parser->stream_end_produced = \ (parser->tokens.head->type == YAML_STREAM_END_TOKEN), \ parser->tokens.head ++) /* * Public API declarations. */ YAML_DECLARE(int) yaml_parser_parse(yaml_parser_t *parser, yaml_event_t *event); /* * Error handling. */ static int yaml_parser_set_parser_error(yaml_parser_t *parser, const char *problem, yaml_mark_t problem_mark); static int yaml_parser_set_parser_error_context(yaml_parser_t *parser, const char *context, yaml_mark_t context_mark, const char *problem, yaml_mark_t problem_mark); /* * State functions. */ static int yaml_parser_state_machine(yaml_parser_t *parser, yaml_event_t *event); static int yaml_parser_parse_stream_start(yaml_parser_t *parser, yaml_event_t *event); static int yaml_parser_parse_document_start(yaml_parser_t *parser, yaml_event_t *event, int implicit); static int yaml_parser_parse_document_content(yaml_parser_t *parser, yaml_event_t *event); static int yaml_parser_parse_document_end(yaml_parser_t *parser, yaml_event_t *event); static int yaml_parser_parse_node(yaml_parser_t *parser, yaml_event_t *event, int block, int indentless_sequence); static int yaml_parser_parse_block_sequence_entry(yaml_parser_t *parser, yaml_event_t *event, int first); static int yaml_parser_parse_indentless_sequence_entry(yaml_parser_t *parser, yaml_event_t *event); static int yaml_parser_parse_block_mapping_key(yaml_parser_t *parser, yaml_event_t *event, int first); static int yaml_parser_parse_block_mapping_value(yaml_parser_t *parser, yaml_event_t *event); static int yaml_parser_parse_flow_sequence_entry(yaml_parser_t *parser, yaml_event_t *event, int first); static int yaml_parser_parse_flow_sequence_entry_mapping_key(yaml_parser_t *parser, yaml_event_t *event); static int yaml_parser_parse_flow_sequence_entry_mapping_value(yaml_parser_t *parser, yaml_event_t *event); static int yaml_parser_parse_flow_sequence_entry_mapping_end(yaml_parser_t *parser, yaml_event_t *event); static int yaml_parser_parse_flow_mapping_key(yaml_parser_t *parser, yaml_event_t *event, int first); static int yaml_parser_parse_flow_mapping_value(yaml_parser_t *parser, yaml_event_t *event, int empty); /* * Utility functions. */ static int yaml_parser_process_empty_scalar(yaml_parser_t *parser, yaml_event_t *event, yaml_mark_t mark); static int yaml_parser_process_directives(yaml_parser_t *parser, yaml_version_directive_t **version_directive_ref, yaml_tag_directive_t **tag_directives_start_ref, yaml_tag_directive_t **tag_directives_end_ref); static int yaml_parser_append_tag_directive(yaml_parser_t *parser, yaml_tag_directive_t value, int allow_duplicates, yaml_mark_t mark); /* * Get the next event. */ YAML_DECLARE(int) yaml_parser_parse(yaml_parser_t *parser, yaml_event_t *event) { assert(parser); /* Non-NULL parser object is expected. */ assert(event); /* Non-NULL event object is expected. */ /* Erase the event object. */ memset(event, 0, sizeof(yaml_event_t)); /* No events after the end of the stream or error. */ if (parser->stream_end_produced || parser->error || parser->state == YAML_PARSE_END_STATE) { return 1; } /* Generate the next event. */ return yaml_parser_state_machine(parser, event); } /* * Set parser error. 
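 *
 * [Editor's note: illustrative sketch, not part of the bundled libyaml
 * source.] yaml_parser_parse() above is the event-level API; a caller pulls
 * events until YAML_STREAM_END_EVENT and must delete every event it
 * receives:
 *
 *     yaml_event_t event;
 *     int done = 0;
 *     while (!done) {
 *         if (!yaml_parser_parse(&parser, &event))
 *             break;
 *         done = (event.type == YAML_STREAM_END_EVENT);
 *         yaml_event_delete(&event);
 *     }
 *
 * On the failure path the helpers below record parser->problem and
 * parser->problem_mark so the caller can report where parsing stopped.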
*/ static int yaml_parser_set_parser_error(yaml_parser_t *parser, const char *problem, yaml_mark_t problem_mark) { parser->error = YAML_PARSER_ERROR; parser->problem = problem; parser->problem_mark = problem_mark; return 0; } static int yaml_parser_set_parser_error_context(yaml_parser_t *parser, const char *context, yaml_mark_t context_mark, const char *problem, yaml_mark_t problem_mark) { parser->error = YAML_PARSER_ERROR; parser->context = context; parser->context_mark = context_mark; parser->problem = problem; parser->problem_mark = problem_mark; return 0; } /* * State dispatcher. */ static int yaml_parser_state_machine(yaml_parser_t *parser, yaml_event_t *event) { switch (parser->state) { case YAML_PARSE_STREAM_START_STATE: return yaml_parser_parse_stream_start(parser, event); case YAML_PARSE_IMPLICIT_DOCUMENT_START_STATE: return yaml_parser_parse_document_start(parser, event, 1); case YAML_PARSE_DOCUMENT_START_STATE: return yaml_parser_parse_document_start(parser, event, 0); case YAML_PARSE_DOCUMENT_CONTENT_STATE: return yaml_parser_parse_document_content(parser, event); case YAML_PARSE_DOCUMENT_END_STATE: return yaml_parser_parse_document_end(parser, event); case YAML_PARSE_BLOCK_NODE_STATE: return yaml_parser_parse_node(parser, event, 1, 0); case YAML_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: return yaml_parser_parse_node(parser, event, 1, 1); case YAML_PARSE_FLOW_NODE_STATE: return yaml_parser_parse_node(parser, event, 0, 0); case YAML_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: return yaml_parser_parse_block_sequence_entry(parser, event, 1); case YAML_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: return yaml_parser_parse_block_sequence_entry(parser, event, 0); case YAML_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: return yaml_parser_parse_indentless_sequence_entry(parser, event); case YAML_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: return yaml_parser_parse_block_mapping_key(parser, event, 1); case YAML_PARSE_BLOCK_MAPPING_KEY_STATE: return yaml_parser_parse_block_mapping_key(parser, event, 0); case YAML_PARSE_BLOCK_MAPPING_VALUE_STATE: return yaml_parser_parse_block_mapping_value(parser, event); case YAML_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: return yaml_parser_parse_flow_sequence_entry(parser, event, 1); case YAML_PARSE_FLOW_SEQUENCE_ENTRY_STATE: return yaml_parser_parse_flow_sequence_entry(parser, event, 0); case YAML_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event); case YAML_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event); case YAML_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event); case YAML_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: return yaml_parser_parse_flow_mapping_key(parser, event, 1); case YAML_PARSE_FLOW_MAPPING_KEY_STATE: return yaml_parser_parse_flow_mapping_key(parser, event, 0); case YAML_PARSE_FLOW_MAPPING_VALUE_STATE: return yaml_parser_parse_flow_mapping_value(parser, event, 0); case YAML_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: return yaml_parser_parse_flow_mapping_value(parser, event, 1); default: assert(1); /* Invalid state. */ } return 0; } /* * Parse the production: * stream ::= STREAM-START implicit_document? 
explicit_document* STREAM-END * ************ */ static int yaml_parser_parse_stream_start(yaml_parser_t *parser, yaml_event_t *event) { yaml_token_t *token; token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type != YAML_STREAM_START_TOKEN) { return yaml_parser_set_parser_error(parser, "did not find expected <stream-start>", token->start_mark); } parser->state = YAML_PARSE_IMPLICIT_DOCUMENT_START_STATE; STREAM_START_EVENT_INIT(*event, token->data.stream_start.encoding, token->start_mark, token->start_mark); SKIP_TOKEN(parser); return 1; } /* * Parse the productions: * implicit_document ::= block_node DOCUMENT-END* * * * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* * ************************* */ static int yaml_parser_parse_document_start(yaml_parser_t *parser, yaml_event_t *event, int implicit) { yaml_token_t *token; yaml_version_directive_t *version_directive = NULL; struct { yaml_tag_directive_t *start; yaml_tag_directive_t *end; } tag_directives = { NULL, NULL }; token = PEEK_TOKEN(parser); if (!token) return 0; /* Parse extra document end indicators. */ if (!implicit) { while (token->type == YAML_DOCUMENT_END_TOKEN) { SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) return 0; } } /* Parse an implicit document. */ if (implicit && token->type != YAML_VERSION_DIRECTIVE_TOKEN && token->type != YAML_TAG_DIRECTIVE_TOKEN && token->type != YAML_DOCUMENT_START_TOKEN && token->type != YAML_STREAM_END_TOKEN) { if (!yaml_parser_process_directives(parser, NULL, NULL, NULL)) return 0; if (!PUSH(parser, parser->states, YAML_PARSE_DOCUMENT_END_STATE)) return 0; parser->state = YAML_PARSE_BLOCK_NODE_STATE; DOCUMENT_START_EVENT_INIT(*event, NULL, NULL, NULL, 1, token->start_mark, token->start_mark); return 1; } /* Parse an explicit document. */ else if (token->type != YAML_STREAM_END_TOKEN) { yaml_mark_t start_mark, end_mark; start_mark = token->start_mark; if (!yaml_parser_process_directives(parser, &version_directive, &tag_directives.start, &tag_directives.end)) return 0; token = PEEK_TOKEN(parser); if (!token) goto error; if (token->type != YAML_DOCUMENT_START_TOKEN) { yaml_parser_set_parser_error(parser, "did not find expected <document start>", token->start_mark); goto error; } if (!PUSH(parser, parser->states, YAML_PARSE_DOCUMENT_END_STATE)) goto error; parser->state = YAML_PARSE_DOCUMENT_CONTENT_STATE; end_mark = token->end_mark; DOCUMENT_START_EVENT_INIT(*event, version_directive, tag_directives.start, tag_directives.end, 0, start_mark, end_mark); SKIP_TOKEN(parser); version_directive = NULL; tag_directives.start = tag_directives.end = NULL; return 1; } /* Parse the stream end. */ else { parser->state = YAML_PARSE_END_STATE; STREAM_END_EVENT_INIT(*event, token->start_mark, token->end_mark); SKIP_TOKEN(parser); return 1; } error: yaml_free(version_directive); while (tag_directives.start != tag_directives.end) { yaml_free(tag_directives.end[-1].handle); yaml_free(tag_directives.end[-1].prefix); tag_directives.end --; } yaml_free(tag_directives.start); return 0; } /* * Parse the productions: * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? 
DOCUMENT-END* * *********** */ static int yaml_parser_parse_document_content(yaml_parser_t *parser, yaml_event_t *event) { yaml_token_t *token; token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type == YAML_VERSION_DIRECTIVE_TOKEN || token->type == YAML_TAG_DIRECTIVE_TOKEN || token->type == YAML_DOCUMENT_START_TOKEN || token->type == YAML_DOCUMENT_END_TOKEN || token->type == YAML_STREAM_END_TOKEN) { parser->state = POP(parser, parser->states); return yaml_parser_process_empty_scalar(parser, event, token->start_mark); } else { return yaml_parser_parse_node(parser, event, 1, 0); } } /* * Parse the productions: * implicit_document ::= block_node DOCUMENT-END* * ************* * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* * ************* */ static int yaml_parser_parse_document_end(yaml_parser_t *parser, yaml_event_t *event) { yaml_token_t *token; yaml_mark_t start_mark, end_mark; int implicit = 1; token = PEEK_TOKEN(parser); if (!token) return 0; start_mark = end_mark = token->start_mark; if (token->type == YAML_DOCUMENT_END_TOKEN) { end_mark = token->end_mark; SKIP_TOKEN(parser); implicit = 0; } while (!STACK_EMPTY(parser, parser->tag_directives)) { yaml_tag_directive_t tag_directive = POP(parser, parser->tag_directives); yaml_free(tag_directive.handle); yaml_free(tag_directive.prefix); } parser->state = YAML_PARSE_DOCUMENT_START_STATE; DOCUMENT_END_EVENT_INIT(*event, implicit, start_mark, end_mark); return 1; } /* * Parse the productions: * block_node_or_indentless_sequence ::= * ALIAS * ***** * | properties (block_content | indentless_block_sequence)? * ********** * * | block_content | indentless_block_sequence * * * block_node ::= ALIAS * ***** * | properties block_content? * ********** * * | block_content * * * flow_node ::= ALIAS * ***** * | properties flow_content? * ********** * * | flow_content * * * properties ::= TAG ANCHOR? | ANCHOR TAG? 
* ************************* * block_content ::= block_collection | flow_collection | SCALAR * ****** * flow_content ::= flow_collection | SCALAR * ****** */ static int yaml_parser_parse_node(yaml_parser_t *parser, yaml_event_t *event, int block, int indentless_sequence) { yaml_token_t *token; yaml_char_t *anchor = NULL; yaml_char_t *tag_handle = NULL; yaml_char_t *tag_suffix = NULL; yaml_char_t *tag = NULL; yaml_mark_t start_mark, end_mark, tag_mark; int implicit; token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type == YAML_ALIAS_TOKEN) { parser->state = POP(parser, parser->states); ALIAS_EVENT_INIT(*event, token->data.alias.value, token->start_mark, token->end_mark); SKIP_TOKEN(parser); return 1; } else { start_mark = end_mark = token->start_mark; if (token->type == YAML_ANCHOR_TOKEN) { anchor = token->data.anchor.value; start_mark = token->start_mark; end_mark = token->end_mark; SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) goto error; if (token->type == YAML_TAG_TOKEN) { tag_handle = token->data.tag.handle; tag_suffix = token->data.tag.suffix; tag_mark = token->start_mark; end_mark = token->end_mark; SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) goto error; } } else if (token->type == YAML_TAG_TOKEN) { tag_handle = token->data.tag.handle; tag_suffix = token->data.tag.suffix; start_mark = tag_mark = token->start_mark; end_mark = token->end_mark; SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) goto error; if (token->type == YAML_ANCHOR_TOKEN) { anchor = token->data.anchor.value; end_mark = token->end_mark; SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) goto error; } } if (tag_handle) { if (!*tag_handle) { tag = tag_suffix; yaml_free(tag_handle); tag_handle = tag_suffix = NULL; } else { yaml_tag_directive_t *tag_directive; for (tag_directive = parser->tag_directives.start; tag_directive != parser->tag_directives.top; tag_directive ++) { if (strcmp((char *)tag_directive->handle, (char *)tag_handle) == 0) { size_t prefix_len = strlen((char *)tag_directive->prefix); size_t suffix_len = strlen((char *)tag_suffix); tag = yaml_malloc(prefix_len+suffix_len+1); if (!tag) { parser->error = YAML_MEMORY_ERROR; goto error; } memcpy(tag, tag_directive->prefix, prefix_len); memcpy(tag+prefix_len, tag_suffix, suffix_len); tag[prefix_len+suffix_len] = '\0'; yaml_free(tag_handle); yaml_free(tag_suffix); tag_handle = tag_suffix = NULL; break; } } if (!tag) { yaml_parser_set_parser_error_context(parser, "while parsing a node", start_mark, "found undefined tag handle", tag_mark); goto error; } } } implicit = (!tag || !*tag); if (indentless_sequence && token->type == YAML_BLOCK_ENTRY_TOKEN) { end_mark = token->end_mark; parser->state = YAML_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE; SEQUENCE_START_EVENT_INIT(*event, anchor, tag, implicit, YAML_BLOCK_SEQUENCE_STYLE, start_mark, end_mark); return 1; } else { if (token->type == YAML_SCALAR_TOKEN) { int plain_implicit = 0; int quoted_implicit = 0; end_mark = token->end_mark; if ((token->data.scalar.style == YAML_PLAIN_SCALAR_STYLE && !tag) || (tag && strcmp((char *)tag, "!") == 0)) { plain_implicit = 1; } else if (!tag) { quoted_implicit = 1; } parser->state = POP(parser, parser->states); SCALAR_EVENT_INIT(*event, anchor, tag, token->data.scalar.value, token->data.scalar.length, plain_implicit, quoted_implicit, token->data.scalar.style, start_mark, end_mark); SKIP_TOKEN(parser); return 1; } else if (token->type == YAML_FLOW_SEQUENCE_START_TOKEN) { end_mark = token->end_mark; parser->state = 
YAML_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE; SEQUENCE_START_EVENT_INIT(*event, anchor, tag, implicit, YAML_FLOW_SEQUENCE_STYLE, start_mark, end_mark); return 1; } else if (token->type == YAML_FLOW_MAPPING_START_TOKEN) { end_mark = token->end_mark; parser->state = YAML_PARSE_FLOW_MAPPING_FIRST_KEY_STATE; MAPPING_START_EVENT_INIT(*event, anchor, tag, implicit, YAML_FLOW_MAPPING_STYLE, start_mark, end_mark); return 1; } else if (block && token->type == YAML_BLOCK_SEQUENCE_START_TOKEN) { end_mark = token->end_mark; parser->state = YAML_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE; SEQUENCE_START_EVENT_INIT(*event, anchor, tag, implicit, YAML_BLOCK_SEQUENCE_STYLE, start_mark, end_mark); return 1; } else if (block && token->type == YAML_BLOCK_MAPPING_START_TOKEN) { end_mark = token->end_mark; parser->state = YAML_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE; MAPPING_START_EVENT_INIT(*event, anchor, tag, implicit, YAML_BLOCK_MAPPING_STYLE, start_mark, end_mark); return 1; } else if (anchor || tag) { yaml_char_t *value = yaml_malloc(1); if (!value) { parser->error = YAML_MEMORY_ERROR; goto error; } value[0] = '\0'; parser->state = POP(parser, parser->states); SCALAR_EVENT_INIT(*event, anchor, tag, value, 0, implicit, 0, YAML_PLAIN_SCALAR_STYLE, start_mark, end_mark); return 1; } else { yaml_parser_set_parser_error_context(parser, (block ? "while parsing a block node" : "while parsing a flow node"), start_mark, "did not find expected node content", token->start_mark); goto error; } } } error: yaml_free(anchor); yaml_free(tag_handle); yaml_free(tag_suffix); yaml_free(tag); return 0; } /* * Parse the productions: * block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END * ******************** *********** * ********* */ static int yaml_parser_parse_block_sequence_entry(yaml_parser_t *parser, yaml_event_t *event, int first) { yaml_token_t *token; if (first) { token = PEEK_TOKEN(parser); if (!PUSH(parser, parser->marks, token->start_mark)) return 0; SKIP_TOKEN(parser); } token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type == YAML_BLOCK_ENTRY_TOKEN) { yaml_mark_t mark = token->end_mark; SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type != YAML_BLOCK_ENTRY_TOKEN && token->type != YAML_BLOCK_END_TOKEN) { if (!PUSH(parser, parser->states, YAML_PARSE_BLOCK_SEQUENCE_ENTRY_STATE)) return 0; return yaml_parser_parse_node(parser, event, 1, 0); } else { parser->state = YAML_PARSE_BLOCK_SEQUENCE_ENTRY_STATE; return yaml_parser_process_empty_scalar(parser, event, mark); } } else if (token->type == YAML_BLOCK_END_TOKEN) { yaml_mark_t dummy_mark; /* Used to eliminate a compiler warning. 
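 *
 * [Editor's note: illustrative example, not part of the bundled libyaml
 * source.] For the input "- foo\n- bar\n" the scanner produces the tokens
 *
 *     BLOCK-SEQUENCE-START BLOCK-ENTRY SCALAR("foo")
 *     BLOCK-ENTRY SCALAR("bar") BLOCK-END
 *
 * and the parser turns them into SEQUENCE-START, SCALAR("foo"),
 * SCALAR("bar") and SEQUENCE-END events; the YAML_BLOCK_END_TOKEN branch
 * just below is what produces that final SEQUENCE-END.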
*/ parser->state = POP(parser, parser->states); dummy_mark = POP(parser, parser->marks); SEQUENCE_END_EVENT_INIT(*event, token->start_mark, token->end_mark); SKIP_TOKEN(parser); return 1; } else { return yaml_parser_set_parser_error_context(parser, "while parsing a block collection", POP(parser, parser->marks), "did not find expected '-' indicator", token->start_mark); } } /* * Parse the productions: * indentless_sequence ::= (BLOCK-ENTRY block_node?)+ * *********** * */ static int yaml_parser_parse_indentless_sequence_entry(yaml_parser_t *parser, yaml_event_t *event) { yaml_token_t *token; token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type == YAML_BLOCK_ENTRY_TOKEN) { yaml_mark_t mark = token->end_mark; SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type != YAML_BLOCK_ENTRY_TOKEN && token->type != YAML_KEY_TOKEN && token->type != YAML_VALUE_TOKEN && token->type != YAML_BLOCK_END_TOKEN) { if (!PUSH(parser, parser->states, YAML_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE)) return 0; return yaml_parser_parse_node(parser, event, 1, 0); } else { parser->state = YAML_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE; return yaml_parser_process_empty_scalar(parser, event, mark); } } else { parser->state = POP(parser, parser->states); SEQUENCE_END_EVENT_INIT(*event, token->start_mark, token->start_mark); return 1; } } /* * Parse the productions: * block_mapping ::= BLOCK-MAPPING_START * ******************* * ((KEY block_node_or_indentless_sequence?)? * *** * * (VALUE block_node_or_indentless_sequence?)?)* * * BLOCK-END * ********* */ static int yaml_parser_parse_block_mapping_key(yaml_parser_t *parser, yaml_event_t *event, int first) { yaml_token_t *token; if (first) { token = PEEK_TOKEN(parser); if (!PUSH(parser, parser->marks, token->start_mark)) return 0; SKIP_TOKEN(parser); } token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type == YAML_KEY_TOKEN) { yaml_mark_t mark = token->end_mark; SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type != YAML_KEY_TOKEN && token->type != YAML_VALUE_TOKEN && token->type != YAML_BLOCK_END_TOKEN) { if (!PUSH(parser, parser->states, YAML_PARSE_BLOCK_MAPPING_VALUE_STATE)) return 0; return yaml_parser_parse_node(parser, event, 1, 1); } else { parser->state = YAML_PARSE_BLOCK_MAPPING_VALUE_STATE; return yaml_parser_process_empty_scalar(parser, event, mark); } } else if (token->type == YAML_BLOCK_END_TOKEN) { yaml_mark_t dummy_mark; /* Used to eliminate a compiler warning. */ parser->state = POP(parser, parser->states); dummy_mark = POP(parser, parser->marks); MAPPING_END_EVENT_INIT(*event, token->start_mark, token->end_mark); SKIP_TOKEN(parser); return 1; } else { return yaml_parser_set_parser_error_context(parser, "while parsing a block mapping", POP(parser, parser->marks), "did not find expected key", token->start_mark); } } /* * Parse the productions: * block_mapping ::= BLOCK-MAPPING_START * * ((KEY block_node_or_indentless_sequence?)? 
* * (VALUE block_node_or_indentless_sequence?)?)* * ***** * * BLOCK-END * */ static int yaml_parser_parse_block_mapping_value(yaml_parser_t *parser, yaml_event_t *event) { yaml_token_t *token; token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type == YAML_VALUE_TOKEN) { yaml_mark_t mark = token->end_mark; SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type != YAML_KEY_TOKEN && token->type != YAML_VALUE_TOKEN && token->type != YAML_BLOCK_END_TOKEN) { if (!PUSH(parser, parser->states, YAML_PARSE_BLOCK_MAPPING_KEY_STATE)) return 0; return yaml_parser_parse_node(parser, event, 1, 1); } else { parser->state = YAML_PARSE_BLOCK_MAPPING_KEY_STATE; return yaml_parser_process_empty_scalar(parser, event, mark); } } else { parser->state = YAML_PARSE_BLOCK_MAPPING_KEY_STATE; return yaml_parser_process_empty_scalar(parser, event, token->start_mark); } } /* * Parse the productions: * flow_sequence ::= FLOW-SEQUENCE-START * ******************* * (flow_sequence_entry FLOW-ENTRY)* * * ********** * flow_sequence_entry? * * * FLOW-SEQUENCE-END * ***************** * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? * * */ static int yaml_parser_parse_flow_sequence_entry(yaml_parser_t *parser, yaml_event_t *event, int first) { yaml_token_t *token; yaml_mark_t dummy_mark; /* Used to eliminate a compiler warning. */ if (first) { token = PEEK_TOKEN(parser); if (!PUSH(parser, parser->marks, token->start_mark)) return 0; SKIP_TOKEN(parser); } token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type != YAML_FLOW_SEQUENCE_END_TOKEN) { if (!first) { if (token->type == YAML_FLOW_ENTRY_TOKEN) { SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) return 0; } else { return yaml_parser_set_parser_error_context(parser, "while parsing a flow sequence", POP(parser, parser->marks), "did not find expected ',' or ']'", token->start_mark); } } if (token->type == YAML_KEY_TOKEN) { parser->state = YAML_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE; MAPPING_START_EVENT_INIT(*event, NULL, NULL, 1, YAML_FLOW_MAPPING_STYLE, token->start_mark, token->end_mark); SKIP_TOKEN(parser); return 1; } else if (token->type != YAML_FLOW_SEQUENCE_END_TOKEN) { if (!PUSH(parser, parser->states, YAML_PARSE_FLOW_SEQUENCE_ENTRY_STATE)) return 0; return yaml_parser_parse_node(parser, event, 0, 0); } } parser->state = POP(parser, parser->states); dummy_mark = POP(parser, parser->marks); SEQUENCE_END_EVENT_INIT(*event, token->start_mark, token->end_mark); SKIP_TOKEN(parser); return 1; } /* * Parse the productions: * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? * *** * */ static int yaml_parser_parse_flow_sequence_entry_mapping_key(yaml_parser_t *parser, yaml_event_t *event) { yaml_token_t *token; token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type != YAML_VALUE_TOKEN && token->type != YAML_FLOW_ENTRY_TOKEN && token->type != YAML_FLOW_SEQUENCE_END_TOKEN) { if (!PUSH(parser, parser->states, YAML_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE)) return 0; return yaml_parser_parse_node(parser, event, 0, 0); } else { yaml_mark_t mark = token->end_mark; SKIP_TOKEN(parser); parser->state = YAML_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE; return yaml_parser_process_empty_scalar(parser, event, mark); } } /* * Parse the productions: * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
* ***** * */ static int yaml_parser_parse_flow_sequence_entry_mapping_value(yaml_parser_t *parser, yaml_event_t *event) { yaml_token_t *token; token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type == YAML_VALUE_TOKEN) { SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type != YAML_FLOW_ENTRY_TOKEN && token->type != YAML_FLOW_SEQUENCE_END_TOKEN) { if (!PUSH(parser, parser->states, YAML_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE)) return 0; return yaml_parser_parse_node(parser, event, 0, 0); } } parser->state = YAML_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE; return yaml_parser_process_empty_scalar(parser, event, token->start_mark); } /* * Parse the productions: * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? * * */ static int yaml_parser_parse_flow_sequence_entry_mapping_end(yaml_parser_t *parser, yaml_event_t *event) { yaml_token_t *token; token = PEEK_TOKEN(parser); if (!token) return 0; parser->state = YAML_PARSE_FLOW_SEQUENCE_ENTRY_STATE; MAPPING_END_EVENT_INIT(*event, token->start_mark, token->start_mark); return 1; } /* * Parse the productions: * flow_mapping ::= FLOW-MAPPING-START * ****************** * (flow_mapping_entry FLOW-ENTRY)* * * ********** * flow_mapping_entry? * ****************** * FLOW-MAPPING-END * **************** * flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? * * *** * */ static int yaml_parser_parse_flow_mapping_key(yaml_parser_t *parser, yaml_event_t *event, int first) { yaml_token_t *token; yaml_mark_t dummy_mark; /* Used to eliminate a compiler warning. */ if (first) { token = PEEK_TOKEN(parser); if (!PUSH(parser, parser->marks, token->start_mark)) return 0; SKIP_TOKEN(parser); } token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type != YAML_FLOW_MAPPING_END_TOKEN) { if (!first) { if (token->type == YAML_FLOW_ENTRY_TOKEN) { SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) return 0; } else { return yaml_parser_set_parser_error_context(parser, "while parsing a flow mapping", POP(parser, parser->marks), "did not find expected ',' or '}'", token->start_mark); } } if (token->type == YAML_KEY_TOKEN) { SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type != YAML_VALUE_TOKEN && token->type != YAML_FLOW_ENTRY_TOKEN && token->type != YAML_FLOW_MAPPING_END_TOKEN) { if (!PUSH(parser, parser->states, YAML_PARSE_FLOW_MAPPING_VALUE_STATE)) return 0; return yaml_parser_parse_node(parser, event, 0, 0); } else { parser->state = YAML_PARSE_FLOW_MAPPING_VALUE_STATE; return yaml_parser_process_empty_scalar(parser, event, token->start_mark); } } else if (token->type != YAML_FLOW_MAPPING_END_TOKEN) { if (!PUSH(parser, parser->states, YAML_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE)) return 0; return yaml_parser_parse_node(parser, event, 0, 0); } } parser->state = POP(parser, parser->states); dummy_mark = POP(parser, parser->marks); MAPPING_END_EVENT_INIT(*event, token->start_mark, token->end_mark); SKIP_TOKEN(parser); return 1; } /* * Parse the productions: * flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
* * ***** * */ static int yaml_parser_parse_flow_mapping_value(yaml_parser_t *parser, yaml_event_t *event, int empty) { yaml_token_t *token; token = PEEK_TOKEN(parser); if (!token) return 0; if (empty) { parser->state = YAML_PARSE_FLOW_MAPPING_KEY_STATE; return yaml_parser_process_empty_scalar(parser, event, token->start_mark); } if (token->type == YAML_VALUE_TOKEN) { SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) return 0; if (token->type != YAML_FLOW_ENTRY_TOKEN && token->type != YAML_FLOW_MAPPING_END_TOKEN) { if (!PUSH(parser, parser->states, YAML_PARSE_FLOW_MAPPING_KEY_STATE)) return 0; return yaml_parser_parse_node(parser, event, 0, 0); } } parser->state = YAML_PARSE_FLOW_MAPPING_KEY_STATE; return yaml_parser_process_empty_scalar(parser, event, token->start_mark); } /* * Generate an empty scalar event. */ static int yaml_parser_process_empty_scalar(yaml_parser_t *parser, yaml_event_t *event, yaml_mark_t mark) { yaml_char_t *value; value = yaml_malloc(1); if (!value) { parser->error = YAML_MEMORY_ERROR; return 0; } value[0] = '\0'; SCALAR_EVENT_INIT(*event, NULL, NULL, value, 0, 1, 0, YAML_PLAIN_SCALAR_STYLE, mark, mark); return 1; } /* * Parse directives. */ static int yaml_parser_process_directives(yaml_parser_t *parser, yaml_version_directive_t **version_directive_ref, yaml_tag_directive_t **tag_directives_start_ref, yaml_tag_directive_t **tag_directives_end_ref) { yaml_tag_directive_t default_tag_directives[] = { {(yaml_char_t *)"!", (yaml_char_t *)"!"}, {(yaml_char_t *)"!!", (yaml_char_t *)"tag:yaml.org,2002:"}, {NULL, NULL} }; yaml_tag_directive_t *default_tag_directive; yaml_version_directive_t *version_directive = NULL; struct { yaml_tag_directive_t *start; yaml_tag_directive_t *end; yaml_tag_directive_t *top; } tag_directives = { NULL, NULL, NULL }; yaml_token_t *token; if (!STACK_INIT(parser, tag_directives, INITIAL_STACK_SIZE)) goto error; token = PEEK_TOKEN(parser); if (!token) goto error; while (token->type == YAML_VERSION_DIRECTIVE_TOKEN || token->type == YAML_TAG_DIRECTIVE_TOKEN) { if (token->type == YAML_VERSION_DIRECTIVE_TOKEN) { if (version_directive) { yaml_parser_set_parser_error(parser, "found duplicate %YAML directive", token->start_mark); goto error; } if (token->data.version_directive.major != 1 || token->data.version_directive.minor != 1) { yaml_parser_set_parser_error(parser, "found incompatible YAML document", token->start_mark); goto error; } version_directive = yaml_malloc(sizeof(yaml_version_directive_t)); if (!version_directive) { parser->error = YAML_MEMORY_ERROR; goto error; } version_directive->major = token->data.version_directive.major; version_directive->minor = token->data.version_directive.minor; } else if (token->type == YAML_TAG_DIRECTIVE_TOKEN) { yaml_tag_directive_t value; value.handle = token->data.tag_directive.handle; value.prefix = token->data.tag_directive.prefix; if (!yaml_parser_append_tag_directive(parser, value, 0, token->start_mark)) goto error; if (!PUSH(parser, tag_directives, value)) goto error; } SKIP_TOKEN(parser); token = PEEK_TOKEN(parser); if (!token) goto error; } for (default_tag_directive = default_tag_directives; default_tag_directive->handle; default_tag_directive++) { if (!yaml_parser_append_tag_directive(parser, *default_tag_directive, 1, token->start_mark)) goto error; } if (version_directive_ref) { *version_directive_ref = version_directive; } if (tag_directives_start_ref) { if (STACK_EMPTY(parser, tag_directives)) { *tag_directives_start_ref = *tag_directives_end_ref = NULL; STACK_DEL(parser, 
tag_directives); } else { *tag_directives_start_ref = tag_directives.start; *tag_directives_end_ref = tag_directives.top; } } else { STACK_DEL(parser, tag_directives); } return 1; error: yaml_free(version_directive); while (!STACK_EMPTY(parser, tag_directives)) { yaml_tag_directive_t tag_directive = POP(parser, tag_directives); yaml_free(tag_directive.handle); yaml_free(tag_directive.prefix); } STACK_DEL(parser, tag_directives); return 0; } /* * Append a tag directive to the directives stack. */ static int yaml_parser_append_tag_directive(yaml_parser_t *parser, yaml_tag_directive_t value, int allow_duplicates, yaml_mark_t mark) { yaml_tag_directive_t *tag_directive; yaml_tag_directive_t copy = { NULL, NULL }; for (tag_directive = parser->tag_directives.start; tag_directive != parser->tag_directives.top; tag_directive ++) { if (strcmp((char *)value.handle, (char *)tag_directive->handle) == 0) { if (allow_duplicates) return 1; return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark); } } copy.handle = yaml_strdup(value.handle); copy.prefix = yaml_strdup(value.prefix); if (!copy.handle || !copy.prefix) { parser->error = YAML_MEMORY_ERROR; goto error; } if (!PUSH(parser, parser->tag_directives, copy)) goto error; return 1; error: yaml_free(copy.handle); yaml_free(copy.prefix); return 0; } psych-2.2.4/ext/psych/yaml/reader.c000066400000000000000000000404401305404671600171530ustar00rootroot00000000000000 #include "yaml_private.h" /* * Declarations. */ static int yaml_parser_set_reader_error(yaml_parser_t *parser, const char *problem, size_t offset, int value); static int yaml_parser_update_raw_buffer(yaml_parser_t *parser); static int yaml_parser_determine_encoding(yaml_parser_t *parser); YAML_DECLARE(int) yaml_parser_update_buffer(yaml_parser_t *parser, size_t length); /* * Set the reader error and return 0. */ static int yaml_parser_set_reader_error(yaml_parser_t *parser, const char *problem, size_t offset, int value) { parser->error = YAML_READER_ERROR; parser->problem = problem; parser->problem_offset = offset; parser->problem_value = value; return 0; } /* * Byte order marks. */ #define BOM_UTF8 "\xef\xbb\xbf" #define BOM_UTF16LE "\xff\xfe" #define BOM_UTF16BE "\xfe\xff" /* * Determine the input stream encoding by checking the BOM symbol. If no BOM is * found, the UTF-8 encoding is assumed. Return 1 on success, 0 on failure. */ static int yaml_parser_determine_encoding(yaml_parser_t *parser) { /* Ensure that we had enough bytes in the raw buffer. */ while (!parser->eof && parser->raw_buffer.last - parser->raw_buffer.pointer < 3) { if (!yaml_parser_update_raw_buffer(parser)) { return 0; } } /* Determine the encoding. */ if (parser->raw_buffer.last - parser->raw_buffer.pointer >= 2 && !memcmp(parser->raw_buffer.pointer, BOM_UTF16LE, 2)) { parser->encoding = YAML_UTF16LE_ENCODING; parser->raw_buffer.pointer += 2; parser->offset += 2; } else if (parser->raw_buffer.last - parser->raw_buffer.pointer >= 2 && !memcmp(parser->raw_buffer.pointer, BOM_UTF16BE, 2)) { parser->encoding = YAML_UTF16BE_ENCODING; parser->raw_buffer.pointer += 2; parser->offset += 2; } else if (parser->raw_buffer.last - parser->raw_buffer.pointer >= 3 && !memcmp(parser->raw_buffer.pointer, BOM_UTF8, 3)) { parser->encoding = YAML_UTF8_ENCODING; parser->raw_buffer.pointer += 3; parser->offset += 3; } else { parser->encoding = YAML_UTF8_ENCODING; } return 1; } /* * Update the raw buffer. 
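 *
 * [Editor's note: illustrative sketch, not part of the bundled libyaml
 * source; the byte array is made up.] The BOM patterns defined above are
 * matched byte-for-byte, so UTF-16 input is recognized without any caller
 * configuration:
 *
 *     static const unsigned char in[] =
 *         { 0xFF, 0xFE, 'h', 0x00, 'i', 0x00 };
 *     yaml_parser_t parser;
 *     yaml_parser_initialize(&parser);
 *     yaml_parser_set_input_string(&parser, in, sizeof(in));
 *
 * After the first yaml_parser_parse() or yaml_parser_load() call the
 * detected encoding is available as parser.encoding (YAML_UTF16LE_ENCODING
 * here) and is also carried by the STREAM-START event.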
*/ static int yaml_parser_update_raw_buffer(yaml_parser_t *parser) { size_t size_read = 0; /* Return if the raw buffer is full. */ if (parser->raw_buffer.start == parser->raw_buffer.pointer && parser->raw_buffer.last == parser->raw_buffer.end) return 1; /* Return on EOF. */ if (parser->eof) return 1; /* Move the remaining bytes in the raw buffer to the beginning. */ if (parser->raw_buffer.start < parser->raw_buffer.pointer && parser->raw_buffer.pointer < parser->raw_buffer.last) { memmove(parser->raw_buffer.start, parser->raw_buffer.pointer, parser->raw_buffer.last - parser->raw_buffer.pointer); } parser->raw_buffer.last -= parser->raw_buffer.pointer - parser->raw_buffer.start; parser->raw_buffer.pointer = parser->raw_buffer.start; /* Call the read handler to fill the buffer. */ if (!parser->read_handler(parser->read_handler_data, parser->raw_buffer.last, parser->raw_buffer.end - parser->raw_buffer.last, &size_read)) { return yaml_parser_set_reader_error(parser, "input error", parser->offset, -1); } parser->raw_buffer.last += size_read; if (!size_read) { parser->eof = 1; } return 1; } /* * Ensure that the buffer contains at least `length` characters. * Return 1 on success, 0 on failure. * * The length is supposed to be significantly less that the buffer size. */ YAML_DECLARE(int) yaml_parser_update_buffer(yaml_parser_t *parser, size_t length) { int first = 1; assert(parser->read_handler); /* Read handler must be set. */ /* If the EOF flag is set and the raw buffer is empty, do nothing. */ if (parser->eof && parser->raw_buffer.pointer == parser->raw_buffer.last) return 1; /* Return if the buffer contains enough characters. */ if (parser->unread >= length) return 1; /* Determine the input encoding if it is not known yet. */ if (!parser->encoding) { if (!yaml_parser_determine_encoding(parser)) return 0; } /* Move the unread characters to the beginning of the buffer. */ if (parser->buffer.start < parser->buffer.pointer && parser->buffer.pointer < parser->buffer.last) { size_t size = parser->buffer.last - parser->buffer.pointer; memmove(parser->buffer.start, parser->buffer.pointer, size); parser->buffer.pointer = parser->buffer.start; parser->buffer.last = parser->buffer.start + size; } else if (parser->buffer.pointer == parser->buffer.last) { parser->buffer.pointer = parser->buffer.start; parser->buffer.last = parser->buffer.start; } /* Fill the buffer until it has enough characters. */ while (parser->unread < length) { /* Fill the raw buffer if necessary. */ if (!first || parser->raw_buffer.pointer == parser->raw_buffer.last) { if (!yaml_parser_update_raw_buffer(parser)) return 0; } first = 0; /* Decode the raw buffer. */ while (parser->raw_buffer.pointer != parser->raw_buffer.last) { unsigned int value = 0, value2 = 0; int incomplete = 0; unsigned char octet; unsigned int width = 0; int low, high; size_t k; size_t raw_unread = parser->raw_buffer.last - parser->raw_buffer.pointer; /* Decode the next character. */ switch (parser->encoding) { case YAML_UTF8_ENCODING: /* * Decode a UTF-8 character. Check RFC 3629 * (http://www.ietf.org/rfc/rfc3629.txt) for more details. * * The following table (taken from the RFC) is used for * decoding. * * Char. 
number range | UTF-8 octet sequence * (hexadecimal) | (binary) * --------------------+------------------------------------ * 0000 0000-0000 007F | 0xxxxxxx * 0000 0080-0000 07FF | 110xxxxx 10xxxxxx * 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx * 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx * * Additionally, the characters in the range 0xD800-0xDFFF * are prohibited as they are reserved for use with UTF-16 * surrogate pairs. */ /* Determine the length of the UTF-8 sequence. */ octet = parser->raw_buffer.pointer[0]; width = (octet & 0x80) == 0x00 ? 1 : (octet & 0xE0) == 0xC0 ? 2 : (octet & 0xF0) == 0xE0 ? 3 : (octet & 0xF8) == 0xF0 ? 4 : 0; /* Check if the leading octet is valid. */ if (!width) return yaml_parser_set_reader_error(parser, "invalid leading UTF-8 octet", parser->offset, octet); /* Check if the raw buffer contains an incomplete character. */ if (width > raw_unread) { if (parser->eof) { return yaml_parser_set_reader_error(parser, "incomplete UTF-8 octet sequence", parser->offset, -1); } incomplete = 1; break; } /* Decode the leading octet. */ value = (octet & 0x80) == 0x00 ? octet & 0x7F : (octet & 0xE0) == 0xC0 ? octet & 0x1F : (octet & 0xF0) == 0xE0 ? octet & 0x0F : (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0; /* Check and decode the trailing octets. */ for (k = 1; k < width; k ++) { octet = parser->raw_buffer.pointer[k]; /* Check if the octet is valid. */ if ((octet & 0xC0) != 0x80) return yaml_parser_set_reader_error(parser, "invalid trailing UTF-8 octet", parser->offset+k, octet); /* Decode the octet. */ value = (value << 6) + (octet & 0x3F); } /* Check the length of the sequence against the value. */ if (!((width == 1) || (width == 2 && value >= 0x80) || (width == 3 && value >= 0x800) || (width == 4 && value >= 0x10000))) return yaml_parser_set_reader_error(parser, "invalid length of a UTF-8 sequence", parser->offset, -1); /* Check the range of the value. */ if ((value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF) return yaml_parser_set_reader_error(parser, "invalid Unicode character", parser->offset, value); break; case YAML_UTF16LE_ENCODING: case YAML_UTF16BE_ENCODING: low = (parser->encoding == YAML_UTF16LE_ENCODING ? 0 : 1); high = (parser->encoding == YAML_UTF16LE_ENCODING ? 1 : 0); /* * The UTF-16 encoding is not as simple as one might * naively think. Check RFC 2781 * (http://www.ietf.org/rfc/rfc2781.txt). * * Normally, two subsequent bytes describe a Unicode * character. However a special technique (called a * surrogate pair) is used for specifying character * values larger than 0xFFFF. * * A surrogate pair consists of two pseudo-characters: * high surrogate area (0xD800-0xDBFF) * low surrogate area (0xDC00-0xDFFF) * * The following formulas are used for decoding * and encoding characters using surrogate pairs: * * U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF) * U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF) * W1 = 110110yyyyyyyyyy * W2 = 110111xxxxxxxxxx * * where U is the character value, W1 is the high surrogate * area, W2 is the low surrogate area. */ /* Check for incomplete UTF-16 character. */ if (raw_unread < 2) { if (parser->eof) { return yaml_parser_set_reader_error(parser, "incomplete UTF-16 character", parser->offset, -1); } incomplete = 1; break; } /* Get the character. */ value = parser->raw_buffer.pointer[low] + (parser->raw_buffer.pointer[high] << 8); /* Check for unexpected low surrogate area. 
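 *
 * [Editor's note: worked example, not part of the bundled libyaml source.]
 * For U+1F600 the encoder side computes U' = 0x1F600 - 0x10000 = 0xF600,
 * W1 = 0xD800 + (U' >> 10) = 0xD83D and W2 = 0xDC00 + (U' & 0x3FF) = 0xDE00,
 * and the recombination a few lines below reverses exactly that:
 * 0x10000 + ((0xD83D & 0x3FF) << 10) + (0xDE00 & 0x3FF) = 0x1F600.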
*/ if ((value & 0xFC00) == 0xDC00) return yaml_parser_set_reader_error(parser, "unexpected low surrogate area", parser->offset, value); /* Check for a high surrogate area. */ if ((value & 0xFC00) == 0xD800) { width = 4; /* Check for incomplete surrogate pair. */ if (raw_unread < 4) { if (parser->eof) { return yaml_parser_set_reader_error(parser, "incomplete UTF-16 surrogate pair", parser->offset, -1); } incomplete = 1; break; } /* Get the next character. */ value2 = parser->raw_buffer.pointer[low+2] + (parser->raw_buffer.pointer[high+2] << 8); /* Check for a low surrogate area. */ if ((value2 & 0xFC00) != 0xDC00) return yaml_parser_set_reader_error(parser, "expected low surrogate area", parser->offset+2, value2); /* Generate the value of the surrogate pair. */ value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF); } else { width = 2; } break; default: assert(1); /* Impossible. */ } /* Check if the raw buffer contains enough bytes to form a character. */ if (incomplete) break; /* * Check if the character is in the allowed range: * #x9 | #xA | #xD | [#x20-#x7E] (8 bit) * | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit) * | [#x10000-#x10FFFF] (32 bit) */ if (! (value == 0x09 || value == 0x0A || value == 0x0D || (value >= 0x20 && value <= 0x7E) || (value == 0x85) || (value >= 0xA0 && value <= 0xD7FF) || (value >= 0xE000 && value <= 0xFFFD) || (value >= 0x10000 && value <= 0x10FFFF))) return yaml_parser_set_reader_error(parser, "control characters are not allowed", parser->offset, value); /* Move the raw pointers. */ parser->raw_buffer.pointer += width; parser->offset += width; /* Finally put the character into the buffer. */ /* 0000 0000-0000 007F -> 0xxxxxxx */ if (value <= 0x7F) { *(parser->buffer.last++) = value; } /* 0000 0080-0000 07FF -> 110xxxxx 10xxxxxx */ else if (value <= 0x7FF) { *(parser->buffer.last++) = 0xC0 + (value >> 6); *(parser->buffer.last++) = 0x80 + (value & 0x3F); } /* 0000 0800-0000 FFFF -> 1110xxxx 10xxxxxx 10xxxxxx */ else if (value <= 0xFFFF) { *(parser->buffer.last++) = 0xE0 + (value >> 12); *(parser->buffer.last++) = 0x80 + ((value >> 6) & 0x3F); *(parser->buffer.last++) = 0x80 + (value & 0x3F); } /* 0001 0000-0010 FFFF -> 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx */ else { *(parser->buffer.last++) = 0xF0 + (value >> 18); *(parser->buffer.last++) = 0x80 + ((value >> 12) & 0x3F); *(parser->buffer.last++) = 0x80 + ((value >> 6) & 0x3F); *(parser->buffer.last++) = 0x80 + (value & 0x3F); } parser->unread ++; } /* On EOF, put NUL into the buffer and return. */ if (parser->eof) { *(parser->buffer.last++) = '\0'; parser->unread ++; return 1; } } if (parser->offset >= PTRDIFF_MAX) return yaml_parser_set_reader_error(parser, "input is too long", PTRDIFF_MAX, -1); return 1; } psych-2.2.4/ext/psych/yaml/scanner.c000066400000000000000000002777701305404671600173640ustar00rootroot00000000000000 /* * Introduction * ************ * * The following notes assume that you are familiar with the YAML specification * (http://yaml.org/spec/cvs/current.html). We mostly follow it, although in * some cases we are less restrictive that it requires. * * The process of transforming a YAML stream into a sequence of events is * divided on two steps: Scanning and Parsing. * * The Scanner transforms the input stream into a sequence of tokens, while the * parser transform the sequence of tokens produced by the Scanner into a * sequence of parsing events. * * The Scanner is rather clever and complicated. 
The Parser, on the contrary, * is a straightforward implementation of a recursive-descendant parser (or, * LL(1) parser, as it is usually called). * * Actually there are two issues of Scanning that might be called "clever", the * rest is quite straightforward. The issues are "block collection start" and * "simple keys". Both issues are explained below in details. * * Here the Scanning step is explained and implemented. We start with the list * of all the tokens produced by the Scanner together with short descriptions. * * Now, tokens: * * STREAM-START(encoding) # The stream start. * STREAM-END # The stream end. * VERSION-DIRECTIVE(major,minor) # The '%YAML' directive. * TAG-DIRECTIVE(handle,prefix) # The '%TAG' directive. * DOCUMENT-START # '---' * DOCUMENT-END # '...' * BLOCK-SEQUENCE-START # Indentation increase denoting a block * BLOCK-MAPPING-START # sequence or a block mapping. * BLOCK-END # Indentation decrease. * FLOW-SEQUENCE-START # '[' * FLOW-SEQUENCE-END # ']' * BLOCK-SEQUENCE-START # '{' * BLOCK-SEQUENCE-END # '}' * BLOCK-ENTRY # '-' * FLOW-ENTRY # ',' * KEY # '?' or nothing (simple keys). * VALUE # ':' * ALIAS(anchor) # '*anchor' * ANCHOR(anchor) # '&anchor' * TAG(handle,suffix) # '!handle!suffix' * SCALAR(value,style) # A scalar. * * The following two tokens are "virtual" tokens denoting the beginning and the * end of the stream: * * STREAM-START(encoding) * STREAM-END * * We pass the information about the input stream encoding with the * STREAM-START token. * * The next two tokens are responsible for tags: * * VERSION-DIRECTIVE(major,minor) * TAG-DIRECTIVE(handle,prefix) * * Example: * * %YAML 1.1 * %TAG ! !foo * %TAG !yaml! tag:yaml.org,2002: * --- * * The corresponding sequence of tokens: * * STREAM-START(utf-8) * VERSION-DIRECTIVE(1,1) * TAG-DIRECTIVE("!","!foo") * TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:") * DOCUMENT-START * STREAM-END * * Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole * line. * * The document start and end indicators are represented by: * * DOCUMENT-START * DOCUMENT-END * * Note that if a YAML stream contains an implicit document (without '---' * and '...' indicators), no DOCUMENT-START and DOCUMENT-END tokens will be * produced. * * In the following examples, we present whole documents together with the * produced tokens. * * 1. An implicit document: * * 'a scalar' * * Tokens: * * STREAM-START(utf-8) * SCALAR("a scalar",single-quoted) * STREAM-END * * 2. An explicit document: * * --- * 'a scalar' * ... * * Tokens: * * STREAM-START(utf-8) * DOCUMENT-START * SCALAR("a scalar",single-quoted) * DOCUMENT-END * STREAM-END * * 3. Several documents in a stream: * * 'a scalar' * --- * 'another scalar' * --- * 'yet another scalar' * * Tokens: * * STREAM-START(utf-8) * SCALAR("a scalar",single-quoted) * DOCUMENT-START * SCALAR("another scalar",single-quoted) * DOCUMENT-START * SCALAR("yet another scalar",single-quoted) * STREAM-END * * We have already introduced the SCALAR token above. The following tokens are * used to describe aliases, anchors, tag, and scalars: * * ALIAS(anchor) * ANCHOR(anchor) * TAG(handle,suffix) * SCALAR(value,style) * * The following series of examples illustrate the usage of these tokens: * * 1. A recursive sequence: * * &A [ *A ] * * Tokens: * * STREAM-START(utf-8) * ANCHOR("A") * FLOW-SEQUENCE-START * ALIAS("A") * FLOW-SEQUENCE-END * STREAM-END * * 2. A tagged scalar: * * !!float "3.14" # A good approximation. 
* * Tokens: * * STREAM-START(utf-8) * TAG("!!","float") * SCALAR("3.14",double-quoted) * STREAM-END * * 3. Various scalar styles: * * --- # Implicit empty plain scalars do not produce tokens. * --- a plain scalar * --- 'a single-quoted scalar' * --- "a double-quoted scalar" * --- |- * a literal scalar * --- >- * a folded * scalar * * Tokens: * * STREAM-START(utf-8) * DOCUMENT-START * DOCUMENT-START * SCALAR("a plain scalar",plain) * DOCUMENT-START * SCALAR("a single-quoted scalar",single-quoted) * DOCUMENT-START * SCALAR("a double-quoted scalar",double-quoted) * DOCUMENT-START * SCALAR("a literal scalar",literal) * DOCUMENT-START * SCALAR("a folded scalar",folded) * STREAM-END * * Now it's time to review collection-related tokens. We will start with * flow collections: * * FLOW-SEQUENCE-START * FLOW-SEQUENCE-END * FLOW-MAPPING-START * FLOW-MAPPING-END * FLOW-ENTRY * KEY * VALUE * * The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and * FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}' * correspondingly. FLOW-ENTRY represent the ',' indicator. Finally the * indicators '?' and ':', which are used for denoting mapping keys and values, * are represented by the KEY and VALUE tokens. * * The following examples show flow collections: * * 1. A flow sequence: * * [item 1, item 2, item 3] * * Tokens: * * STREAM-START(utf-8) * FLOW-SEQUENCE-START * SCALAR("item 1",plain) * FLOW-ENTRY * SCALAR("item 2",plain) * FLOW-ENTRY * SCALAR("item 3",plain) * FLOW-SEQUENCE-END * STREAM-END * * 2. A flow mapping: * * { * a simple key: a value, # Note that the KEY token is produced. * ? a complex key: another value, * } * * Tokens: * * STREAM-START(utf-8) * FLOW-MAPPING-START * KEY * SCALAR("a simple key",plain) * VALUE * SCALAR("a value",plain) * FLOW-ENTRY * KEY * SCALAR("a complex key",plain) * VALUE * SCALAR("another value",plain) * FLOW-ENTRY * FLOW-MAPPING-END * STREAM-END * * A simple key is a key which is not denoted by the '?' indicator. Note that * the Scanner still produce the KEY token whenever it encounters a simple key. * * For scanning block collections, the following tokens are used (note that we * repeat KEY and VALUE here): * * BLOCK-SEQUENCE-START * BLOCK-MAPPING-START * BLOCK-END * BLOCK-ENTRY * KEY * VALUE * * The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote indentation * increase that precedes a block collection (cf. the INDENT token in Python). * The token BLOCK-END denote indentation decrease that ends a block collection * (cf. the DEDENT token in Python). However YAML has some syntax pecularities * that makes detections of these tokens more complex. * * The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators * '-', '?', and ':' correspondingly. * * The following examples show how the tokens BLOCK-SEQUENCE-START, * BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner: * * 1. Block sequences: * * - item 1 * - item 2 * - * - item 3.1 * - item 3.2 * - * key 1: value 1 * key 2: value 2 * * Tokens: * * STREAM-START(utf-8) * BLOCK-SEQUENCE-START * BLOCK-ENTRY * SCALAR("item 1",plain) * BLOCK-ENTRY * SCALAR("item 2",plain) * BLOCK-ENTRY * BLOCK-SEQUENCE-START * BLOCK-ENTRY * SCALAR("item 3.1",plain) * BLOCK-ENTRY * SCALAR("item 3.2",plain) * BLOCK-END * BLOCK-ENTRY * BLOCK-MAPPING-START * KEY * SCALAR("key 1",plain) * VALUE * SCALAR("value 1",plain) * KEY * SCALAR("key 2",plain) * VALUE * SCALAR("value 2",plain) * BLOCK-END * BLOCK-END * STREAM-END * * 2. 
Block mappings: * * a simple key: a value # The KEY token is produced here. * ? a complex key * : another value * a mapping: * key 1: value 1 * key 2: value 2 * a sequence: * - item 1 * - item 2 * * Tokens: * * STREAM-START(utf-8) * BLOCK-MAPPING-START * KEY * SCALAR("a simple key",plain) * VALUE * SCALAR("a value",plain) * KEY * SCALAR("a complex key",plain) * VALUE * SCALAR("another value",plain) * KEY * SCALAR("a mapping",plain) * BLOCK-MAPPING-START * KEY * SCALAR("key 1",plain) * VALUE * SCALAR("value 1",plain) * KEY * SCALAR("key 2",plain) * VALUE * SCALAR("value 2",plain) * BLOCK-END * KEY * SCALAR("a sequence",plain) * VALUE * BLOCK-SEQUENCE-START * BLOCK-ENTRY * SCALAR("item 1",plain) * BLOCK-ENTRY * SCALAR("item 2",plain) * BLOCK-END * BLOCK-END * STREAM-END * * YAML does not always require to start a new block collection from a new * line. If the current line contains only '-', '?', and ':' indicators, a new * block collection may start at the current line. The following examples * illustrate this case: * * 1. Collections in a sequence: * * - - item 1 * - item 2 * - key 1: value 1 * key 2: value 2 * - ? complex key * : complex value * * Tokens: * * STREAM-START(utf-8) * BLOCK-SEQUENCE-START * BLOCK-ENTRY * BLOCK-SEQUENCE-START * BLOCK-ENTRY * SCALAR("item 1",plain) * BLOCK-ENTRY * SCALAR("item 2",plain) * BLOCK-END * BLOCK-ENTRY * BLOCK-MAPPING-START * KEY * SCALAR("key 1",plain) * VALUE * SCALAR("value 1",plain) * KEY * SCALAR("key 2",plain) * VALUE * SCALAR("value 2",plain) * BLOCK-END * BLOCK-ENTRY * BLOCK-MAPPING-START * KEY * SCALAR("complex key") * VALUE * SCALAR("complex value") * BLOCK-END * BLOCK-END * STREAM-END * * 2. Collections in a mapping: * * ? a sequence * : - item 1 * - item 2 * ? a mapping * : key 1: value 1 * key 2: value 2 * * Tokens: * * STREAM-START(utf-8) * BLOCK-MAPPING-START * KEY * SCALAR("a sequence",plain) * VALUE * BLOCK-SEQUENCE-START * BLOCK-ENTRY * SCALAR("item 1",plain) * BLOCK-ENTRY * SCALAR("item 2",plain) * BLOCK-END * KEY * SCALAR("a mapping",plain) * VALUE * BLOCK-MAPPING-START * KEY * SCALAR("key 1",plain) * VALUE * SCALAR("value 1",plain) * KEY * SCALAR("key 2",plain) * VALUE * SCALAR("value 2",plain) * BLOCK-END * BLOCK-END * STREAM-END * * YAML also permits non-indented sequences if they are included into a block * mapping. In this case, the token BLOCK-SEQUENCE-START is not produced: * * key: * - item 1 # BLOCK-SEQUENCE-START is NOT produced here. * - item 2 * * Tokens: * * STREAM-START(utf-8) * BLOCK-MAPPING-START * KEY * SCALAR("key",plain) * VALUE * BLOCK-ENTRY * SCALAR("item 1",plain) * BLOCK-ENTRY * SCALAR("item 2",plain) * BLOCK-END */ #include "yaml_private.h" /* * Ensure that the buffer contains the required number of characters. * Return 1 on success, 0 on failure (reader error or memory error). */ #define CACHE(parser,length) \ (parser->unread >= (length) \ ? 1 \ : yaml_parser_update_buffer(parser, (length))) /* * Advance the buffer pointer. */ #define SKIP(parser) \ (parser->mark.index ++, \ parser->mark.column ++, \ parser->unread --, \ parser->buffer.pointer += WIDTH(parser->buffer)) #define SKIP_LINE(parser) \ (IS_CRLF(parser->buffer) ? \ (parser->mark.index += 2, \ parser->mark.column = 0, \ parser->mark.line ++, \ parser->unread -= 2, \ parser->buffer.pointer += 2) : \ IS_BREAK(parser->buffer) ? \ (parser->mark.index ++, \ parser->mark.column = 0, \ parser->mark.line ++, \ parser->unread --, \ parser->buffer.pointer += WIDTH(parser->buffer)) : 0) /* * Copy a character to a string buffer and advance pointers. 
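 *
 * READ() appends the current character, which may occupy several bytes in
 * UTF-8, to the given string and advances the mark, so it is only used
 * after CACHE() has guaranteed that at least one decoded character is
 * available.  The typical pattern in the token scanners below is:
 *
 *      if (!CACHE(parser, 1)) goto error;
 *      while (IS_ALPHA(parser->buffer)) {
 *          if (!READ(parser, string)) goto error;
 *          if (!CACHE(parser, 1)) goto error;
 *      }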
*/ #define READ(parser,string) \ (STRING_EXTEND(parser,string) ? \ (COPY(string,parser->buffer), \ parser->mark.index ++, \ parser->mark.column ++, \ parser->unread --, \ 1) : 0) /* * Copy a line break character to a string buffer and advance pointers. */ #define READ_LINE(parser,string) \ (STRING_EXTEND(parser,string) ? \ (((CHECK_AT(parser->buffer,'\r',0) \ && CHECK_AT(parser->buffer,'\n',1)) ? /* CR LF -> LF */ \ (*((string).pointer++) = (yaml_char_t) '\n', \ parser->buffer.pointer += 2, \ parser->mark.index += 2, \ parser->mark.column = 0, \ parser->mark.line ++, \ parser->unread -= 2) : \ (CHECK_AT(parser->buffer,'\r',0) \ || CHECK_AT(parser->buffer,'\n',0)) ? /* CR|LF -> LF */ \ (*((string).pointer++) = (yaml_char_t) '\n', \ parser->buffer.pointer ++, \ parser->mark.index ++, \ parser->mark.column = 0, \ parser->mark.line ++, \ parser->unread --) : \ (CHECK_AT(parser->buffer,'\xC2',0) \ && CHECK_AT(parser->buffer,'\x85',1)) ? /* NEL -> LF */ \ (*((string).pointer++) = (yaml_char_t) '\n', \ parser->buffer.pointer += 2, \ parser->mark.index ++, \ parser->mark.column = 0, \ parser->mark.line ++, \ parser->unread --) : \ (CHECK_AT(parser->buffer,'\xE2',0) && \ CHECK_AT(parser->buffer,'\x80',1) && \ (CHECK_AT(parser->buffer,'\xA8',2) || \ CHECK_AT(parser->buffer,'\xA9',2))) ? /* LS|PS -> LS|PS */ \ (*((string).pointer++) = *(parser->buffer.pointer++), \ *((string).pointer++) = *(parser->buffer.pointer++), \ *((string).pointer++) = *(parser->buffer.pointer++), \ parser->mark.index ++, \ parser->mark.column = 0, \ parser->mark.line ++, \ parser->unread --) : 0), \ 1) : 0) /* * Public API declarations. */ YAML_DECLARE(int) yaml_parser_scan(yaml_parser_t *parser, yaml_token_t *token); /* * Error handling. */ static int yaml_parser_set_scanner_error(yaml_parser_t *parser, const char *context, yaml_mark_t context_mark, const char *problem); /* * High-level token API. */ YAML_DECLARE(int) yaml_parser_fetch_more_tokens(yaml_parser_t *parser); static int yaml_parser_fetch_next_token(yaml_parser_t *parser); /* * Potential simple keys. */ static int yaml_parser_stale_simple_keys(yaml_parser_t *parser); static int yaml_parser_save_simple_key(yaml_parser_t *parser); static int yaml_parser_remove_simple_key(yaml_parser_t *parser); static int yaml_parser_increase_flow_level(yaml_parser_t *parser); static int yaml_parser_decrease_flow_level(yaml_parser_t *parser); /* * Indentation treatment. */ static int yaml_parser_roll_indent(yaml_parser_t *parser, ptrdiff_t column, ptrdiff_t number, yaml_token_type_t type, yaml_mark_t mark); static int yaml_parser_unroll_indent(yaml_parser_t *parser, ptrdiff_t column); /* * Token fetchers. 
*/ static int yaml_parser_fetch_stream_start(yaml_parser_t *parser); static int yaml_parser_fetch_stream_end(yaml_parser_t *parser); static int yaml_parser_fetch_directive(yaml_parser_t *parser); static int yaml_parser_fetch_document_indicator(yaml_parser_t *parser, yaml_token_type_t type); static int yaml_parser_fetch_flow_collection_start(yaml_parser_t *parser, yaml_token_type_t type); static int yaml_parser_fetch_flow_collection_end(yaml_parser_t *parser, yaml_token_type_t type); static int yaml_parser_fetch_flow_entry(yaml_parser_t *parser); static int yaml_parser_fetch_block_entry(yaml_parser_t *parser); static int yaml_parser_fetch_key(yaml_parser_t *parser); static int yaml_parser_fetch_value(yaml_parser_t *parser); static int yaml_parser_fetch_anchor(yaml_parser_t *parser, yaml_token_type_t type); static int yaml_parser_fetch_tag(yaml_parser_t *parser); static int yaml_parser_fetch_block_scalar(yaml_parser_t *parser, int literal); static int yaml_parser_fetch_flow_scalar(yaml_parser_t *parser, int single); static int yaml_parser_fetch_plain_scalar(yaml_parser_t *parser); /* * Token scanners. */ static int yaml_parser_scan_to_next_token(yaml_parser_t *parser); static int yaml_parser_scan_directive(yaml_parser_t *parser, yaml_token_t *token); static int yaml_parser_scan_directive_name(yaml_parser_t *parser, yaml_mark_t start_mark, yaml_char_t **name); static int yaml_parser_scan_version_directive_value(yaml_parser_t *parser, yaml_mark_t start_mark, int *major, int *minor); static int yaml_parser_scan_version_directive_number(yaml_parser_t *parser, yaml_mark_t start_mark, int *number); static int yaml_parser_scan_tag_directive_value(yaml_parser_t *parser, yaml_mark_t mark, yaml_char_t **handle, yaml_char_t **prefix); static int yaml_parser_scan_anchor(yaml_parser_t *parser, yaml_token_t *token, yaml_token_type_t type); static int yaml_parser_scan_tag(yaml_parser_t *parser, yaml_token_t *token); static int yaml_parser_scan_tag_handle(yaml_parser_t *parser, int directive, yaml_mark_t start_mark, yaml_char_t **handle); static int yaml_parser_scan_tag_uri(yaml_parser_t *parser, int directive, yaml_char_t *head, yaml_mark_t start_mark, yaml_char_t **uri); static int yaml_parser_scan_uri_escapes(yaml_parser_t *parser, int directive, yaml_mark_t start_mark, yaml_string_t *string); static int yaml_parser_scan_block_scalar(yaml_parser_t *parser, yaml_token_t *token, int literal); static int yaml_parser_scan_block_scalar_breaks(yaml_parser_t *parser, int *indent, yaml_string_t *breaks, yaml_mark_t start_mark, yaml_mark_t *end_mark); static int yaml_parser_scan_flow_scalar(yaml_parser_t *parser, yaml_token_t *token, int single); static int yaml_parser_scan_plain_scalar(yaml_parser_t *parser, yaml_token_t *token); /* * Get the next token. */ YAML_DECLARE(int) yaml_parser_scan(yaml_parser_t *parser, yaml_token_t *token) { assert(parser); /* Non-NULL parser object is expected. */ assert(token); /* Non-NULL token object is expected. */ /* Erase the token object. */ memset(token, 0, sizeof(yaml_token_t)); /* No tokens after STREAM-END or error. */ if (parser->stream_end_produced || parser->error) { return 1; } /* Ensure that the tokens queue contains enough tokens. */ if (!parser->token_available) { if (!yaml_parser_fetch_more_tokens(parser)) return 0; } /* Fetch the next token from the queue. 
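 *
 * (An illustrative usage sketch, not part of the scanner itself; it
 * assumes a yaml_parser_t that has already been initialized and given an
 * input source.  A caller keeps requesting tokens until STREAM-END is
 * produced and deletes every token it receives:
 *
 *      yaml_token_t token;
 *      int done = 0;
 *      while (!done && yaml_parser_scan(&parser, &token)) {
 *          done = (token.type == YAML_STREAM_END_TOKEN);
 *          ... inspect token.type and token.data here ...
 *          yaml_token_delete(&token);
 *      }
 *
 * If yaml_parser_scan() returns 0, the failure is described by
 * parser.error, parser.problem and parser.problem_mark.)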
*/ *token = DEQUEUE(parser, parser->tokens); parser->token_available = 0; parser->tokens_parsed ++; if (token->type == YAML_STREAM_END_TOKEN) { parser->stream_end_produced = 1; } return 1; } /* * Set the scanner error and return 0. */ static int yaml_parser_set_scanner_error(yaml_parser_t *parser, const char *context, yaml_mark_t context_mark, const char *problem) { parser->error = YAML_SCANNER_ERROR; parser->context = context; parser->context_mark = context_mark; parser->problem = problem; parser->problem_mark = parser->mark; return 0; } /* * Ensure that the tokens queue contains at least one token which can be * returned to the Parser. */ YAML_DECLARE(int) yaml_parser_fetch_more_tokens(yaml_parser_t *parser) { int need_more_tokens; /* While we need more tokens to fetch, do it. */ while (1) { /* * Check if we really need to fetch more tokens. */ need_more_tokens = 0; if (parser->tokens.head == parser->tokens.tail) { /* Queue is empty. */ need_more_tokens = 1; } else { yaml_simple_key_t *simple_key; /* Check if any potential simple key may occupy the head position. */ if (!yaml_parser_stale_simple_keys(parser)) return 0; for (simple_key = parser->simple_keys.start; simple_key != parser->simple_keys.top; simple_key++) { if (simple_key->possible && simple_key->token_number == parser->tokens_parsed) { need_more_tokens = 1; break; } } } /* We are finished. */ if (!need_more_tokens) break; /* Fetch the next token. */ if (!yaml_parser_fetch_next_token(parser)) return 0; } parser->token_available = 1; return 1; } /* * The dispatcher for token fetchers. */ static int yaml_parser_fetch_next_token(yaml_parser_t *parser) { /* Ensure that the buffer is initialized. */ if (!CACHE(parser, 1)) return 0; /* Check if we just started scanning. Fetch STREAM-START then. */ if (!parser->stream_start_produced) return yaml_parser_fetch_stream_start(parser); /* Eat whitespaces and comments until we reach the next token. */ if (!yaml_parser_scan_to_next_token(parser)) return 0; /* Remove obsolete potential simple keys. */ if (!yaml_parser_stale_simple_keys(parser)) return 0; /* Check the indentation level against the current column. */ if (!yaml_parser_unroll_indent(parser, parser->mark.column)) return 0; /* * Ensure that the buffer contains at least 4 characters. 4 is the length * of the longest indicators ('--- ' and '... '). */ if (!CACHE(parser, 4)) return 0; /* Is it the end of the stream? */ if (IS_Z(parser->buffer)) return yaml_parser_fetch_stream_end(parser); /* Is it a directive? */ if (parser->mark.column == 0 && CHECK(parser->buffer, '%')) return yaml_parser_fetch_directive(parser); /* Is it the document start indicator? */ if (parser->mark.column == 0 && CHECK_AT(parser->buffer, '-', 0) && CHECK_AT(parser->buffer, '-', 1) && CHECK_AT(parser->buffer, '-', 2) && IS_BLANKZ_AT(parser->buffer, 3)) return yaml_parser_fetch_document_indicator(parser, YAML_DOCUMENT_START_TOKEN); /* Is it the document end indicator? */ if (parser->mark.column == 0 && CHECK_AT(parser->buffer, '.', 0) && CHECK_AT(parser->buffer, '.', 1) && CHECK_AT(parser->buffer, '.', 2) && IS_BLANKZ_AT(parser->buffer, 3)) return yaml_parser_fetch_document_indicator(parser, YAML_DOCUMENT_END_TOKEN); /* Is it the flow sequence start indicator? */ if (CHECK(parser->buffer, '[')) return yaml_parser_fetch_flow_collection_start(parser, YAML_FLOW_SEQUENCE_START_TOKEN); /* Is it the flow mapping start indicator? 
*/ if (CHECK(parser->buffer, '{')) return yaml_parser_fetch_flow_collection_start(parser, YAML_FLOW_MAPPING_START_TOKEN); /* Is it the flow sequence end indicator? */ if (CHECK(parser->buffer, ']')) return yaml_parser_fetch_flow_collection_end(parser, YAML_FLOW_SEQUENCE_END_TOKEN); /* Is it the flow mapping end indicator? */ if (CHECK(parser->buffer, '}')) return yaml_parser_fetch_flow_collection_end(parser, YAML_FLOW_MAPPING_END_TOKEN); /* Is it the flow entry indicator? */ if (CHECK(parser->buffer, ',')) return yaml_parser_fetch_flow_entry(parser); /* Is it the block entry indicator? */ if (CHECK(parser->buffer, '-') && IS_BLANKZ_AT(parser->buffer, 1)) return yaml_parser_fetch_block_entry(parser); /* Is it the key indicator? */ if (CHECK(parser->buffer, '?') && (parser->flow_level || IS_BLANKZ_AT(parser->buffer, 1))) return yaml_parser_fetch_key(parser); /* Is it the value indicator? */ if (CHECK(parser->buffer, ':') && (parser->flow_level || IS_BLANKZ_AT(parser->buffer, 1))) return yaml_parser_fetch_value(parser); /* Is it an alias? */ if (CHECK(parser->buffer, '*')) return yaml_parser_fetch_anchor(parser, YAML_ALIAS_TOKEN); /* Is it an anchor? */ if (CHECK(parser->buffer, '&')) return yaml_parser_fetch_anchor(parser, YAML_ANCHOR_TOKEN); /* Is it a tag? */ if (CHECK(parser->buffer, '!')) return yaml_parser_fetch_tag(parser); /* Is it a literal scalar? */ if (CHECK(parser->buffer, '|') && !parser->flow_level) return yaml_parser_fetch_block_scalar(parser, 1); /* Is it a folded scalar? */ if (CHECK(parser->buffer, '>') && !parser->flow_level) return yaml_parser_fetch_block_scalar(parser, 0); /* Is it a single-quoted scalar? */ if (CHECK(parser->buffer, '\'')) return yaml_parser_fetch_flow_scalar(parser, 1); /* Is it a double-quoted scalar? */ if (CHECK(parser->buffer, '"')) return yaml_parser_fetch_flow_scalar(parser, 0); /* * Is it a plain scalar? * * A plain scalar may start with any non-blank characters except * * '-', '?', ':', ',', '[', ']', '{', '}', * '#', '&', '*', '!', '|', '>', '\'', '\"', * '%', '@', '`'. * * In the block context (and, for the '-' indicator, in the flow context * too), it may also start with the characters * * '-', '?', ':' * * if it is followed by a non-space character. * * The last rule is more restrictive than the specification requires. */ if (!(IS_BLANKZ(parser->buffer) || CHECK(parser->buffer, '-') || CHECK(parser->buffer, '?') || CHECK(parser->buffer, ':') || CHECK(parser->buffer, ',') || CHECK(parser->buffer, '[') || CHECK(parser->buffer, ']') || CHECK(parser->buffer, '{') || CHECK(parser->buffer, '}') || CHECK(parser->buffer, '#') || CHECK(parser->buffer, '&') || CHECK(parser->buffer, '*') || CHECK(parser->buffer, '!') || CHECK(parser->buffer, '|') || CHECK(parser->buffer, '>') || CHECK(parser->buffer, '\'') || CHECK(parser->buffer, '"') || CHECK(parser->buffer, '%') || CHECK(parser->buffer, '@') || CHECK(parser->buffer, '`')) || (CHECK(parser->buffer, '-') && !IS_BLANK_AT(parser->buffer, 1)) || (!parser->flow_level && (CHECK(parser->buffer, '?') || CHECK(parser->buffer, ':')) && !IS_BLANKZ_AT(parser->buffer, 1))) return yaml_parser_fetch_plain_scalar(parser); /* * If we don't determine the token type so far, it is an error. */ return yaml_parser_set_scanner_error(parser, "while scanning for the next token", parser->mark, "found character that cannot start any token"); } /* * Check the list of potential simple keys and remove the positions that * cannot contain simple keys anymore. 
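 *
 * For example, while scanning the line
 *
 *      foo: bar
 *
 * the plain scalar 'foo' is first saved as a potential simple key; it is
 * only turned into a KEY token when the ':' is found, and the saved
 * position is discarded (or reported as an error if the key is required)
 * once the scanner has moved to another line or more than 1024 characters
 * past it.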
*/ static int yaml_parser_stale_simple_keys(yaml_parser_t *parser) { yaml_simple_key_t *simple_key; /* Check for a potential simple key for each flow level. */ for (simple_key = parser->simple_keys.start; simple_key != parser->simple_keys.top; simple_key ++) { /* * The specification requires that a simple key * * - is limited to a single line, * - is shorter than 1024 characters. */ if (simple_key->possible && (simple_key->mark.line < parser->mark.line || simple_key->mark.index+1024 < parser->mark.index)) { /* Check if the potential simple key to be removed is required. */ if (simple_key->required) { return yaml_parser_set_scanner_error(parser, "while scanning a simple key", simple_key->mark, "could not find expected ':'"); } simple_key->possible = 0; } } return 1; } /* * Check if a simple key may start at the current position and add it if * needed. */ static int yaml_parser_save_simple_key(yaml_parser_t *parser) { /* * A simple key is required at the current position if the scanner is in * the block context and the current column coincides with the indentation * level. */ int required = (!parser->flow_level && parser->indent == (ptrdiff_t)parser->mark.column); /* * If the current position may start a simple key, save it. */ if (parser->simple_key_allowed) { yaml_simple_key_t simple_key; simple_key.possible = 1; simple_key.required = required; simple_key.token_number = parser->tokens_parsed + (parser->tokens.tail - parser->tokens.head); simple_key.mark = parser->mark; if (!yaml_parser_remove_simple_key(parser)) return 0; *(parser->simple_keys.top-1) = simple_key; } return 1; } /* * Remove a potential simple key at the current flow level. */ static int yaml_parser_remove_simple_key(yaml_parser_t *parser) { yaml_simple_key_t *simple_key = parser->simple_keys.top-1; if (simple_key->possible) { /* If the key is required, it is an error. */ if (simple_key->required) { return yaml_parser_set_scanner_error(parser, "while scanning a simple key", simple_key->mark, "could not find expected ':'"); } } /* Remove the key from the stack. */ simple_key->possible = 0; return 1; } /* * Increase the flow level and resize the simple key list if needed. */ static int yaml_parser_increase_flow_level(yaml_parser_t *parser) { yaml_simple_key_t empty_simple_key = { 0, 0, 0, { 0, 0, 0 } }; /* Reset the simple key on the next level. */ if (!PUSH(parser, parser->simple_keys, empty_simple_key)) return 0; /* Increase the flow level. */ if (parser->flow_level == INT_MAX) { parser->error = YAML_MEMORY_ERROR; return 0; } parser->flow_level++; return 1; } /* * Decrease the flow level. */ static int yaml_parser_decrease_flow_level(yaml_parser_t *parser) { yaml_simple_key_t dummy_key; /* Used to eliminate a compiler warning. */ if (parser->flow_level) { parser->flow_level --; dummy_key = POP(parser, parser->simple_keys); } return 1; } /* * Push the current indentation level to the stack and set the new level * the current column is greater than the indentation level. In this case, * append or insert the specified token into the token queue. * */ static int yaml_parser_roll_indent(yaml_parser_t *parser, ptrdiff_t column, ptrdiff_t number, yaml_token_type_t type, yaml_mark_t mark) { yaml_token_t token; /* In the flow context, do nothing. */ if (parser->flow_level) return 1; if (parser->indent < column) { /* * Push the current indentation level to the stack and set the new * indentation level. 
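 *
 * For example, while scanning
 *
 *      key:
 *        - item
 *
 * the '-' indicator is found at column 2 while the current indentation
 * level is 0, so 0 is pushed onto the indents stack, the level becomes 2,
 * and a BLOCK-SEQUENCE-START token is emitted; the matching BLOCK-END is
 * produced later by yaml_parser_unroll_indent() once a line at column 0
 * (or the end of the stream) is reached.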
*/ if (!PUSH(parser, parser->indents, parser->indent)) return 0; if (column > INT_MAX) { parser->error = YAML_MEMORY_ERROR; return 0; } parser->indent = column; /* Create a token and insert it into the queue. */ TOKEN_INIT(token, type, mark, mark); if (number == -1) { if (!ENQUEUE(parser, parser->tokens, token)) return 0; } else { if (!QUEUE_INSERT(parser, parser->tokens, number - parser->tokens_parsed, token)) return 0; } } return 1; } /* * Pop indentation levels from the indents stack until the current level * becomes less or equal to the column. For each indentation level, append * the BLOCK-END token. */ static int yaml_parser_unroll_indent(yaml_parser_t *parser, ptrdiff_t column) { yaml_token_t token; /* In the flow context, do nothing. */ if (parser->flow_level) return 1; /* Loop through the indentation levels in the stack. */ while (parser->indent > column) { /* Create a token and append it to the queue. */ TOKEN_INIT(token, YAML_BLOCK_END_TOKEN, parser->mark, parser->mark); if (!ENQUEUE(parser, parser->tokens, token)) return 0; /* Pop the indentation level. */ parser->indent = POP(parser, parser->indents); } return 1; } /* * Initialize the scanner and produce the STREAM-START token. */ static int yaml_parser_fetch_stream_start(yaml_parser_t *parser) { yaml_simple_key_t simple_key = { 0, 0, 0, { 0, 0, 0 } }; yaml_token_t token; /* Set the initial indentation. */ parser->indent = -1; /* Initialize the simple key stack. */ if (!PUSH(parser, parser->simple_keys, simple_key)) return 0; /* A simple key is allowed at the beginning of the stream. */ parser->simple_key_allowed = 1; /* We have started. */ parser->stream_start_produced = 1; /* Create the STREAM-START token and append it to the queue. */ STREAM_START_TOKEN_INIT(token, parser->encoding, parser->mark, parser->mark); if (!ENQUEUE(parser, parser->tokens, token)) return 0; return 1; } /* * Produce the STREAM-END token and shut down the scanner. */ static int yaml_parser_fetch_stream_end(yaml_parser_t *parser) { yaml_token_t token; /* Force new line. */ if (parser->mark.column != 0) { parser->mark.column = 0; parser->mark.line ++; } /* Reset the indentation level. */ if (!yaml_parser_unroll_indent(parser, -1)) return 0; /* Reset simple keys. */ if (!yaml_parser_remove_simple_key(parser)) return 0; parser->simple_key_allowed = 0; /* Create the STREAM-END token and append it to the queue. */ STREAM_END_TOKEN_INIT(token, parser->mark, parser->mark); if (!ENQUEUE(parser, parser->tokens, token)) return 0; return 1; } /* * Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token. */ static int yaml_parser_fetch_directive(yaml_parser_t *parser) { yaml_token_t token; /* Reset the indentation level. */ if (!yaml_parser_unroll_indent(parser, -1)) return 0; /* Reset simple keys. */ if (!yaml_parser_remove_simple_key(parser)) return 0; parser->simple_key_allowed = 0; /* Create the YAML-DIRECTIVE or TAG-DIRECTIVE token. */ if (!yaml_parser_scan_directive(parser, &token)) return 0; /* Append the token to the queue. */ if (!ENQUEUE(parser, parser->tokens, token)) { yaml_token_delete(&token); return 0; } return 1; } /* * Produce the DOCUMENT-START or DOCUMENT-END token. */ static int yaml_parser_fetch_document_indicator(yaml_parser_t *parser, yaml_token_type_t type) { yaml_mark_t start_mark, end_mark; yaml_token_t token; /* Reset the indentation level. */ if (!yaml_parser_unroll_indent(parser, -1)) return 0; /* Reset simple keys. */ if (!yaml_parser_remove_simple_key(parser)) return 0; parser->simple_key_allowed = 0; /* Consume the token. 
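 * (The DOCUMENT-START and DOCUMENT-END indicators are both exactly three
 * characters long, '---' and '...', hence the three SKIP() calls below.)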
*/ start_mark = parser->mark; SKIP(parser); SKIP(parser); SKIP(parser); end_mark = parser->mark; /* Create the DOCUMENT-START or DOCUMENT-END token. */ TOKEN_INIT(token, type, start_mark, end_mark); /* Append the token to the queue. */ if (!ENQUEUE(parser, parser->tokens, token)) return 0; return 1; } /* * Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token. */ static int yaml_parser_fetch_flow_collection_start(yaml_parser_t *parser, yaml_token_type_t type) { yaml_mark_t start_mark, end_mark; yaml_token_t token; /* The indicators '[' and '{' may start a simple key. */ if (!yaml_parser_save_simple_key(parser)) return 0; /* Increase the flow level. */ if (!yaml_parser_increase_flow_level(parser)) return 0; /* A simple key may follow the indicators '[' and '{'. */ parser->simple_key_allowed = 1; /* Consume the token. */ start_mark = parser->mark; SKIP(parser); end_mark = parser->mark; /* Create the FLOW-SEQUENCE-START of FLOW-MAPPING-START token. */ TOKEN_INIT(token, type, start_mark, end_mark); /* Append the token to the queue. */ if (!ENQUEUE(parser, parser->tokens, token)) return 0; return 1; } /* * Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token. */ static int yaml_parser_fetch_flow_collection_end(yaml_parser_t *parser, yaml_token_type_t type) { yaml_mark_t start_mark, end_mark; yaml_token_t token; /* Reset any potential simple key on the current flow level. */ if (!yaml_parser_remove_simple_key(parser)) return 0; /* Decrease the flow level. */ if (!yaml_parser_decrease_flow_level(parser)) return 0; /* No simple keys after the indicators ']' and '}'. */ parser->simple_key_allowed = 0; /* Consume the token. */ start_mark = parser->mark; SKIP(parser); end_mark = parser->mark; /* Create the FLOW-SEQUENCE-END of FLOW-MAPPING-END token. */ TOKEN_INIT(token, type, start_mark, end_mark); /* Append the token to the queue. */ if (!ENQUEUE(parser, parser->tokens, token)) return 0; return 1; } /* * Produce the FLOW-ENTRY token. */ static int yaml_parser_fetch_flow_entry(yaml_parser_t *parser) { yaml_mark_t start_mark, end_mark; yaml_token_t token; /* Reset any potential simple keys on the current flow level. */ if (!yaml_parser_remove_simple_key(parser)) return 0; /* Simple keys are allowed after ','. */ parser->simple_key_allowed = 1; /* Consume the token. */ start_mark = parser->mark; SKIP(parser); end_mark = parser->mark; /* Create the FLOW-ENTRY token and append it to the queue. */ TOKEN_INIT(token, YAML_FLOW_ENTRY_TOKEN, start_mark, end_mark); if (!ENQUEUE(parser, parser->tokens, token)) return 0; return 1; } /* * Produce the BLOCK-ENTRY token. */ static int yaml_parser_fetch_block_entry(yaml_parser_t *parser) { yaml_mark_t start_mark, end_mark; yaml_token_t token; /* Check if the scanner is in the block context. */ if (!parser->flow_level) { /* Check if we are allowed to start a new entry. */ if (!parser->simple_key_allowed) { return yaml_parser_set_scanner_error(parser, NULL, parser->mark, "block sequence entries are not allowed in this context"); } /* Add the BLOCK-SEQUENCE-START token if needed. */ if (!yaml_parser_roll_indent(parser, parser->mark.column, -1, YAML_BLOCK_SEQUENCE_START_TOKEN, parser->mark)) return 0; } else { /* * It is an error for the '-' indicator to occur in the flow context, * but we let the Parser detect and report about it because the Parser * is able to point to the context. */ } /* Reset any potential simple keys on the current flow level. */ if (!yaml_parser_remove_simple_key(parser)) return 0; /* Simple keys are allowed after '-'. 
*/ parser->simple_key_allowed = 1; /* Consume the token. */ start_mark = parser->mark; SKIP(parser); end_mark = parser->mark; /* Create the BLOCK-ENTRY token and append it to the queue. */ TOKEN_INIT(token, YAML_BLOCK_ENTRY_TOKEN, start_mark, end_mark); if (!ENQUEUE(parser, parser->tokens, token)) return 0; return 1; } /* * Produce the KEY token. */ static int yaml_parser_fetch_key(yaml_parser_t *parser) { yaml_mark_t start_mark, end_mark; yaml_token_t token; /* In the block context, additional checks are required. */ if (!parser->flow_level) { /* Check if we are allowed to start a new key (not nessesary simple). */ if (!parser->simple_key_allowed) { return yaml_parser_set_scanner_error(parser, NULL, parser->mark, "mapping keys are not allowed in this context"); } /* Add the BLOCK-MAPPING-START token if needed. */ if (!yaml_parser_roll_indent(parser, parser->mark.column, -1, YAML_BLOCK_MAPPING_START_TOKEN, parser->mark)) return 0; } /* Reset any potential simple keys on the current flow level. */ if (!yaml_parser_remove_simple_key(parser)) return 0; /* Simple keys are allowed after '?' in the block context. */ parser->simple_key_allowed = (!parser->flow_level); /* Consume the token. */ start_mark = parser->mark; SKIP(parser); end_mark = parser->mark; /* Create the KEY token and append it to the queue. */ TOKEN_INIT(token, YAML_KEY_TOKEN, start_mark, end_mark); if (!ENQUEUE(parser, parser->tokens, token)) return 0; return 1; } /* * Produce the VALUE token. */ static int yaml_parser_fetch_value(yaml_parser_t *parser) { yaml_mark_t start_mark, end_mark; yaml_token_t token; yaml_simple_key_t *simple_key = parser->simple_keys.top-1; /* Have we found a simple key? */ if (simple_key->possible) { /* Create the KEY token and insert it into the queue. */ TOKEN_INIT(token, YAML_KEY_TOKEN, simple_key->mark, simple_key->mark); if (!QUEUE_INSERT(parser, parser->tokens, simple_key->token_number - parser->tokens_parsed, token)) return 0; /* In the block context, we may need to add the BLOCK-MAPPING-START token. */ if (!yaml_parser_roll_indent(parser, simple_key->mark.column, simple_key->token_number, YAML_BLOCK_MAPPING_START_TOKEN, simple_key->mark)) return 0; /* Remove the simple key. */ simple_key->possible = 0; /* A simple key cannot follow another simple key. */ parser->simple_key_allowed = 0; } else { /* The ':' indicator follows a complex key. */ /* In the block context, extra checks are required. */ if (!parser->flow_level) { /* Check if we are allowed to start a complex value. */ if (!parser->simple_key_allowed) { return yaml_parser_set_scanner_error(parser, NULL, parser->mark, "mapping values are not allowed in this context"); } /* Add the BLOCK-MAPPING-START token if needed. */ if (!yaml_parser_roll_indent(parser, parser->mark.column, -1, YAML_BLOCK_MAPPING_START_TOKEN, parser->mark)) return 0; } /* Simple keys after ':' are allowed in the block context. */ parser->simple_key_allowed = (!parser->flow_level); } /* Consume the token. */ start_mark = parser->mark; SKIP(parser); end_mark = parser->mark; /* Create the VALUE token and append it to the queue. */ TOKEN_INIT(token, YAML_VALUE_TOKEN, start_mark, end_mark); if (!ENQUEUE(parser, parser->tokens, token)) return 0; return 1; } /* * Produce the ALIAS or ANCHOR token. */ static int yaml_parser_fetch_anchor(yaml_parser_t *parser, yaml_token_type_t type) { yaml_token_t token; /* An anchor or an alias could be a simple key. */ if (!yaml_parser_save_simple_key(parser)) return 0; /* A simple key cannot follow an anchor or an alias. 
*/ parser->simple_key_allowed = 0; /* Create the ALIAS or ANCHOR token and append it to the queue. */ if (!yaml_parser_scan_anchor(parser, &token, type)) return 0; if (!ENQUEUE(parser, parser->tokens, token)) { yaml_token_delete(&token); return 0; } return 1; } /* * Produce the TAG token. */ static int yaml_parser_fetch_tag(yaml_parser_t *parser) { yaml_token_t token; /* A tag could be a simple key. */ if (!yaml_parser_save_simple_key(parser)) return 0; /* A simple key cannot follow a tag. */ parser->simple_key_allowed = 0; /* Create the TAG token and append it to the queue. */ if (!yaml_parser_scan_tag(parser, &token)) return 0; if (!ENQUEUE(parser, parser->tokens, token)) { yaml_token_delete(&token); return 0; } return 1; } /* * Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens. */ static int yaml_parser_fetch_block_scalar(yaml_parser_t *parser, int literal) { yaml_token_t token; /* Remove any potential simple keys. */ if (!yaml_parser_remove_simple_key(parser)) return 0; /* A simple key may follow a block scalar. */ parser->simple_key_allowed = 1; /* Create the SCALAR token and append it to the queue. */ if (!yaml_parser_scan_block_scalar(parser, &token, literal)) return 0; if (!ENQUEUE(parser, parser->tokens, token)) { yaml_token_delete(&token); return 0; } return 1; } /* * Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens. */ static int yaml_parser_fetch_flow_scalar(yaml_parser_t *parser, int single) { yaml_token_t token; /* A plain scalar could be a simple key. */ if (!yaml_parser_save_simple_key(parser)) return 0; /* A simple key cannot follow a flow scalar. */ parser->simple_key_allowed = 0; /* Create the SCALAR token and append it to the queue. */ if (!yaml_parser_scan_flow_scalar(parser, &token, single)) return 0; if (!ENQUEUE(parser, parser->tokens, token)) { yaml_token_delete(&token); return 0; } return 1; } /* * Produce the SCALAR(...,plain) token. */ static int yaml_parser_fetch_plain_scalar(yaml_parser_t *parser) { yaml_token_t token; /* A plain scalar could be a simple key. */ if (!yaml_parser_save_simple_key(parser)) return 0; /* A simple key cannot follow a flow scalar. */ parser->simple_key_allowed = 0; /* Create the SCALAR token and append it to the queue. */ if (!yaml_parser_scan_plain_scalar(parser, &token)) return 0; if (!ENQUEUE(parser, parser->tokens, token)) { yaml_token_delete(&token); return 0; } return 1; } /* * Eat whitespaces and comments until the next token is found. */ static int yaml_parser_scan_to_next_token(yaml_parser_t *parser) { /* Until the next token is not found. */ while (1) { /* Allow the BOM mark to start a line. */ if (!CACHE(parser, 1)) return 0; if (parser->mark.column == 0 && IS_BOM(parser->buffer)) SKIP(parser); /* * Eat whitespaces. * * Tabs are allowed: * * - in the flow context; * - in the block context, but not at the beginning of the line or * after '-', '?', or ':' (complex value). */ if (!CACHE(parser, 1)) return 0; while (CHECK(parser->buffer,' ') || ((parser->flow_level || !parser->simple_key_allowed) && CHECK(parser->buffer, '\t'))) { SKIP(parser); if (!CACHE(parser, 1)) return 0; } /* Eat a comment until a line break. */ if (CHECK(parser->buffer, '#')) { while (!IS_BREAKZ(parser->buffer)) { SKIP(parser); if (!CACHE(parser, 1)) return 0; } } /* If it is a line break, eat it. */ if (IS_BREAK(parser->buffer)) { if (!CACHE(parser, 2)) return 0; SKIP_LINE(parser); /* In the block context, a new line may start a simple key. 
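 * For instance, after the line break that ends 'foo: 1' is consumed, a
 * scalar such as 'bar' at the start of the next line may again be saved
 * as the simple key of a 'bar: 2' entry.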
*/ if (!parser->flow_level) { parser->simple_key_allowed = 1; } } else { /* We have found a token. */ break; } } return 1; } /* * Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token. * * Scope: * %YAML 1.1 # a comment \n * ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * %TAG !yaml! tag:yaml.org,2002: \n * ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ */ int yaml_parser_scan_directive(yaml_parser_t *parser, yaml_token_t *token) { yaml_mark_t start_mark, end_mark; yaml_char_t *name = NULL; int major, minor; yaml_char_t *handle = NULL, *prefix = NULL; /* Eat '%'. */ start_mark = parser->mark; SKIP(parser); /* Scan the directive name. */ if (!yaml_parser_scan_directive_name(parser, start_mark, &name)) goto error; /* Is it a YAML directive? */ if (strcmp((char *)name, "YAML") == 0) { /* Scan the VERSION directive value. */ if (!yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor)) goto error; end_mark = parser->mark; /* Create a VERSION-DIRECTIVE token. */ VERSION_DIRECTIVE_TOKEN_INIT(*token, major, minor, start_mark, end_mark); } /* Is it a TAG directive? */ else if (strcmp((char *)name, "TAG") == 0) { /* Scan the TAG directive value. */ if (!yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix)) goto error; end_mark = parser->mark; /* Create a TAG-DIRECTIVE token. */ TAG_DIRECTIVE_TOKEN_INIT(*token, handle, prefix, start_mark, end_mark); } /* Unknown directive. */ else { yaml_parser_set_scanner_error(parser, "while scanning a directive", start_mark, "found unknown directive name"); goto error; } /* Eat the rest of the line including any comments. */ if (!CACHE(parser, 1)) goto error; while (IS_BLANK(parser->buffer)) { SKIP(parser); if (!CACHE(parser, 1)) goto error; } if (CHECK(parser->buffer, '#')) { while (!IS_BREAKZ(parser->buffer)) { SKIP(parser); if (!CACHE(parser, 1)) goto error; } } /* Check if we are at the end of the line. */ if (!IS_BREAKZ(parser->buffer)) { yaml_parser_set_scanner_error(parser, "while scanning a directive", start_mark, "did not find expected comment or line break"); goto error; } /* Eat a line break. */ if (IS_BREAK(parser->buffer)) { if (!CACHE(parser, 2)) goto error; SKIP_LINE(parser); } yaml_free(name); return 1; error: yaml_free(prefix); yaml_free(handle); yaml_free(name); return 0; } /* * Scan the directive name. * * Scope: * %YAML 1.1 # a comment \n * ^^^^ * %TAG !yaml! tag:yaml.org,2002: \n * ^^^ */ static int yaml_parser_scan_directive_name(yaml_parser_t *parser, yaml_mark_t start_mark, yaml_char_t **name) { yaml_string_t string = NULL_STRING; if (!STRING_INIT(parser, string, INITIAL_STRING_SIZE)) goto error; /* Consume the directive name. */ if (!CACHE(parser, 1)) goto error; while (IS_ALPHA(parser->buffer)) { if (!READ(parser, string)) goto error; if (!CACHE(parser, 1)) goto error; } /* Check if the name is empty. */ if (string.start == string.pointer) { yaml_parser_set_scanner_error(parser, "while scanning a directive", start_mark, "could not find expected directive name"); goto error; } /* Check for an blank character after the name. */ if (!IS_BLANKZ(parser->buffer)) { yaml_parser_set_scanner_error(parser, "while scanning a directive", start_mark, "found unexpected non-alphabetical character"); goto error; } *name = string.start; return 1; error: STRING_DEL(parser, string); return 0; } /* * Scan the value of VERSION-DIRECTIVE. * * Scope: * %YAML 1.1 # a comment \n * ^^^^^^ */ static int yaml_parser_scan_version_directive_value(yaml_parser_t *parser, yaml_mark_t start_mark, int *major, int *minor) { /* Eat whitespaces. 
*/ if (!CACHE(parser, 1)) return 0; while (IS_BLANK(parser->buffer)) { SKIP(parser); if (!CACHE(parser, 1)) return 0; } /* Consume the major version number. */ if (!yaml_parser_scan_version_directive_number(parser, start_mark, major)) return 0; /* Eat '.'. */ if (!CHECK(parser->buffer, '.')) { return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", start_mark, "did not find expected digit or '.' character"); } SKIP(parser); /* Consume the minor version number. */ if (!yaml_parser_scan_version_directive_number(parser, start_mark, minor)) return 0; return 1; } #define MAX_NUMBER_LENGTH 9 /* * Scan the version number of VERSION-DIRECTIVE. * * Scope: * %YAML 1.1 # a comment \n * ^ * %YAML 1.1 # a comment \n * ^ */ static int yaml_parser_scan_version_directive_number(yaml_parser_t *parser, yaml_mark_t start_mark, int *number) { int value = 0; size_t length = 0; /* Repeat while the next character is digit. */ if (!CACHE(parser, 1)) return 0; while (IS_DIGIT(parser->buffer)) { /* Check if the number is too long. */ if (++length > MAX_NUMBER_LENGTH) { return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", start_mark, "found extremely long version number"); } value = value*10 + AS_DIGIT(parser->buffer); SKIP(parser); if (!CACHE(parser, 1)) return 0; } /* Check if the number was present. */ if (!length) { return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", start_mark, "did not find expected version number"); } *number = value; return 1; } /* * Scan the value of a TAG-DIRECTIVE token. * * Scope: * %TAG !yaml! tag:yaml.org,2002: \n * ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ */ static int yaml_parser_scan_tag_directive_value(yaml_parser_t *parser, yaml_mark_t start_mark, yaml_char_t **handle, yaml_char_t **prefix) { yaml_char_t *handle_value = NULL; yaml_char_t *prefix_value = NULL; /* Eat whitespaces. */ if (!CACHE(parser, 1)) goto error; while (IS_BLANK(parser->buffer)) { SKIP(parser); if (!CACHE(parser, 1)) goto error; } /* Scan a handle. */ if (!yaml_parser_scan_tag_handle(parser, 1, start_mark, &handle_value)) goto error; /* Expect a whitespace. */ if (!CACHE(parser, 1)) goto error; if (!IS_BLANK(parser->buffer)) { yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", start_mark, "did not find expected whitespace"); goto error; } /* Eat whitespaces. */ while (IS_BLANK(parser->buffer)) { SKIP(parser); if (!CACHE(parser, 1)) goto error; } /* Scan a prefix. */ if (!yaml_parser_scan_tag_uri(parser, 1, NULL, start_mark, &prefix_value)) goto error; /* Expect a whitespace or line break. */ if (!CACHE(parser, 1)) goto error; if (!IS_BLANKZ(parser->buffer)) { yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", start_mark, "did not find expected whitespace or line break"); goto error; } *handle = handle_value; *prefix = prefix_value; return 1; error: yaml_free(handle_value); yaml_free(prefix_value); return 0; } static int yaml_parser_scan_anchor(yaml_parser_t *parser, yaml_token_t *token, yaml_token_type_t type) { int length = 0; yaml_mark_t start_mark, end_mark; yaml_string_t string = NULL_STRING; if (!STRING_INIT(parser, string, INITIAL_STRING_SIZE)) goto error; /* Eat the indicator character. */ start_mark = parser->mark; SKIP(parser); /* Consume the value. 
*/ if (!CACHE(parser, 1)) goto error; while (IS_ALPHA(parser->buffer)) { if (!READ(parser, string)) goto error; if (!CACHE(parser, 1)) goto error; length ++; } end_mark = parser->mark; /* * Check if length of the anchor is greater than 0 and it is followed by * a whitespace character or one of the indicators: * * '?', ':', ',', ']', '}', '%', '@', '`'. */ if (!length || !(IS_BLANKZ(parser->buffer) || CHECK(parser->buffer, '?') || CHECK(parser->buffer, ':') || CHECK(parser->buffer, ',') || CHECK(parser->buffer, ']') || CHECK(parser->buffer, '}') || CHECK(parser->buffer, '%') || CHECK(parser->buffer, '@') || CHECK(parser->buffer, '`'))) { yaml_parser_set_scanner_error(parser, type == YAML_ANCHOR_TOKEN ? "while scanning an anchor" : "while scanning an alias", start_mark, "did not find expected alphabetic or numeric character"); goto error; } /* Create a token. */ if (type == YAML_ANCHOR_TOKEN) { ANCHOR_TOKEN_INIT(*token, string.start, start_mark, end_mark); } else { ALIAS_TOKEN_INIT(*token, string.start, start_mark, end_mark); } return 1; error: STRING_DEL(parser, string); return 0; } /* * Scan a TAG token. */ static int yaml_parser_scan_tag(yaml_parser_t *parser, yaml_token_t *token) { yaml_char_t *handle = NULL; yaml_char_t *suffix = NULL; yaml_mark_t start_mark, end_mark; start_mark = parser->mark; /* Check if the tag is in the canonical form. */ if (!CACHE(parser, 2)) goto error; if (CHECK_AT(parser->buffer, '<', 1)) { /* Set the handle to '' */ handle = yaml_malloc(1); if (!handle) goto error; handle[0] = '\0'; /* Eat '!<' */ SKIP(parser); SKIP(parser); /* Consume the tag value. */ if (!yaml_parser_scan_tag_uri(parser, 0, NULL, start_mark, &suffix)) goto error; /* Check for '>' and eat it. */ if (!CHECK(parser->buffer, '>')) { yaml_parser_set_scanner_error(parser, "while scanning a tag", start_mark, "did not find the expected '>'"); goto error; } SKIP(parser); } else { /* The tag has either the '!suffix' or the '!handle!suffix' form. */ /* First, try to scan a handle. */ if (!yaml_parser_scan_tag_handle(parser, 0, start_mark, &handle)) goto error; /* Check if it is, indeed, handle. */ if (handle[0] == '!' && handle[1] != '\0' && handle[strlen((char *)handle)-1] == '!') { /* Scan the suffix now. */ if (!yaml_parser_scan_tag_uri(parser, 0, NULL, start_mark, &suffix)) goto error; } else { /* It wasn't a handle after all. Scan the rest of the tag. */ if (!yaml_parser_scan_tag_uri(parser, 0, handle, start_mark, &suffix)) goto error; /* Set the handle to '!'. */ yaml_free(handle); handle = yaml_malloc(2); if (!handle) goto error; handle[0] = '!'; handle[1] = '\0'; /* * A special case: the '!' tag. Set the handle to '' and the * suffix to '!'. */ if (suffix[0] == '\0') { yaml_char_t *tmp = handle; handle = suffix; suffix = tmp; } } } /* Check the character which ends the tag. */ if (!CACHE(parser, 1)) goto error; if (!IS_BLANKZ(parser->buffer)) { yaml_parser_set_scanner_error(parser, "while scanning a tag", start_mark, "did not find expected whitespace or line break"); goto error; } end_mark = parser->mark; /* Create a token. */ TAG_TOKEN_INIT(*token, handle, suffix, start_mark, end_mark); return 1; error: yaml_free(handle); yaml_free(suffix); return 0; } /* * Scan a tag handle. */ static int yaml_parser_scan_tag_handle(yaml_parser_t *parser, int directive, yaml_mark_t start_mark, yaml_char_t **handle) { yaml_string_t string = NULL_STRING; if (!STRING_INIT(parser, string, INITIAL_STRING_SIZE)) goto error; /* Check the initial '!' character. 
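 *
 * (Tag handles always begin with '!'.  For example, the tag '!!str' is
 * scanned as the handle "!!" with the suffix "str", the tag '!foo' as the
 * handle "!" with the suffix "foo", and the verbatim form
 * '!<tag:yaml.org,2002:str>' as an empty handle with the suffix
 * "tag:yaml.org,2002:str".)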
*/ if (!CACHE(parser, 1)) goto error; if (!CHECK(parser->buffer, '!')) { yaml_parser_set_scanner_error(parser, directive ? "while scanning a tag directive" : "while scanning a tag", start_mark, "did not find expected '!'"); goto error; } /* Copy the '!' character. */ if (!READ(parser, string)) goto error; /* Copy all subsequent alphabetical and numerical characters. */ if (!CACHE(parser, 1)) goto error; while (IS_ALPHA(parser->buffer)) { if (!READ(parser, string)) goto error; if (!CACHE(parser, 1)) goto error; } /* Check if the trailing character is '!' and copy it. */ if (CHECK(parser->buffer, '!')) { if (!READ(parser, string)) goto error; } else { /* * It's either the '!' tag or not really a tag handle. If it's a %TAG * directive, it's an error. If it's a tag token, it must be a part of * URI. */ if (directive && !(string.start[0] == '!' && string.start[1] == '\0')) { yaml_parser_set_scanner_error(parser, "while parsing a tag directive", start_mark, "did not find expected '!'"); goto error; } } *handle = string.start; return 1; error: STRING_DEL(parser, string); return 0; } /* * Scan a tag. */ static int yaml_parser_scan_tag_uri(yaml_parser_t *parser, int directive, yaml_char_t *head, yaml_mark_t start_mark, yaml_char_t **uri) { size_t length = head ? strlen((char *)head) : 0; yaml_string_t string = NULL_STRING; if (!STRING_INIT(parser, string, INITIAL_STRING_SIZE)) goto error; /* Resize the string to include the head. */ while ((size_t)(string.end - string.start) <= length) { if (!yaml_string_extend(&string.start, &string.pointer, &string.end)) { parser->error = YAML_MEMORY_ERROR; goto error; } } /* * Copy the head if needed. * * Note that we don't copy the leading '!' character. */ if (length > 1) { memcpy(string.start, head+1, length-1); string.pointer += length-1; } /* Scan the tag. */ if (!CACHE(parser, 1)) goto error; /* * The set of characters that may appear in URI is as follows: * * '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&', * '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']', * '%'. */ while (IS_ALPHA(parser->buffer) || CHECK(parser->buffer, ';') || CHECK(parser->buffer, '/') || CHECK(parser->buffer, '?') || CHECK(parser->buffer, ':') || CHECK(parser->buffer, '@') || CHECK(parser->buffer, '&') || CHECK(parser->buffer, '=') || CHECK(parser->buffer, '+') || CHECK(parser->buffer, '$') || CHECK(parser->buffer, ',') || CHECK(parser->buffer, '.') || CHECK(parser->buffer, '!') || CHECK(parser->buffer, '~') || CHECK(parser->buffer, '*') || CHECK(parser->buffer, '\'') || CHECK(parser->buffer, '(') || CHECK(parser->buffer, ')') || CHECK(parser->buffer, '[') || CHECK(parser->buffer, ']') || CHECK(parser->buffer, '%')) { /* Check if it is a URI-escape sequence. */ if (CHECK(parser->buffer, '%')) { if (!STRING_EXTEND(parser, string)) goto error; if (!yaml_parser_scan_uri_escapes(parser, directive, start_mark, &string)) goto error; } else { if (!READ(parser, string)) goto error; } length ++; if (!CACHE(parser, 1)) goto error; } /* Check if the tag is non-empty. */ if (!length) { if (!STRING_EXTEND(parser, string)) goto error; yaml_parser_set_scanner_error(parser, directive ? "while parsing a %TAG directive" : "while parsing a tag", start_mark, "did not find expected tag URI"); goto error; } *uri = string.start; return 1; error: STRING_DEL(parser, string); return 0; } /* * Decode an URI-escape sequence corresponding to a single UTF-8 character. 
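 *
 * For example, the sequence '%C3%A9' yields the two octets 0xC3 0xA9,
 * which is the UTF-8 encoding of U+00E9.  The leading octet 0xC3 matches
 * the pattern 110xxxxx, so exactly one more escaped octet is expected,
 * and it must match the trailing pattern 10xxxxxx.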
*/ static int yaml_parser_scan_uri_escapes(yaml_parser_t *parser, int directive, yaml_mark_t start_mark, yaml_string_t *string) { int width = 0; /* Decode the required number of characters. */ do { unsigned char octet = 0; /* Check for a URI-escaped octet. */ if (!CACHE(parser, 3)) return 0; if (!(CHECK(parser->buffer, '%') && IS_HEX_AT(parser->buffer, 1) && IS_HEX_AT(parser->buffer, 2))) { return yaml_parser_set_scanner_error(parser, directive ? "while parsing a %TAG directive" : "while parsing a tag", start_mark, "did not find URI escaped octet"); } /* Get the octet. */ octet = (AS_HEX_AT(parser->buffer, 1) << 4) + AS_HEX_AT(parser->buffer, 2); /* If it is the leading octet, determine the length of the UTF-8 sequence. */ if (!width) { width = (octet & 0x80) == 0x00 ? 1 : (octet & 0xE0) == 0xC0 ? 2 : (octet & 0xF0) == 0xE0 ? 3 : (octet & 0xF8) == 0xF0 ? 4 : 0; if (!width) { return yaml_parser_set_scanner_error(parser, directive ? "while parsing a %TAG directive" : "while parsing a tag", start_mark, "found an incorrect leading UTF-8 octet"); } } else { /* Check if the trailing octet is correct. */ if ((octet & 0xC0) != 0x80) { return yaml_parser_set_scanner_error(parser, directive ? "while parsing a %TAG directive" : "while parsing a tag", start_mark, "found an incorrect trailing UTF-8 octet"); } } /* Copy the octet and move the pointers. */ *(string->pointer++) = octet; SKIP(parser); SKIP(parser); SKIP(parser); } while (--width); return 1; } /* * Scan a block scalar. */ static int yaml_parser_scan_block_scalar(yaml_parser_t *parser, yaml_token_t *token, int literal) { yaml_mark_t start_mark; yaml_mark_t end_mark; yaml_string_t string = NULL_STRING; yaml_string_t leading_break = NULL_STRING; yaml_string_t trailing_breaks = NULL_STRING; int chomping = 0; int increment = 0; int indent = 0; int leading_blank = 0; int trailing_blank = 0; if (!STRING_INIT(parser, string, INITIAL_STRING_SIZE)) goto error; if (!STRING_INIT(parser, leading_break, INITIAL_STRING_SIZE)) goto error; if (!STRING_INIT(parser, trailing_breaks, INITIAL_STRING_SIZE)) goto error; /* Eat the indicator '|' or '>'. */ start_mark = parser->mark; SKIP(parser); /* Scan the additional block scalar indicators. */ if (!CACHE(parser, 1)) goto error; /* Check for a chomping indicator. */ if (CHECK(parser->buffer, '+') || CHECK(parser->buffer, '-')) { /* Set the chomping method and eat the indicator. */ chomping = CHECK(parser->buffer, '+') ? +1 : -1; SKIP(parser); /* Check for an indentation indicator. */ if (!CACHE(parser, 1)) goto error; if (IS_DIGIT(parser->buffer)) { /* Check that the indentation is greater than 0. */ if (CHECK(parser->buffer, '0')) { yaml_parser_set_scanner_error(parser, "while scanning a block scalar", start_mark, "found an indentation indicator equal to 0"); goto error; } /* Get the indentation level and eat the indicator. */ increment = AS_DIGIT(parser->buffer); SKIP(parser); } } /* Do the same as above, but in the opposite order. */ else if (IS_DIGIT(parser->buffer)) { if (CHECK(parser->buffer, '0')) { yaml_parser_set_scanner_error(parser, "while scanning a block scalar", start_mark, "found an indentation indicator equal to 0"); goto error; } increment = AS_DIGIT(parser->buffer); SKIP(parser); if (!CACHE(parser, 1)) goto error; if (CHECK(parser->buffer, '+') || CHECK(parser->buffer, '-')) { chomping = CHECK(parser->buffer, '+') ? +1 : -1; SKIP(parser); } } /* Eat whitespaces and comments to the end of the line. 
*/ if (!CACHE(parser, 1)) goto error; while (IS_BLANK(parser->buffer)) { SKIP(parser); if (!CACHE(parser, 1)) goto error; } if (CHECK(parser->buffer, '#')) { while (!IS_BREAKZ(parser->buffer)) { SKIP(parser); if (!CACHE(parser, 1)) goto error; } } /* Check if we are at the end of the line. */ if (!IS_BREAKZ(parser->buffer)) { yaml_parser_set_scanner_error(parser, "while scanning a block scalar", start_mark, "did not find expected comment or line break"); goto error; } /* Eat a line break. */ if (IS_BREAK(parser->buffer)) { if (!CACHE(parser, 2)) goto error; SKIP_LINE(parser); } end_mark = parser->mark; /* Set the indentation level if it was specified. */ if (increment) { indent = parser->indent >= 0 ? parser->indent+increment : increment; } /* Scan the leading line breaks and determine the indentation level if needed. */ if (!yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark)) goto error; /* Scan the block scalar content. */ if (!CACHE(parser, 1)) goto error; while ((int)parser->mark.column == indent && !IS_Z(parser->buffer)) { /* * We are at the beginning of a non-empty line. */ /* Is it a trailing whitespace? */ trailing_blank = IS_BLANK(parser->buffer); /* Check if we need to fold the leading line break. */ if (!literal && (*leading_break.start == '\n') && !leading_blank && !trailing_blank) { /* Do we need to join the lines by space? */ if (*trailing_breaks.start == '\0') { if (!STRING_EXTEND(parser, string)) goto error; *(string.pointer ++) = ' '; } CLEAR(parser, leading_break); } else { if (!JOIN(parser, string, leading_break)) goto error; CLEAR(parser, leading_break); } /* Append the remaining line breaks. */ if (!JOIN(parser, string, trailing_breaks)) goto error; CLEAR(parser, trailing_breaks); /* Is it a leading whitespace? */ leading_blank = IS_BLANK(parser->buffer); /* Consume the current line. */ while (!IS_BREAKZ(parser->buffer)) { if (!READ(parser, string)) goto error; if (!CACHE(parser, 1)) goto error; } /* Consume the line break. */ if (!CACHE(parser, 2)) goto error; if (!READ_LINE(parser, leading_break)) goto error; /* Eat the following indentation spaces and line breaks. */ if (!yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark)) goto error; } /* Chomp the tail. */ if (chomping != -1) { if (!JOIN(parser, string, leading_break)) goto error; } if (chomping == 1) { if (!JOIN(parser, string, trailing_breaks)) goto error; } /* Create a token. */ SCALAR_TOKEN_INIT(*token, string.start, string.pointer-string.start, literal ? YAML_LITERAL_SCALAR_STYLE : YAML_FOLDED_SCALAR_STYLE, start_mark, end_mark); STRING_DEL(parser, leading_break); STRING_DEL(parser, trailing_breaks); return 1; error: STRING_DEL(parser, string); STRING_DEL(parser, leading_break); STRING_DEL(parser, trailing_breaks); return 0; } /* * Scan indentation spaces and line breaks for a block scalar. Determine the * indentation level if needed. */ static int yaml_parser_scan_block_scalar_breaks(yaml_parser_t *parser, int *indent, yaml_string_t *breaks, yaml_mark_t start_mark, yaml_mark_t *end_mark) { int max_indent = 0; *end_mark = parser->mark; /* Eat the indentation spaces and line breaks. */ while (1) { /* Eat the indentation spaces. 
*/ if (!CACHE(parser, 1)) return 0; while ((!*indent || (int)parser->mark.column < *indent) && IS_SPACE(parser->buffer)) { SKIP(parser); if (!CACHE(parser, 1)) return 0; } if ((int)parser->mark.column > max_indent) max_indent = (int)parser->mark.column; /* Check for a tab character messing the indentation. */ if ((!*indent || (int)parser->mark.column < *indent) && IS_TAB(parser->buffer)) { return yaml_parser_set_scanner_error(parser, "while scanning a block scalar", start_mark, "found a tab character where an indentation space is expected"); } /* Have we found a non-empty line? */ if (!IS_BREAK(parser->buffer)) break; /* Consume the line break. */ if (!CACHE(parser, 2)) return 0; if (!READ_LINE(parser, *breaks)) return 0; *end_mark = parser->mark; } /* Determine the indentation level if needed. */ if (!*indent) { *indent = max_indent; if (*indent < parser->indent + 1) *indent = parser->indent + 1; if (*indent < 1) *indent = 1; } return 1; } /* * Scan a quoted scalar. */ static int yaml_parser_scan_flow_scalar(yaml_parser_t *parser, yaml_token_t *token, int single) { yaml_mark_t start_mark; yaml_mark_t end_mark; yaml_string_t string = NULL_STRING; yaml_string_t leading_break = NULL_STRING; yaml_string_t trailing_breaks = NULL_STRING; yaml_string_t whitespaces = NULL_STRING; int leading_blanks; if (!STRING_INIT(parser, string, INITIAL_STRING_SIZE)) goto error; if (!STRING_INIT(parser, leading_break, INITIAL_STRING_SIZE)) goto error; if (!STRING_INIT(parser, trailing_breaks, INITIAL_STRING_SIZE)) goto error; if (!STRING_INIT(parser, whitespaces, INITIAL_STRING_SIZE)) goto error; /* Eat the left quote. */ start_mark = parser->mark; SKIP(parser); /* Consume the content of the quoted scalar. */ while (1) { /* Check that there are no document indicators at the beginning of the line. */ if (!CACHE(parser, 4)) goto error; if (parser->mark.column == 0 && ((CHECK_AT(parser->buffer, '-', 0) && CHECK_AT(parser->buffer, '-', 1) && CHECK_AT(parser->buffer, '-', 2)) || (CHECK_AT(parser->buffer, '.', 0) && CHECK_AT(parser->buffer, '.', 1) && CHECK_AT(parser->buffer, '.', 2))) && IS_BLANKZ_AT(parser->buffer, 3)) { yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", start_mark, "found unexpected document indicator"); goto error; } /* Check for EOF. */ if (IS_Z(parser->buffer)) { yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", start_mark, "found unexpected end of stream"); goto error; } /* Consume non-blank characters. */ if (!CACHE(parser, 2)) goto error; leading_blanks = 0; while (!IS_BLANKZ(parser->buffer)) { /* Check for an escaped single quote. */ if (single && CHECK_AT(parser->buffer, '\'', 0) && CHECK_AT(parser->buffer, '\'', 1)) { if (!STRING_EXTEND(parser, string)) goto error; *(string.pointer++) = '\''; SKIP(parser); SKIP(parser); } /* Check for the right quote. */ else if (CHECK(parser->buffer, single ? '\'' : '"')) { break; } /* Check for an escaped line break. */ else if (!single && CHECK(parser->buffer, '\\') && IS_BREAK_AT(parser->buffer, 1)) { if (!CACHE(parser, 3)) goto error; SKIP(parser); SKIP_LINE(parser); leading_blanks = 1; break; } /* Check for an escape sequence. */ else if (!single && CHECK(parser->buffer, '\\')) { size_t code_length = 0; if (!STRING_EXTEND(parser, string)) goto error; /* Check the escape character. 
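                 *
                 * Single-character escapes map directly onto control
                 * characters or UTF-8 encoded code points; '\x', '\u' and
                 * '\U' set code_length to 2, 4 or 8, and that many
                 * hexadecimal digits are then decoded and re-encoded as
                 * UTF-8 below.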
*/ switch (parser->buffer.pointer[1]) { case '0': *(string.pointer++) = '\0'; break; case 'a': *(string.pointer++) = '\x07'; break; case 'b': *(string.pointer++) = '\x08'; break; case 't': case '\t': *(string.pointer++) = '\x09'; break; case 'n': *(string.pointer++) = '\x0A'; break; case 'v': *(string.pointer++) = '\x0B'; break; case 'f': *(string.pointer++) = '\x0C'; break; case 'r': *(string.pointer++) = '\x0D'; break; case 'e': *(string.pointer++) = '\x1B'; break; case ' ': *(string.pointer++) = '\x20'; break; case '"': *(string.pointer++) = '"'; break; case '\'': *(string.pointer++) = '\''; break; case '\\': *(string.pointer++) = '\\'; break; case 'N': /* NEL (#x85) */ *(string.pointer++) = '\xC2'; *(string.pointer++) = '\x85'; break; case '_': /* #xA0 */ *(string.pointer++) = '\xC2'; *(string.pointer++) = '\xA0'; break; case 'L': /* LS (#x2028) */ *(string.pointer++) = '\xE2'; *(string.pointer++) = '\x80'; *(string.pointer++) = '\xA8'; break; case 'P': /* PS (#x2029) */ *(string.pointer++) = '\xE2'; *(string.pointer++) = '\x80'; *(string.pointer++) = '\xA9'; break; case 'x': code_length = 2; break; case 'u': code_length = 4; break; case 'U': code_length = 8; break; default: yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", start_mark, "found unknown escape character"); goto error; } SKIP(parser); SKIP(parser); /* Consume an arbitrary escape code. */ if (code_length) { unsigned int value = 0; size_t k; /* Scan the character value. */ if (!CACHE(parser, code_length)) goto error; for (k = 0; k < code_length; k ++) { if (!IS_HEX_AT(parser->buffer, k)) { yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", start_mark, "did not find expected hexdecimal number"); goto error; } value = (value << 4) + AS_HEX_AT(parser->buffer, k); } /* Check the value and write the character. */ if ((value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF) { yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", start_mark, "found invalid Unicode character escape code"); goto error; } if (value <= 0x7F) { *(string.pointer++) = value; } else if (value <= 0x7FF) { *(string.pointer++) = 0xC0 + (value >> 6); *(string.pointer++) = 0x80 + (value & 0x3F); } else if (value <= 0xFFFF) { *(string.pointer++) = 0xE0 + (value >> 12); *(string.pointer++) = 0x80 + ((value >> 6) & 0x3F); *(string.pointer++) = 0x80 + (value & 0x3F); } else { *(string.pointer++) = 0xF0 + (value >> 18); *(string.pointer++) = 0x80 + ((value >> 12) & 0x3F); *(string.pointer++) = 0x80 + ((value >> 6) & 0x3F); *(string.pointer++) = 0x80 + (value & 0x3F); } /* Advance the pointer. */ for (k = 0; k < code_length; k ++) { SKIP(parser); } } } else { /* It is a non-escaped non-blank character. */ if (!READ(parser, string)) goto error; } if (!CACHE(parser, 2)) goto error; } /* Check if we are at the end of the scalar. */ if (CHECK(parser->buffer, single ? '\'' : '"')) break; /* Consume blank characters. */ if (!CACHE(parser, 1)) goto error; while (IS_BLANK(parser->buffer) || IS_BREAK(parser->buffer)) { if (IS_BLANK(parser->buffer)) { /* Consume a space or a tab character. */ if (!leading_blanks) { if (!READ(parser, whitespaces)) goto error; } else { SKIP(parser); } } else { if (!CACHE(parser, 2)) goto error; /* Check if it is a first line break. 
*/ if (!leading_blanks) { CLEAR(parser, whitespaces); if (!READ_LINE(parser, leading_break)) goto error; leading_blanks = 1; } else { if (!READ_LINE(parser, trailing_breaks)) goto error; } } if (!CACHE(parser, 1)) goto error; } /* Join the whitespaces or fold line breaks. */ if (leading_blanks) { /* Do we need to fold line breaks? */ if (leading_break.start[0] == '\n') { if (trailing_breaks.start[0] == '\0') { if (!STRING_EXTEND(parser, string)) goto error; *(string.pointer++) = ' '; } else { if (!JOIN(parser, string, trailing_breaks)) goto error; CLEAR(parser, trailing_breaks); } CLEAR(parser, leading_break); } else { if (!JOIN(parser, string, leading_break)) goto error; if (!JOIN(parser, string, trailing_breaks)) goto error; CLEAR(parser, leading_break); CLEAR(parser, trailing_breaks); } } else { if (!JOIN(parser, string, whitespaces)) goto error; CLEAR(parser, whitespaces); } } /* Eat the right quote. */ SKIP(parser); end_mark = parser->mark; /* Create a token. */ SCALAR_TOKEN_INIT(*token, string.start, string.pointer-string.start, single ? YAML_SINGLE_QUOTED_SCALAR_STYLE : YAML_DOUBLE_QUOTED_SCALAR_STYLE, start_mark, end_mark); STRING_DEL(parser, leading_break); STRING_DEL(parser, trailing_breaks); STRING_DEL(parser, whitespaces); return 1; error: STRING_DEL(parser, string); STRING_DEL(parser, leading_break); STRING_DEL(parser, trailing_breaks); STRING_DEL(parser, whitespaces); return 0; } /* * Scan a plain scalar. */ static int yaml_parser_scan_plain_scalar(yaml_parser_t *parser, yaml_token_t *token) { yaml_mark_t start_mark; yaml_mark_t end_mark; yaml_string_t string = NULL_STRING; yaml_string_t leading_break = NULL_STRING; yaml_string_t trailing_breaks = NULL_STRING; yaml_string_t whitespaces = NULL_STRING; int leading_blanks = 0; int indent = parser->indent+1; if (!STRING_INIT(parser, string, INITIAL_STRING_SIZE)) goto error; if (!STRING_INIT(parser, leading_break, INITIAL_STRING_SIZE)) goto error; if (!STRING_INIT(parser, trailing_breaks, INITIAL_STRING_SIZE)) goto error; if (!STRING_INIT(parser, whitespaces, INITIAL_STRING_SIZE)) goto error; start_mark = end_mark = parser->mark; /* Consume the content of the plain scalar. */ while (1) { /* Check for a document indicator. */ if (!CACHE(parser, 4)) goto error; if (parser->mark.column == 0 && ((CHECK_AT(parser->buffer, '-', 0) && CHECK_AT(parser->buffer, '-', 1) && CHECK_AT(parser->buffer, '-', 2)) || (CHECK_AT(parser->buffer, '.', 0) && CHECK_AT(parser->buffer, '.', 1) && CHECK_AT(parser->buffer, '.', 2))) && IS_BLANKZ_AT(parser->buffer, 3)) break; /* Check for a comment. */ if (CHECK(parser->buffer, '#')) break; /* Consume non-blank characters. */ while (!IS_BLANKZ(parser->buffer)) { /* Check for 'x:x' in the flow context. TODO: Fix the test "spec-08-13". */ if (parser->flow_level && CHECK(parser->buffer, ':') && !IS_BLANKZ_AT(parser->buffer, 1)) { yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", start_mark, "found unexpected ':'"); goto error; } /* Check for indicators that may end a plain scalar. */ if ((CHECK(parser->buffer, ':') && IS_BLANKZ_AT(parser->buffer, 1)) || (parser->flow_level && (CHECK(parser->buffer, ',') || CHECK(parser->buffer, ':') || CHECK(parser->buffer, '?') || CHECK(parser->buffer, '[') || CHECK(parser->buffer, ']') || CHECK(parser->buffer, '{') || CHECK(parser->buffer, '}')))) break; /* Check if we need to join whitespaces and breaks. */ if (leading_blanks || whitespaces.start != whitespaces.pointer) { if (leading_blanks) { /* Do we need to fold line breaks? 
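                     *
                     * A single '\n' break between two content lines folds
                     * into one space; if blank lines follow, the accumulated
                     * trailing breaks are copied instead and the leading
                     * break is dropped.  Other break characters are copied
                     * verbatim.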
*/ if (leading_break.start[0] == '\n') { if (trailing_breaks.start[0] == '\0') { if (!STRING_EXTEND(parser, string)) goto error; *(string.pointer++) = ' '; } else { if (!JOIN(parser, string, trailing_breaks)) goto error; CLEAR(parser, trailing_breaks); } CLEAR(parser, leading_break); } else { if (!JOIN(parser, string, leading_break)) goto error; if (!JOIN(parser, string, trailing_breaks)) goto error; CLEAR(parser, leading_break); CLEAR(parser, trailing_breaks); } leading_blanks = 0; } else { if (!JOIN(parser, string, whitespaces)) goto error; CLEAR(parser, whitespaces); } } /* Copy the character. */ if (!READ(parser, string)) goto error; end_mark = parser->mark; if (!CACHE(parser, 2)) goto error; } /* Is it the end? */ if (!(IS_BLANK(parser->buffer) || IS_BREAK(parser->buffer))) break; /* Consume blank characters. */ if (!CACHE(parser, 1)) goto error; while (IS_BLANK(parser->buffer) || IS_BREAK(parser->buffer)) { if (IS_BLANK(parser->buffer)) { /* Check for tab character that abuse indentation. */ if (leading_blanks && (int)parser->mark.column < indent && IS_TAB(parser->buffer)) { yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", start_mark, "found a tab character that violate indentation"); goto error; } /* Consume a space or a tab character. */ if (!leading_blanks) { if (!READ(parser, whitespaces)) goto error; } else { SKIP(parser); } } else { if (!CACHE(parser, 2)) goto error; /* Check if it is a first line break. */ if (!leading_blanks) { CLEAR(parser, whitespaces); if (!READ_LINE(parser, leading_break)) goto error; leading_blanks = 1; } else { if (!READ_LINE(parser, trailing_breaks)) goto error; } } if (!CACHE(parser, 1)) goto error; } /* Check indentation level. */ if (!parser->flow_level && (int)parser->mark.column < indent) break; } /* Create a token. */ SCALAR_TOKEN_INIT(*token, string.start, string.pointer-string.start, YAML_PLAIN_SCALAR_STYLE, start_mark, end_mark); /* Note that we change the 'simple_key_allowed' flag. */ if (leading_blanks) { parser->simple_key_allowed = 1; } STRING_DEL(parser, leading_break); STRING_DEL(parser, trailing_breaks); STRING_DEL(parser, whitespaces); return 1; error: STRING_DEL(parser, string); STRING_DEL(parser, leading_break); STRING_DEL(parser, trailing_breaks); STRING_DEL(parser, whitespaces); return 0; } psych-2.2.4/ext/psych/yaml/writer.c000066400000000000000000000077141305404671600172340ustar00rootroot00000000000000 #include "yaml_private.h" /* * Declarations. */ static int yaml_emitter_set_writer_error(yaml_emitter_t *emitter, const char *problem); YAML_DECLARE(int) yaml_emitter_flush(yaml_emitter_t *emitter); /* * Set the writer error and return 0. */ static int yaml_emitter_set_writer_error(yaml_emitter_t *emitter, const char *problem) { emitter->error = YAML_WRITER_ERROR; emitter->problem = problem; return 0; } /* * Flush the output buffer. */ YAML_DECLARE(int) yaml_emitter_flush(yaml_emitter_t *emitter) { int low, high; assert(emitter); /* Non-NULL emitter object is expected. */ assert(emitter->write_handler); /* Write handler must be set. */ assert(emitter->encoding); /* Output encoding must be set. */ emitter->buffer.last = emitter->buffer.pointer; emitter->buffer.pointer = emitter->buffer.start; /* Check if the buffer is empty. */ if (emitter->buffer.start == emitter->buffer.last) { return 1; } /* If the output encoding is UTF-8, we don't need to recode the buffer. 
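     *
     * Otherwise each UTF-8 character is decoded below and re-encoded as
     * UTF-16LE or UTF-16BE into the raw buffer (using a surrogate pair for
     * code points above U+FFFF) before the raw buffer is handed to the
     * write handler.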
*/ if (emitter->encoding == YAML_UTF8_ENCODING) { if (emitter->write_handler(emitter->write_handler_data, emitter->buffer.start, emitter->buffer.last - emitter->buffer.start)) { emitter->buffer.last = emitter->buffer.start; emitter->buffer.pointer = emitter->buffer.start; return 1; } else { return yaml_emitter_set_writer_error(emitter, "write error"); } } /* Recode the buffer into the raw buffer. */ low = (emitter->encoding == YAML_UTF16LE_ENCODING ? 0 : 1); high = (emitter->encoding == YAML_UTF16LE_ENCODING ? 1 : 0); while (emitter->buffer.pointer != emitter->buffer.last) { unsigned char octet; unsigned int width; unsigned int value; size_t k; /* * See the "reader.c" code for more details on UTF-8 encoding. Note * that we assume that the buffer contains a valid UTF-8 sequence. */ /* Read the next UTF-8 character. */ octet = emitter->buffer.pointer[0]; width = (octet & 0x80) == 0x00 ? 1 : (octet & 0xE0) == 0xC0 ? 2 : (octet & 0xF0) == 0xE0 ? 3 : (octet & 0xF8) == 0xF0 ? 4 : 0; value = (octet & 0x80) == 0x00 ? octet & 0x7F : (octet & 0xE0) == 0xC0 ? octet & 0x1F : (octet & 0xF0) == 0xE0 ? octet & 0x0F : (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0; for (k = 1; k < width; k ++) { octet = emitter->buffer.pointer[k]; value = (value << 6) + (octet & 0x3F); } emitter->buffer.pointer += width; /* Write the character. */ if (value < 0x10000) { emitter->raw_buffer.last[high] = value >> 8; emitter->raw_buffer.last[low] = value & 0xFF; emitter->raw_buffer.last += 2; } else { /* Write the character using a surrogate pair (check "reader.c"). */ value -= 0x10000; emitter->raw_buffer.last[high] = 0xD8 + (value >> 18); emitter->raw_buffer.last[low] = (value >> 10) & 0xFF; emitter->raw_buffer.last[high+2] = 0xDC + ((value >> 8) & 0xFF); emitter->raw_buffer.last[low+2] = value & 0xFF; emitter->raw_buffer.last += 4; } } /* Write the raw buffer. */ if (emitter->write_handler(emitter->write_handler_data, emitter->raw_buffer.start, emitter->raw_buffer.last - emitter->raw_buffer.start)) { emitter->buffer.last = emitter->buffer.start; emitter->buffer.pointer = emitter->buffer.start; emitter->raw_buffer.last = emitter->raw_buffer.start; emitter->raw_buffer.pointer = emitter->raw_buffer.start; return 1; } else { return yaml_emitter_set_writer_error(emitter, "write error"); } } psych-2.2.4/ext/psych/yaml/yaml.h000066400000000000000000001517011305404671600166630ustar00rootroot00000000000000/** * @file yaml.h * @brief Public interface for libyaml. * * Include the header file with the code: * @code * #include * @endcode */ #ifndef YAML_H #define YAML_H #ifdef __cplusplus extern "C" { #endif #include #include #include /** * @defgroup export Export Definitions * @{ */ /** The public API declaration. */ #ifdef _WIN32 # if defined(YAML_DECLARE_STATIC) # define YAML_DECLARE(type) type # elif defined(YAML_DECLARE_EXPORT) # define YAML_DECLARE(type) __declspec(dllexport) type # else # define YAML_DECLARE(type) __declspec(dllimport) type # endif #else # define YAML_DECLARE(type) type #endif /** @} */ /** * @defgroup version Version Information * @{ */ /** * Get the library version as a string. * * @returns The function returns the pointer to a static string of the form * @c "X.Y.Z", where @c X is the major version number, @c Y is a minor version * number, and @c Z is the patch version number. */ YAML_DECLARE(const char *) yaml_get_version_string(void); /** * Get the library version numbers. * * @param[out] major Major version number. * @param[out] minor Minor version number. * @param[out] patch Patch version number. 
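 *
 * For illustration only (editor's note, not part of the upstream header),
 * the version can be queried like this:
 * @code
 *   int major, minor, patch;
 *   yaml_get_version(&major, &minor, &patch);
 *   printf("libyaml %s (%d.%d.%d)\n",
 *          yaml_get_version_string(), major, minor, patch);
 * @endcode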
*/ YAML_DECLARE(void) yaml_get_version(int *major, int *minor, int *patch); /** @} */ /** * @defgroup basic Basic Types * @{ */ /** The character type (UTF-8 octet). */ typedef unsigned char yaml_char_t; /** The version directive data. */ typedef struct yaml_version_directive_s { /** The major version number. */ int major; /** The minor version number. */ int minor; } yaml_version_directive_t; /** The tag directive data. */ typedef struct yaml_tag_directive_s { /** The tag handle. */ yaml_char_t *handle; /** The tag prefix. */ yaml_char_t *prefix; } yaml_tag_directive_t; /** The stream encoding. */ typedef enum yaml_encoding_e { /** Let the parser choose the encoding. */ YAML_ANY_ENCODING, /** The default UTF-8 encoding. */ YAML_UTF8_ENCODING, /** The UTF-16-LE encoding with BOM. */ YAML_UTF16LE_ENCODING, /** The UTF-16-BE encoding with BOM. */ YAML_UTF16BE_ENCODING } yaml_encoding_t; /** Line break types. */ typedef enum yaml_break_e { /** Let the parser choose the break type. */ YAML_ANY_BREAK, /** Use CR for line breaks (Mac style). */ YAML_CR_BREAK, /** Use LN for line breaks (Unix style). */ YAML_LN_BREAK, /** Use CR LN for line breaks (DOS style). */ YAML_CRLN_BREAK } yaml_break_t; /** Many bad things could happen with the parser and emitter. */ typedef enum yaml_error_type_e { /** No error is produced. */ YAML_NO_ERROR, /** Cannot allocate or reallocate a block of memory. */ YAML_MEMORY_ERROR, /** Cannot read or decode the input stream. */ YAML_READER_ERROR, /** Cannot scan the input stream. */ YAML_SCANNER_ERROR, /** Cannot parse the input stream. */ YAML_PARSER_ERROR, /** Cannot compose a YAML document. */ YAML_COMPOSER_ERROR, /** Cannot write to the output stream. */ YAML_WRITER_ERROR, /** Cannot emit a YAML stream. */ YAML_EMITTER_ERROR } yaml_error_type_t; /** The pointer position. */ typedef struct yaml_mark_s { /** The position index. */ size_t index; /** The position line. */ size_t line; /** The position column. */ size_t column; } yaml_mark_t; /** @} */ /** * @defgroup styles Node Styles * @{ */ /** Scalar styles. */ typedef enum yaml_scalar_style_e { /** Let the emitter choose the style. */ YAML_ANY_SCALAR_STYLE, /** The plain scalar style. */ YAML_PLAIN_SCALAR_STYLE, /** The single-quoted scalar style. */ YAML_SINGLE_QUOTED_SCALAR_STYLE, /** The double-quoted scalar style. */ YAML_DOUBLE_QUOTED_SCALAR_STYLE, /** The literal scalar style. */ YAML_LITERAL_SCALAR_STYLE, /** The folded scalar style. */ YAML_FOLDED_SCALAR_STYLE } yaml_scalar_style_t; /** Sequence styles. */ typedef enum yaml_sequence_style_e { /** Let the emitter choose the style. */ YAML_ANY_SEQUENCE_STYLE, /** The block sequence style. */ YAML_BLOCK_SEQUENCE_STYLE, /** The flow sequence style. */ YAML_FLOW_SEQUENCE_STYLE } yaml_sequence_style_t; /** Mapping styles. */ typedef enum yaml_mapping_style_e { /** Let the emitter choose the style. */ YAML_ANY_MAPPING_STYLE, /** The block mapping style. */ YAML_BLOCK_MAPPING_STYLE, /** The flow mapping style. */ YAML_FLOW_MAPPING_STYLE /* YAML_FLOW_SET_MAPPING_STYLE */ } yaml_mapping_style_t; /** @} */ /** * @defgroup tokens Tokens * @{ */ /** Token types. */ typedef enum yaml_token_type_e { /** An empty token. */ YAML_NO_TOKEN, /** A STREAM-START token. */ YAML_STREAM_START_TOKEN, /** A STREAM-END token. */ YAML_STREAM_END_TOKEN, /** A VERSION-DIRECTIVE token. */ YAML_VERSION_DIRECTIVE_TOKEN, /** A TAG-DIRECTIVE token. */ YAML_TAG_DIRECTIVE_TOKEN, /** A DOCUMENT-START token. */ YAML_DOCUMENT_START_TOKEN, /** A DOCUMENT-END token. 
*/ YAML_DOCUMENT_END_TOKEN, /** A BLOCK-SEQUENCE-START token. */ YAML_BLOCK_SEQUENCE_START_TOKEN, /** A BLOCK-SEQUENCE-END token. */ YAML_BLOCK_MAPPING_START_TOKEN, /** A BLOCK-END token. */ YAML_BLOCK_END_TOKEN, /** A FLOW-SEQUENCE-START token. */ YAML_FLOW_SEQUENCE_START_TOKEN, /** A FLOW-SEQUENCE-END token. */ YAML_FLOW_SEQUENCE_END_TOKEN, /** A FLOW-MAPPING-START token. */ YAML_FLOW_MAPPING_START_TOKEN, /** A FLOW-MAPPING-END token. */ YAML_FLOW_MAPPING_END_TOKEN, /** A BLOCK-ENTRY token. */ YAML_BLOCK_ENTRY_TOKEN, /** A FLOW-ENTRY token. */ YAML_FLOW_ENTRY_TOKEN, /** A KEY token. */ YAML_KEY_TOKEN, /** A VALUE token. */ YAML_VALUE_TOKEN, /** An ALIAS token. */ YAML_ALIAS_TOKEN, /** An ANCHOR token. */ YAML_ANCHOR_TOKEN, /** A TAG token. */ YAML_TAG_TOKEN, /** A SCALAR token. */ YAML_SCALAR_TOKEN } yaml_token_type_t; /** The token structure. */ typedef struct yaml_token_s { /** The token type. */ yaml_token_type_t type; /** The token data. */ union { /** The stream start (for @c YAML_STREAM_START_TOKEN). */ struct { /** The stream encoding. */ yaml_encoding_t encoding; } stream_start; /** The alias (for @c YAML_ALIAS_TOKEN). */ struct { /** The alias value. */ yaml_char_t *value; } alias; /** The anchor (for @c YAML_ANCHOR_TOKEN). */ struct { /** The anchor value. */ yaml_char_t *value; } anchor; /** The tag (for @c YAML_TAG_TOKEN). */ struct { /** The tag handle. */ yaml_char_t *handle; /** The tag suffix. */ yaml_char_t *suffix; } tag; /** The scalar value (for @c YAML_SCALAR_TOKEN). */ struct { /** The scalar value. */ yaml_char_t *value; /** The length of the scalar value. */ size_t length; /** The scalar style. */ yaml_scalar_style_t style; } scalar; /** The version directive (for @c YAML_VERSION_DIRECTIVE_TOKEN). */ struct { /** The major version number. */ int major; /** The minor version number. */ int minor; } version_directive; /** The tag directive (for @c YAML_TAG_DIRECTIVE_TOKEN). */ struct { /** The tag handle. */ yaml_char_t *handle; /** The tag prefix. */ yaml_char_t *prefix; } tag_directive; } data; /** The beginning of the token. */ yaml_mark_t start_mark; /** The end of the token. */ yaml_mark_t end_mark; } yaml_token_t; /** * Free any memory allocated for a token object. * * @param[in,out] token A token object. */ YAML_DECLARE(void) yaml_token_delete(yaml_token_t *token); /** @} */ /** * @defgroup events Events * @{ */ /** Event types. */ typedef enum yaml_event_type_e { /** An empty event. */ YAML_NO_EVENT, /** A STREAM-START event. */ YAML_STREAM_START_EVENT, /** A STREAM-END event. */ YAML_STREAM_END_EVENT, /** A DOCUMENT-START event. */ YAML_DOCUMENT_START_EVENT, /** A DOCUMENT-END event. */ YAML_DOCUMENT_END_EVENT, /** An ALIAS event. */ YAML_ALIAS_EVENT, /** A SCALAR event. */ YAML_SCALAR_EVENT, /** A SEQUENCE-START event. */ YAML_SEQUENCE_START_EVENT, /** A SEQUENCE-END event. */ YAML_SEQUENCE_END_EVENT, /** A MAPPING-START event. */ YAML_MAPPING_START_EVENT, /** A MAPPING-END event. */ YAML_MAPPING_END_EVENT } yaml_event_type_t; /** The event structure. */ typedef struct yaml_event_s { /** The event type. */ yaml_event_type_t type; /** The event data. */ union { /** The stream parameters (for @c YAML_STREAM_START_EVENT). */ struct { /** The document encoding. */ yaml_encoding_t encoding; } stream_start; /** The document parameters (for @c YAML_DOCUMENT_START_EVENT). */ struct { /** The version directive. */ yaml_version_directive_t *version_directive; /** The list of tag directives. */ struct { /** The beginning of the tag directives list. 
*/ yaml_tag_directive_t *start; /** The end of the tag directives list. */ yaml_tag_directive_t *end; } tag_directives; /** Is the document indicator implicit? */ int implicit; } document_start; /** The document end parameters (for @c YAML_DOCUMENT_END_EVENT). */ struct { /** Is the document end indicator implicit? */ int implicit; } document_end; /** The alias parameters (for @c YAML_ALIAS_EVENT). */ struct { /** The anchor. */ yaml_char_t *anchor; } alias; /** The scalar parameters (for @c YAML_SCALAR_EVENT). */ struct { /** The anchor. */ yaml_char_t *anchor; /** The tag. */ yaml_char_t *tag; /** The scalar value. */ yaml_char_t *value; /** The length of the scalar value. */ size_t length; /** Is the tag optional for the plain style? */ int plain_implicit; /** Is the tag optional for any non-plain style? */ int quoted_implicit; /** The scalar style. */ yaml_scalar_style_t style; } scalar; /** The sequence parameters (for @c YAML_SEQUENCE_START_EVENT). */ struct { /** The anchor. */ yaml_char_t *anchor; /** The tag. */ yaml_char_t *tag; /** Is the tag optional? */ int implicit; /** The sequence style. */ yaml_sequence_style_t style; } sequence_start; /** The mapping parameters (for @c YAML_MAPPING_START_EVENT). */ struct { /** The anchor. */ yaml_char_t *anchor; /** The tag. */ yaml_char_t *tag; /** Is the tag optional? */ int implicit; /** The mapping style. */ yaml_mapping_style_t style; } mapping_start; } data; /** The beginning of the event. */ yaml_mark_t start_mark; /** The end of the event. */ yaml_mark_t end_mark; } yaml_event_t; /** * Create the STREAM-START event. * * @param[out] event An empty event object. * @param[in] encoding The stream encoding. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_stream_start_event_initialize(yaml_event_t *event, yaml_encoding_t encoding); /** * Create the STREAM-END event. * * @param[out] event An empty event object. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_stream_end_event_initialize(yaml_event_t *event); /** * Create the DOCUMENT-START event. * * The @a implicit argument is considered as a stylistic parameter and may be * ignored by the emitter. * * @param[out] event An empty event object. * @param[in] version_directive The %YAML directive value or * @c NULL. * @param[in] tag_directives_start The beginning of the %TAG * directives list. * @param[in] tag_directives_end The end of the %TAG directives * list. * @param[in] implicit If the document start indicator is * implicit. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_document_start_event_initialize(yaml_event_t *event, yaml_version_directive_t *version_directive, yaml_tag_directive_t *tag_directives_start, yaml_tag_directive_t *tag_directives_end, int implicit); /** * Create the DOCUMENT-END event. * * The @a implicit argument is considered as a stylistic parameter and may be * ignored by the emitter. * * @param[out] event An empty event object. * @param[in] implicit If the document end indicator is implicit. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_document_end_event_initialize(yaml_event_t *event, int implicit); /** * Create an ALIAS event. * * @param[out] event An empty event object. * @param[in] anchor The anchor value. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_alias_event_initialize(yaml_event_t *event, yaml_char_t *anchor); /** * Create a SCALAR event. 
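 *
 * For illustration only (editor's sketch, not part of the upstream header),
 * a plain, implicitly tagged scalar event could be created like this:
 * @code
 *   yaml_event_t event;
 *   if (!yaml_scalar_event_initialize(&event, NULL, NULL,
 *           (yaml_char_t *)"hello", 5, 1, 1, YAML_PLAIN_SCALAR_STYLE))
 *       return 0;
 * @endcode
 * Passing a @c NULL tag together with the two implicit flags satisfies the
 * requirement stated below.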
* * The @a style argument may be ignored by the emitter. * * Either the @a tag attribute or one of the @a plain_implicit and * @a quoted_implicit flags must be set. * * @param[out] event An empty event object. * @param[in] anchor The scalar anchor or @c NULL. * @param[in] tag The scalar tag or @c NULL. * @param[in] value The scalar value. * @param[in] length The length of the scalar value. * @param[in] plain_implicit If the tag may be omitted for the plain * style. * @param[in] quoted_implicit If the tag may be omitted for any * non-plain style. * @param[in] style The scalar style. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_scalar_event_initialize(yaml_event_t *event, yaml_char_t *anchor, yaml_char_t *tag, yaml_char_t *value, int length, int plain_implicit, int quoted_implicit, yaml_scalar_style_t style); /** * Create a SEQUENCE-START event. * * The @a style argument may be ignored by the emitter. * * Either the @a tag attribute or the @a implicit flag must be set. * * @param[out] event An empty event object. * @param[in] anchor The sequence anchor or @c NULL. * @param[in] tag The sequence tag or @c NULL. * @param[in] implicit If the tag may be omitted. * @param[in] style The sequence style. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_sequence_start_event_initialize(yaml_event_t *event, yaml_char_t *anchor, yaml_char_t *tag, int implicit, yaml_sequence_style_t style); /** * Create a SEQUENCE-END event. * * @param[out] event An empty event object. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_sequence_end_event_initialize(yaml_event_t *event); /** * Create a MAPPING-START event. * * The @a style argument may be ignored by the emitter. * * Either the @a tag attribute or the @a implicit flag must be set. * * @param[out] event An empty event object. * @param[in] anchor The mapping anchor or @c NULL. * @param[in] tag The mapping tag or @c NULL. * @param[in] implicit If the tag may be omitted. * @param[in] style The mapping style. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_mapping_start_event_initialize(yaml_event_t *event, yaml_char_t *anchor, yaml_char_t *tag, int implicit, yaml_mapping_style_t style); /** * Create a MAPPING-END event. * * @param[out] event An empty event object. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_mapping_end_event_initialize(yaml_event_t *event); /** * Free any memory allocated for an event object. * * @param[in,out] event An event object. */ YAML_DECLARE(void) yaml_event_delete(yaml_event_t *event); /** @} */ /** * @defgroup nodes Nodes * @{ */ /** The tag @c !!null with the only possible value: @c null. */ #define YAML_NULL_TAG "tag:yaml.org,2002:null" /** The tag @c !!bool with the values: @c true and @c falce. */ #define YAML_BOOL_TAG "tag:yaml.org,2002:bool" /** The tag @c !!str for string values. */ #define YAML_STR_TAG "tag:yaml.org,2002:str" /** The tag @c !!int for integer values. */ #define YAML_INT_TAG "tag:yaml.org,2002:int" /** The tag @c !!float for float values. */ #define YAML_FLOAT_TAG "tag:yaml.org,2002:float" /** The tag @c !!timestamp for date and time values. */ #define YAML_TIMESTAMP_TAG "tag:yaml.org,2002:timestamp" /** The tag @c !!seq is used to denote sequences. */ #define YAML_SEQ_TAG "tag:yaml.org,2002:seq" /** The tag @c !!map is used to denote mapping. 
*/ #define YAML_MAP_TAG "tag:yaml.org,2002:map" /** The default scalar tag is @c !!str. */ #define YAML_DEFAULT_SCALAR_TAG YAML_STR_TAG /** The default sequence tag is @c !!seq. */ #define YAML_DEFAULT_SEQUENCE_TAG YAML_SEQ_TAG /** The default mapping tag is @c !!map. */ #define YAML_DEFAULT_MAPPING_TAG YAML_MAP_TAG /** Node types. */ typedef enum yaml_node_type_e { /** An empty node. */ YAML_NO_NODE, /** A scalar node. */ YAML_SCALAR_NODE, /** A sequence node. */ YAML_SEQUENCE_NODE, /** A mapping node. */ YAML_MAPPING_NODE } yaml_node_type_t; /** The forward definition of a document node structure. */ typedef struct yaml_node_s yaml_node_t; /** An element of a sequence node. */ typedef int yaml_node_item_t; /** An element of a mapping node. */ typedef struct yaml_node_pair_s { /** The key of the element. */ int key; /** The value of the element. */ int value; } yaml_node_pair_t; /** The node structure. */ struct yaml_node_s { /** The node type. */ yaml_node_type_t type; /** The node tag. */ yaml_char_t *tag; /** The node data. */ union { /** The scalar parameters (for @c YAML_SCALAR_NODE). */ struct { /** The scalar value. */ yaml_char_t *value; /** The length of the scalar value. */ size_t length; /** The scalar style. */ yaml_scalar_style_t style; } scalar; /** The sequence parameters (for @c YAML_SEQUENCE_NODE). */ struct { /** The stack of sequence items. */ struct { /** The beginning of the stack. */ yaml_node_item_t *start; /** The end of the stack. */ yaml_node_item_t *end; /** The top of the stack. */ yaml_node_item_t *top; } items; /** The sequence style. */ yaml_sequence_style_t style; } sequence; /** The mapping parameters (for @c YAML_MAPPING_NODE). */ struct { /** The stack of mapping pairs (key, value). */ struct { /** The beginning of the stack. */ yaml_node_pair_t *start; /** The end of the stack. */ yaml_node_pair_t *end; /** The top of the stack. */ yaml_node_pair_t *top; } pairs; /** The mapping style. */ yaml_mapping_style_t style; } mapping; } data; /** The beginning of the node. */ yaml_mark_t start_mark; /** The end of the node. */ yaml_mark_t end_mark; }; /** The document structure. */ typedef struct yaml_document_s { /** The document nodes. */ struct { /** The beginning of the stack. */ yaml_node_t *start; /** The end of the stack. */ yaml_node_t *end; /** The top of the stack. */ yaml_node_t *top; } nodes; /** The version directive. */ yaml_version_directive_t *version_directive; /** The list of tag directives. */ struct { /** The beginning of the tag directives list. */ yaml_tag_directive_t *start; /** The end of the tag directives list. */ yaml_tag_directive_t *end; } tag_directives; /** Is the document start indicator implicit? */ int start_implicit; /** Is the document end indicator implicit? */ int end_implicit; /** The beginning of the document. */ yaml_mark_t start_mark; /** The end of the document. */ yaml_mark_t end_mark; } yaml_document_t; /** * Create a YAML document. * * @param[out] document An empty document object. * @param[in] version_directive The %YAML directive value or * @c NULL. * @param[in] tag_directives_start The beginning of the %TAG * directives list. * @param[in] tag_directives_end The end of the %TAG directives * list. * @param[in] start_implicit If the document start indicator is * implicit. * @param[in] end_implicit If the document end indicator is * implicit. * * @returns @c 1 if the function succeeded, @c 0 on error. 
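 *
 * For illustration only (editor's sketch, not part of the upstream header),
 * a document holding the mapping "greeting: hello" can be built with the
 * node functions declared below and released afterwards:
 * @code
 *   yaml_document_t doc;
 *   int root, key, value;
 *
 *   yaml_document_initialize(&doc, NULL, NULL, NULL, 1, 1);
 *   root  = yaml_document_add_mapping(&doc, NULL, YAML_BLOCK_MAPPING_STYLE);
 *   key   = yaml_document_add_scalar(&doc, NULL,
 *               (yaml_char_t *)"greeting", 8, YAML_PLAIN_SCALAR_STYLE);
 *   value = yaml_document_add_scalar(&doc, NULL,
 *               (yaml_char_t *)"hello", 5, YAML_PLAIN_SCALAR_STYLE);
 *   yaml_document_append_mapping_pair(&doc, root, key, value);
 *   yaml_document_delete(&doc);
 * @endcode
 * A @c NULL tag selects the default tag for the node type; error handling is
 * omitted for brevity.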
*/ YAML_DECLARE(int) yaml_document_initialize(yaml_document_t *document, yaml_version_directive_t *version_directive, yaml_tag_directive_t *tag_directives_start, yaml_tag_directive_t *tag_directives_end, int start_implicit, int end_implicit); /** * Delete a YAML document and all its nodes. * * @param[in,out] document A document object. */ YAML_DECLARE(void) yaml_document_delete(yaml_document_t *document); /** * Get a node of a YAML document. * * The pointer returned by this function is valid until any of the functions * modifying the documents are called. * * @param[in] document A document object. * @param[in] index The node id. * * @returns the node objct or @c NULL if @c node_id is out of range. */ YAML_DECLARE(yaml_node_t *) yaml_document_get_node(yaml_document_t *document, int index); /** * Get the root of a YAML document node. * * The root object is the first object added to the document. * * The pointer returned by this function is valid until any of the functions * modifying the documents are called. * * An empty document produced by the parser signifies the end of a YAML * stream. * * @param[in] document A document object. * * @returns the node object or @c NULL if the document is empty. */ YAML_DECLARE(yaml_node_t *) yaml_document_get_root_node(yaml_document_t *document); /** * Create a SCALAR node and attach it to the document. * * The @a style argument may be ignored by the emitter. * * @param[in,out] document A document object. * @param[in] tag The scalar tag. * @param[in] value The scalar value. * @param[in] length The length of the scalar value. * @param[in] style The scalar style. * * @returns the node id or @c 0 on error. */ YAML_DECLARE(int) yaml_document_add_scalar(yaml_document_t *document, yaml_char_t *tag, yaml_char_t *value, int length, yaml_scalar_style_t style); /** * Create a SEQUENCE node and attach it to the document. * * The @a style argument may be ignored by the emitter. * * @param[in,out] document A document object. * @param[in] tag The sequence tag. * @param[in] style The sequence style. * * @returns the node id or @c 0 on error. */ YAML_DECLARE(int) yaml_document_add_sequence(yaml_document_t *document, yaml_char_t *tag, yaml_sequence_style_t style); /** * Create a MAPPING node and attach it to the document. * * The @a style argument may be ignored by the emitter. * * @param[in,out] document A document object. * @param[in] tag The sequence tag. * @param[in] style The sequence style. * * @returns the node id or @c 0 on error. */ YAML_DECLARE(int) yaml_document_add_mapping(yaml_document_t *document, yaml_char_t *tag, yaml_mapping_style_t style); /** * Add an item to a SEQUENCE node. * * @param[in,out] document A document object. * @param[in] sequence The sequence node id. * @param[in] item The item node id. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_document_append_sequence_item(yaml_document_t *document, int sequence, int item); /** * Add a pair of a key and a value to a MAPPING node. * * @param[in,out] document A document object. * @param[in] mapping The mapping node id. * @param[in] key The key node id. * @param[in] value The value node id. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_document_append_mapping_pair(yaml_document_t *document, int mapping, int key, int value); /** @} */ /** * @defgroup parser Parser Definitions * @{ */ /** * The prototype of a read handler. * * The read handler is called when the parser needs to read more bytes from the * source. 
The handler should write not more than @a size bytes to the @a * buffer. The number of written bytes should be set to the @a length variable. * * @param[in,out] data A pointer to an application data specified by * yaml_parser_set_input(). * @param[out] buffer The buffer to write the data from the source. * @param[in] size The size of the buffer. * @param[out] size_read The actual number of bytes read from the source. * * @returns On success, the handler should return @c 1. If the handler failed, * the returned value should be @c 0. On EOF, the handler should set the * @a size_read to @c 0 and return @c 1. */ typedef int yaml_read_handler_t(void *data, unsigned char *buffer, size_t size, size_t *size_read); /** * This structure holds information about a potential simple key. */ typedef struct yaml_simple_key_s { /** Is a simple key possible? */ int possible; /** Is a simple key required? */ int required; /** The number of the token. */ size_t token_number; /** The position mark. */ yaml_mark_t mark; } yaml_simple_key_t; /** * The states of the parser. */ typedef enum yaml_parser_state_e { /** Expect STREAM-START. */ YAML_PARSE_STREAM_START_STATE, /** Expect the beginning of an implicit document. */ YAML_PARSE_IMPLICIT_DOCUMENT_START_STATE, /** Expect DOCUMENT-START. */ YAML_PARSE_DOCUMENT_START_STATE, /** Expect the content of a document. */ YAML_PARSE_DOCUMENT_CONTENT_STATE, /** Expect DOCUMENT-END. */ YAML_PARSE_DOCUMENT_END_STATE, /** Expect a block node. */ YAML_PARSE_BLOCK_NODE_STATE, /** Expect a block node or indentless sequence. */ YAML_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE, /** Expect a flow node. */ YAML_PARSE_FLOW_NODE_STATE, /** Expect the first entry of a block sequence. */ YAML_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE, /** Expect an entry of a block sequence. */ YAML_PARSE_BLOCK_SEQUENCE_ENTRY_STATE, /** Expect an entry of an indentless sequence. */ YAML_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE, /** Expect the first key of a block mapping. */ YAML_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE, /** Expect a block mapping key. */ YAML_PARSE_BLOCK_MAPPING_KEY_STATE, /** Expect a block mapping value. */ YAML_PARSE_BLOCK_MAPPING_VALUE_STATE, /** Expect the first entry of a flow sequence. */ YAML_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE, /** Expect an entry of a flow sequence. */ YAML_PARSE_FLOW_SEQUENCE_ENTRY_STATE, /** Expect a key of an ordered mapping. */ YAML_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE, /** Expect a value of an ordered mapping. */ YAML_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE, /** Expect the and of an ordered mapping entry. */ YAML_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE, /** Expect the first key of a flow mapping. */ YAML_PARSE_FLOW_MAPPING_FIRST_KEY_STATE, /** Expect a key of a flow mapping. */ YAML_PARSE_FLOW_MAPPING_KEY_STATE, /** Expect a value of a flow mapping. */ YAML_PARSE_FLOW_MAPPING_VALUE_STATE, /** Expect an empty value of a flow mapping. */ YAML_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE, /** Expect nothing. */ YAML_PARSE_END_STATE } yaml_parser_state_t; /** * This structure holds aliases data. */ typedef struct yaml_alias_data_s { /** The anchor. */ yaml_char_t *anchor; /** The node id. */ int index; /** The anchor mark. */ yaml_mark_t mark; } yaml_alias_data_t; /** * The parser structure. * * All members are internal. Manage the structure using the @c yaml_parser_ * family of functions. */ typedef struct yaml_parser_s { /** * @name Error handling * @{ */ /** Error type. */ yaml_error_type_t error; /** Error description. 
*/ const char *problem; /** The byte about which the problem occurred. */ size_t problem_offset; /** The problematic value (@c -1 is none). */ int problem_value; /** The problem position. */ yaml_mark_t problem_mark; /** The error context. */ const char *context; /** The context position. */ yaml_mark_t context_mark; /** * @} */ /** * @name Reader stuff * @{ */ /** Read handler. */ yaml_read_handler_t *read_handler; /** A pointer for passing to the read handler. */ void *read_handler_data; /** Standard (string or file) input data. */ union { /** String input data. */ struct { /** The string start pointer. */ const unsigned char *start; /** The string end pointer. */ const unsigned char *end; /** The string current position. */ const unsigned char *current; } string; /** File input data. */ FILE *file; } input; /** EOF flag */ int eof; /** The working buffer. */ struct { /** The beginning of the buffer. */ yaml_char_t *start; /** The end of the buffer. */ yaml_char_t *end; /** The current position of the buffer. */ yaml_char_t *pointer; /** The last filled position of the buffer. */ yaml_char_t *last; } buffer; /* The number of unread characters in the buffer. */ size_t unread; /** The raw buffer. */ struct { /** The beginning of the buffer. */ unsigned char *start; /** The end of the buffer. */ unsigned char *end; /** The current position of the buffer. */ unsigned char *pointer; /** The last filled position of the buffer. */ unsigned char *last; } raw_buffer; /** The input encoding. */ yaml_encoding_t encoding; /** The offset of the current position (in bytes). */ size_t offset; /** The mark of the current position. */ yaml_mark_t mark; /** * @} */ /** * @name Scanner stuff * @{ */ /** Have we started to scan the input stream? */ int stream_start_produced; /** Have we reached the end of the input stream? */ int stream_end_produced; /** The number of unclosed '[' and '{' indicators. */ int flow_level; /** The tokens queue. */ struct { /** The beginning of the tokens queue. */ yaml_token_t *start; /** The end of the tokens queue. */ yaml_token_t *end; /** The head of the tokens queue. */ yaml_token_t *head; /** The tail of the tokens queue. */ yaml_token_t *tail; } tokens; /** The number of tokens fetched from the queue. */ size_t tokens_parsed; /* Does the tokens queue contain a token ready for dequeueing. */ int token_available; /** The indentation levels stack. */ struct { /** The beginning of the stack. */ int *start; /** The end of the stack. */ int *end; /** The top of the stack. */ int *top; } indents; /** The current indentation level. */ int indent; /** May a simple key occur at the current position? */ int simple_key_allowed; /** The stack of simple keys. */ struct { /** The beginning of the stack. */ yaml_simple_key_t *start; /** The end of the stack. */ yaml_simple_key_t *end; /** The top of the stack. */ yaml_simple_key_t *top; } simple_keys; /** * @} */ /** * @name Parser stuff * @{ */ /** The parser states stack. */ struct { /** The beginning of the stack. */ yaml_parser_state_t *start; /** The end of the stack. */ yaml_parser_state_t *end; /** The top of the stack. */ yaml_parser_state_t *top; } states; /** The current parser state. */ yaml_parser_state_t state; /** The stack of marks. */ struct { /** The beginning of the stack. */ yaml_mark_t *start; /** The end of the stack. */ yaml_mark_t *end; /** The top of the stack. */ yaml_mark_t *top; } marks; /** The list of TAG directives. */ struct { /** The beginning of the list. 
*/ yaml_tag_directive_t *start; /** The end of the list. */ yaml_tag_directive_t *end; /** The top of the list. */ yaml_tag_directive_t *top; } tag_directives; /** * @} */ /** * @name Dumper stuff * @{ */ /** The alias data. */ struct { /** The beginning of the list. */ yaml_alias_data_t *start; /** The end of the list. */ yaml_alias_data_t *end; /** The top of the list. */ yaml_alias_data_t *top; } aliases; /** The currently parsed document. */ yaml_document_t *document; /** * @} */ } yaml_parser_t; /** * Initialize a parser. * * This function creates a new parser object. An application is responsible * for destroying the object using the yaml_parser_delete() function. * * @param[out] parser An empty parser object. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_parser_initialize(yaml_parser_t *parser); /** * Destroy a parser. * * @param[in,out] parser A parser object. */ YAML_DECLARE(void) yaml_parser_delete(yaml_parser_t *parser); /** * Set a string input. * * Note that the @a input pointer must be valid while the @a parser object * exists. The application is responsible for destroing @a input after * destroying the @a parser. * * @param[in,out] parser A parser object. * @param[in] input A source data. * @param[in] size The length of the source data in bytes. */ YAML_DECLARE(void) yaml_parser_set_input_string(yaml_parser_t *parser, const unsigned char *input, size_t size); /** * Set a file input. * * @a file should be a file object open for reading. The application is * responsible for closing the @a file. * * @param[in,out] parser A parser object. * @param[in] file An open file. */ YAML_DECLARE(void) yaml_parser_set_input_file(yaml_parser_t *parser, FILE *file); /** * Set a generic input handler. * * @param[in,out] parser A parser object. * @param[in] handler A read handler. * @param[in] data Any application data for passing to the read * handler. */ YAML_DECLARE(void) yaml_parser_set_input(yaml_parser_t *parser, yaml_read_handler_t *handler, void *data); /** * Set the source encoding. * * @param[in,out] parser A parser object. * @param[in] encoding The source encoding. */ YAML_DECLARE(void) yaml_parser_set_encoding(yaml_parser_t *parser, yaml_encoding_t encoding); /** * Scan the input stream and produce the next token. * * Call the function subsequently to produce a sequence of tokens corresponding * to the input stream. The initial token has the type * @c YAML_STREAM_START_TOKEN while the ending token has the type * @c YAML_STREAM_END_TOKEN. * * An application is responsible for freeing any buffers associated with the * produced token object using the @c yaml_token_delete function. * * An application must not alternate the calls of yaml_parser_scan() with the * calls of yaml_parser_parse() or yaml_parser_load(). Doing this will break * the parser. * * @param[in,out] parser A parser object. * @param[out] token An empty token object. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_parser_scan(yaml_parser_t *parser, yaml_token_t *token); /** * Parse the input stream and produce the next parsing event. * * Call the function subsequently to produce a sequence of events corresponding * to the input stream. The initial event has the type * @c YAML_STREAM_START_EVENT while the ending event has the type * @c YAML_STREAM_END_EVENT. * * An application is responsible for freeing any buffers associated with the * produced event object using the yaml_event_delete() function. 
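 *
 * For illustration only (editor's sketch, not part of the upstream header),
 * a typical event loop over an in-memory document looks like this:
 * @code
 *   yaml_parser_t parser;
 *   yaml_event_t event;
 *   const char *input = "greeting: hello";
 *   int done = 0;
 *
 *   yaml_parser_initialize(&parser);
 *   yaml_parser_set_input_string(&parser,
 *       (const unsigned char *)input, strlen(input));
 *   while (!done) {
 *       if (!yaml_parser_parse(&parser, &event))
 *           break;                 // parser.problem describes the failure
 *       done = (event.type == YAML_STREAM_END_EVENT);
 *       // ... dispatch on event.type here ...
 *       yaml_event_delete(&event);
 *   }
 *   yaml_parser_delete(&parser);
 * @endcode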
* * An application must not alternate the calls of yaml_parser_parse() with the * calls of yaml_parser_scan() or yaml_parser_load(). Doing this will break the * parser. * * @param[in,out] parser A parser object. * @param[out] event An empty event object. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_parser_parse(yaml_parser_t *parser, yaml_event_t *event); /** * Parse the input stream and produce the next YAML document. * * Call this function subsequently to produce a sequence of documents * constituting the input stream. * * If the produced document has no root node, it means that the document * end has been reached. * * An application is responsible for freeing any data associated with the * produced document object using the yaml_document_delete() function. * * An application must not alternate the calls of yaml_parser_load() with the * calls of yaml_parser_scan() or yaml_parser_parse(). Doing this will break * the parser. * * @param[in,out] parser A parser object. * @param[out] document An empty document object. * * @return @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_parser_load(yaml_parser_t *parser, yaml_document_t *document); /** @} */ /** * @defgroup emitter Emitter Definitions * @{ */ /** * The prototype of a write handler. * * The write handler is called when the emitter needs to flush the accumulated * characters to the output. The handler should write @a size bytes of the * @a buffer to the output. * * @param[in,out] data A pointer to an application data specified by * yaml_emitter_set_output(). * @param[in] buffer The buffer with bytes to be written. * @param[in] size The size of the buffer. * * @returns On success, the handler should return @c 1. If the handler failed, * the returned value should be @c 0. */ typedef int yaml_write_handler_t(void *data, unsigned char *buffer, size_t size); /** The emitter states. */ typedef enum yaml_emitter_state_e { /** Expect STREAM-START. */ YAML_EMIT_STREAM_START_STATE, /** Expect the first DOCUMENT-START or STREAM-END. */ YAML_EMIT_FIRST_DOCUMENT_START_STATE, /** Expect DOCUMENT-START or STREAM-END. */ YAML_EMIT_DOCUMENT_START_STATE, /** Expect the content of a document. */ YAML_EMIT_DOCUMENT_CONTENT_STATE, /** Expect DOCUMENT-END. */ YAML_EMIT_DOCUMENT_END_STATE, /** Expect the first item of a flow sequence. */ YAML_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE, /** Expect an item of a flow sequence. */ YAML_EMIT_FLOW_SEQUENCE_ITEM_STATE, /** Expect the first key of a flow mapping. */ YAML_EMIT_FLOW_MAPPING_FIRST_KEY_STATE, /** Expect a key of a flow mapping. */ YAML_EMIT_FLOW_MAPPING_KEY_STATE, /** Expect a value for a simple key of a flow mapping. */ YAML_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE, /** Expect a value of a flow mapping. */ YAML_EMIT_FLOW_MAPPING_VALUE_STATE, /** Expect the first item of a block sequence. */ YAML_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE, /** Expect an item of a block sequence. */ YAML_EMIT_BLOCK_SEQUENCE_ITEM_STATE, /** Expect the first key of a block mapping. */ YAML_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE, /** Expect the key of a block mapping. */ YAML_EMIT_BLOCK_MAPPING_KEY_STATE, /** Expect a value for a simple key of a block mapping. */ YAML_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE, /** Expect a value of a block mapping. */ YAML_EMIT_BLOCK_MAPPING_VALUE_STATE, /** Expect nothing. */ YAML_EMIT_END_STATE } yaml_emitter_state_t; /** * The emitter structure. * * All members are internal. Manage the structure using the @c yaml_emitter_ * family of functions. 
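 *
 * For illustration only (editor's sketch, not part of the upstream header;
 * yaml_emitter_emit() is declared further down in this file), emitting the
 * single-scalar document "hello" into a string buffer could look like this:
 * @code
 *   yaml_emitter_t emitter;
 *   yaml_event_t event;
 *   unsigned char out[256];
 *   size_t written = 0;
 *
 *   yaml_emitter_initialize(&emitter);
 *   yaml_emitter_set_output_string(&emitter, out, sizeof(out), &written);
 *
 *   yaml_stream_start_event_initialize(&event, YAML_UTF8_ENCODING);
 *   yaml_emitter_emit(&emitter, &event);
 *   yaml_document_start_event_initialize(&event, NULL, NULL, NULL, 1);
 *   yaml_emitter_emit(&emitter, &event);
 *   yaml_scalar_event_initialize(&event, NULL, NULL,
 *       (yaml_char_t *)"hello", 5, 1, 1, YAML_PLAIN_SCALAR_STYLE);
 *   yaml_emitter_emit(&emitter, &event);
 *   yaml_document_end_event_initialize(&event, 1);
 *   yaml_emitter_emit(&emitter, &event);
 *   yaml_stream_end_event_initialize(&event);
 *   yaml_emitter_emit(&emitter, &event);
 *
 *   yaml_emitter_delete(&emitter);
 * @endcode
 * Each successful emit call takes ownership of the event, so the events are
 * not deleted here; return-value checks are omitted for brevity.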
*/ typedef struct yaml_emitter_s { /** * @name Error handling * @{ */ /** Error type. */ yaml_error_type_t error; /** Error description. */ const char *problem; /** * @} */ /** * @name Writer stuff * @{ */ /** Write handler. */ yaml_write_handler_t *write_handler; /** A pointer for passing to the white handler. */ void *write_handler_data; /** Standard (string or file) output data. */ union { /** String output data. */ struct { /** The buffer pointer. */ unsigned char *buffer; /** The buffer size. */ size_t size; /** The number of written bytes. */ size_t *size_written; } string; /** File output data. */ FILE *file; } output; /** The working buffer. */ struct { /** The beginning of the buffer. */ yaml_char_t *start; /** The end of the buffer. */ yaml_char_t *end; /** The current position of the buffer. */ yaml_char_t *pointer; /** The last filled position of the buffer. */ yaml_char_t *last; } buffer; /** The raw buffer. */ struct { /** The beginning of the buffer. */ unsigned char *start; /** The end of the buffer. */ unsigned char *end; /** The current position of the buffer. */ unsigned char *pointer; /** The last filled position of the buffer. */ unsigned char *last; } raw_buffer; /** The stream encoding. */ yaml_encoding_t encoding; /** * @} */ /** * @name Emitter stuff * @{ */ /** If the output is in the canonical style? */ int canonical; /** The number of indentation spaces. */ int best_indent; /** The preferred width of the output lines. */ int best_width; /** Allow unescaped non-ASCII characters? */ int unicode; /** The preferred line break. */ yaml_break_t line_break; /** The stack of states. */ struct { /** The beginning of the stack. */ yaml_emitter_state_t *start; /** The end of the stack. */ yaml_emitter_state_t *end; /** The top of the stack. */ yaml_emitter_state_t *top; } states; /** The current emitter state. */ yaml_emitter_state_t state; /** The event queue. */ struct { /** The beginning of the event queue. */ yaml_event_t *start; /** The end of the event queue. */ yaml_event_t *end; /** The head of the event queue. */ yaml_event_t *head; /** The tail of the event queue. */ yaml_event_t *tail; } events; /** The stack of indentation levels. */ struct { /** The beginning of the stack. */ int *start; /** The end of the stack. */ int *end; /** The top of the stack. */ int *top; } indents; /** The list of tag directives. */ struct { /** The beginning of the list. */ yaml_tag_directive_t *start; /** The end of the list. */ yaml_tag_directive_t *end; /** The top of the list. */ yaml_tag_directive_t *top; } tag_directives; /** The current indentation level. */ int indent; /** The current flow level. */ int flow_level; /** Is it the document root context? */ int root_context; /** Is it a sequence context? */ int sequence_context; /** Is it a mapping context? */ int mapping_context; /** Is it a simple mapping key context? */ int simple_key_context; /** The current line. */ int line; /** The current column. */ int column; /** If the last character was a whitespace? */ int whitespace; /** If the last character was an indentation character (' ', '-', '?', ':')? */ int indention; /** If an explicit document end is required? */ int open_ended; /** Anchor analysis. */ struct { /** The anchor value. */ yaml_char_t *anchor; /** The anchor length. */ size_t anchor_length; /** Is it an alias? */ int alias; } anchor_data; /** Tag analysis. */ struct { /** The tag handle. */ yaml_char_t *handle; /** The tag handle length. */ size_t handle_length; /** The tag suffix. 
*/ yaml_char_t *suffix; /** The tag suffix length. */ size_t suffix_length; } tag_data; /** Scalar analysis. */ struct { /** The scalar value. */ yaml_char_t *value; /** The scalar length. */ size_t length; /** Does the scalar contain line breaks? */ int multiline; /** Can the scalar be expessed in the flow plain style? */ int flow_plain_allowed; /** Can the scalar be expressed in the block plain style? */ int block_plain_allowed; /** Can the scalar be expressed in the single quoted style? */ int single_quoted_allowed; /** Can the scalar be expressed in the literal or folded styles? */ int block_allowed; /** The output style. */ yaml_scalar_style_t style; } scalar_data; /** * @} */ /** * @name Dumper stuff * @{ */ /** If the stream was already opened? */ int opened; /** If the stream was already closed? */ int closed; /** The information associated with the document nodes. */ struct { /** The number of references. */ int references; /** The anchor id. */ int anchor; /** If the node has been emitted? */ int serialized; } *anchors; /** The last assigned anchor id. */ int last_anchor_id; /** The currently emitted document. */ yaml_document_t *document; /** * @} */ } yaml_emitter_t; /** * Initialize an emitter. * * This function creates a new emitter object. An application is responsible * for destroying the object using the yaml_emitter_delete() function. * * @param[out] emitter An empty parser object. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_emitter_initialize(yaml_emitter_t *emitter); /** * Destroy an emitter. * * @param[in,out] emitter An emitter object. */ YAML_DECLARE(void) yaml_emitter_delete(yaml_emitter_t *emitter); /** * Set a string output. * * The emitter will write the output characters to the @a output buffer of the * size @a size. The emitter will set @a size_written to the number of written * bytes. If the buffer is smaller than required, the emitter produces the * YAML_WRITE_ERROR error. * * @param[in,out] emitter An emitter object. * @param[in] output An output buffer. * @param[in] size The buffer size. * @param[in] size_written The pointer to save the number of written * bytes. */ YAML_DECLARE(void) yaml_emitter_set_output_string(yaml_emitter_t *emitter, unsigned char *output, size_t size, size_t *size_written); /** * Set a file output. * * @a file should be a file object open for writing. The application is * responsible for closing the @a file. * * @param[in,out] emitter An emitter object. * @param[in] file An open file. */ YAML_DECLARE(void) yaml_emitter_set_output_file(yaml_emitter_t *emitter, FILE *file); /** * Set a generic output handler. * * @param[in,out] emitter An emitter object. * @param[in] handler A write handler. * @param[in] data Any application data for passing to the write * handler. */ YAML_DECLARE(void) yaml_emitter_set_output(yaml_emitter_t *emitter, yaml_write_handler_t *handler, void *data); /** * Set the output encoding. * * @param[in,out] emitter An emitter object. * @param[in] encoding The output encoding. */ YAML_DECLARE(void) yaml_emitter_set_encoding(yaml_emitter_t *emitter, yaml_encoding_t encoding); /** * Set if the output should be in the "canonical" format as in the YAML * specification. * * @param[in,out] emitter An emitter object. * @param[in] canonical If the output is canonical. */ YAML_DECLARE(void) yaml_emitter_set_canonical(yaml_emitter_t *emitter, int canonical); /** * Set the indentation increment. * * @param[in,out] emitter An emitter object. 
* @param[in] indent The indentation increment (1 < . < 10). */ YAML_DECLARE(void) yaml_emitter_set_indent(yaml_emitter_t *emitter, int indent); /** * Set the preferred line width. @c -1 means unlimited. * * @param[in,out] emitter An emitter object. * @param[in] width The preferred line width. */ YAML_DECLARE(void) yaml_emitter_set_width(yaml_emitter_t *emitter, int width); /** * Set if unescaped non-ASCII characters are allowed. * * @param[in,out] emitter An emitter object. * @param[in] unicode If unescaped Unicode characters are allowed. */ YAML_DECLARE(void) yaml_emitter_set_unicode(yaml_emitter_t *emitter, int unicode); /** * Set the preferred line break. * * @param[in,out] emitter An emitter object. * @param[in] line_break The preferred line break. */ YAML_DECLARE(void) yaml_emitter_set_break(yaml_emitter_t *emitter, yaml_break_t line_break); /** * Emit an event. * * The event object may be generated using the yaml_parser_parse() function. * The emitter takes the responsibility for the event object and destroys its * content after it is emitted. The event object is destroyed even if the * function fails. * * @param[in,out] emitter An emitter object. * @param[in,out] event An event object. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_emitter_emit(yaml_emitter_t *emitter, yaml_event_t *event); /** * Start a YAML stream. * * This function should be used before yaml_emitter_dump() is called. * * @param[in,out] emitter An emitter object. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_emitter_open(yaml_emitter_t *emitter); /** * Finish a YAML stream. * * This function should be used after yaml_emitter_dump() is called. * * @param[in,out] emitter An emitter object. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_emitter_close(yaml_emitter_t *emitter); /** * Emit a YAML document. * * The documen object may be generated using the yaml_parser_load() function * or the yaml_document_initialize() function. The emitter takes the * responsibility for the document object and destoys its content after * it is emitted. The document object is destroyedeven if the function fails. * * @param[in,out] emitter An emitter object. * @param[in,out] document A document object. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_emitter_dump(yaml_emitter_t *emitter, yaml_document_t *document); /** * Flush the accumulated characters to the output. * * @param[in,out] emitter An emitter object. * * @returns @c 1 if the function succeeded, @c 0 on error. */ YAML_DECLARE(int) yaml_emitter_flush(yaml_emitter_t *emitter); /** @} */ #ifdef __cplusplus } #endif #endif /* #ifndef YAML_H */ psych-2.2.4/ext/psych/yaml/yaml_private.h000066400000000000000000000716211305404671600204170ustar00rootroot00000000000000#ifdef RUBY_EXTCONF_H #include RUBY_EXTCONF_H #endif #if HAVE_CONFIG_H #include #endif #include #include #include #include #ifndef _MSC_VER #include #else #ifdef _WIN64 #define PTRDIFF_MAX _I64_MAX #else #define PTRDIFF_MAX INT_MAX #endif #endif /* * Memory management. */ YAML_DECLARE(void *) yaml_malloc(size_t size); YAML_DECLARE(void *) yaml_realloc(void *ptr, size_t size); YAML_DECLARE(void) yaml_free(void *ptr); YAML_DECLARE(yaml_char_t *) yaml_strdup(const yaml_char_t *); /* * Reader: Ensure that the buffer contains at least `length` characters. 
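On the Ruby side, Psych::Emitter is a thin wrapper over this emitter API: each method call corresponds to one emitted event, and the setters above surface as accessors such as #indentation= and #canonical=. A sketch, with event arguments following the handler signatures documented in lib/psych/handler.rb (exact whitespace in the output is libyaml's choice):

    require 'psych'

    emitter = Psych::Emitter.new($stdout)
    emitter.indentation = 2                           # cf. yaml_emitter_set_indent()

    emitter.start_stream(Psych::Nodes::Stream::UTF8)  # STREAM-START
    emitter.start_document([], [], false)             # DOCUMENT-START, no %YAML directive
    emitter.scalar('hello', nil, nil, true, false,    # SCALAR
                   Psych::Nodes::Scalar::ANY)
    emitter.end_document(true)                        # DOCUMENT-END
    emitter.end_stream                                # STREAM-END
    # writes a single document such as "--- hello\n" (a "..." terminator may follow)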
*/ YAML_DECLARE(int) yaml_parser_update_buffer(yaml_parser_t *parser, size_t length); /* * Scanner: Ensure that the token stack contains at least one token ready. */ YAML_DECLARE(int) yaml_parser_fetch_more_tokens(yaml_parser_t *parser); /* * The size of the input raw buffer. */ #define INPUT_RAW_BUFFER_SIZE 16384 /* * The size of the input buffer. * * It should be possible to decode the whole raw buffer. */ #define INPUT_BUFFER_SIZE (INPUT_RAW_BUFFER_SIZE*3) /* * The size of the output buffer. */ #define OUTPUT_BUFFER_SIZE 16384 /* * The size of the output raw buffer. * * It should be possible to encode the whole output buffer. */ #define OUTPUT_RAW_BUFFER_SIZE (OUTPUT_BUFFER_SIZE*2+2) /* * The size of other stacks and queues. */ #define INITIAL_STACK_SIZE 16 #define INITIAL_QUEUE_SIZE 16 #define INITIAL_STRING_SIZE 16 /* * Buffer management. */ #define BUFFER_INIT(context,buffer,size) \ (((buffer).start = yaml_malloc(size)) ? \ ((buffer).last = (buffer).pointer = (buffer).start, \ (buffer).end = (buffer).start+(size), \ 1) : \ ((context)->error = YAML_MEMORY_ERROR, \ 0)) #define BUFFER_DEL(context,buffer) \ (yaml_free((buffer).start), \ (buffer).start = (buffer).pointer = (buffer).end = 0) /* * String management. */ typedef struct { yaml_char_t *start; yaml_char_t *end; yaml_char_t *pointer; } yaml_string_t; YAML_DECLARE(int) yaml_string_extend(yaml_char_t **start, yaml_char_t **pointer, yaml_char_t **end); YAML_DECLARE(int) yaml_string_join( yaml_char_t **a_start, yaml_char_t **a_pointer, yaml_char_t **a_end, yaml_char_t **b_start, yaml_char_t **b_pointer, yaml_char_t **b_end); #define NULL_STRING { NULL, NULL, NULL } #define STRING(string,length) { (string), (string)+(length), (string) } #define STRING_ASSIGN(value,string,length) \ ((value).start = (string), \ (value).end = (string)+(length), \ (value).pointer = (string)) #define STRING_INIT(context,string,size) \ (((string).start = yaml_malloc(size)) ? \ ((string).pointer = (string).start, \ (string).end = (string).start+(size), \ memset((string).start, 0, (size)), \ 1) : \ ((context)->error = YAML_MEMORY_ERROR, \ 0)) #define STRING_DEL(context,string) \ (yaml_free((string).start), \ (string).start = (string).pointer = (string).end = 0) #define STRING_EXTEND(context,string) \ ((((string).pointer+5 < (string).end) \ || yaml_string_extend(&(string).start, \ &(string).pointer, &(string).end)) ? \ 1 : \ ((context)->error = YAML_MEMORY_ERROR, \ 0)) #define CLEAR(context,string) \ ((string).pointer = (string).start, \ memset((string).start, 0, (string).end-(string).start)) #define JOIN(context,string_a,string_b) \ ((yaml_string_join(&(string_a).start, &(string_a).pointer, \ &(string_a).end, &(string_b).start, \ &(string_b).pointer, &(string_b).end)) ? \ ((string_b).pointer = (string_b).start, \ 1) : \ ((context)->error = YAML_MEMORY_ERROR, \ 0)) /* * String check operations. */ /* * Check the octet at the specified position. */ #define CHECK_AT(string,octet,offset) \ ((string).pointer[offset] == (yaml_char_t)(octet)) /* * Check the current octet in the buffer. */ #define CHECK(string,octet) CHECK_AT((string),(octet),0) /* * Check if the character at the specified position is an alphabetical * character, a digit, '_', or '-'. 
*/ #define IS_ALPHA_AT(string,offset) \ (((string).pointer[offset] >= (yaml_char_t) '0' && \ (string).pointer[offset] <= (yaml_char_t) '9') || \ ((string).pointer[offset] >= (yaml_char_t) 'A' && \ (string).pointer[offset] <= (yaml_char_t) 'Z') || \ ((string).pointer[offset] >= (yaml_char_t) 'a' && \ (string).pointer[offset] <= (yaml_char_t) 'z') || \ (string).pointer[offset] == '_' || \ (string).pointer[offset] == '-') #define IS_ALPHA(string) IS_ALPHA_AT((string),0) /* * Check if the character at the specified position is a digit. */ #define IS_DIGIT_AT(string,offset) \ (((string).pointer[offset] >= (yaml_char_t) '0' && \ (string).pointer[offset] <= (yaml_char_t) '9')) #define IS_DIGIT(string) IS_DIGIT_AT((string),0) /* * Get the value of a digit. */ #define AS_DIGIT_AT(string,offset) \ ((string).pointer[offset] - (yaml_char_t) '0') #define AS_DIGIT(string) AS_DIGIT_AT((string),0) /* * Check if the character at the specified position is a hex-digit. */ #define IS_HEX_AT(string,offset) \ (((string).pointer[offset] >= (yaml_char_t) '0' && \ (string).pointer[offset] <= (yaml_char_t) '9') || \ ((string).pointer[offset] >= (yaml_char_t) 'A' && \ (string).pointer[offset] <= (yaml_char_t) 'F') || \ ((string).pointer[offset] >= (yaml_char_t) 'a' && \ (string).pointer[offset] <= (yaml_char_t) 'f')) #define IS_HEX(string) IS_HEX_AT((string),0) /* * Get the value of a hex-digit. */ #define AS_HEX_AT(string,offset) \ (((string).pointer[offset] >= (yaml_char_t) 'A' && \ (string).pointer[offset] <= (yaml_char_t) 'F') ? \ ((string).pointer[offset] - (yaml_char_t) 'A' + 10) : \ ((string).pointer[offset] >= (yaml_char_t) 'a' && \ (string).pointer[offset] <= (yaml_char_t) 'f') ? \ ((string).pointer[offset] - (yaml_char_t) 'a' + 10) : \ ((string).pointer[offset] - (yaml_char_t) '0')) #define AS_HEX(string) AS_HEX_AT((string),0) /* * Check if the character is ASCII. */ #define IS_ASCII_AT(string,offset) \ ((string).pointer[offset] <= (yaml_char_t) '\x7F') #define IS_ASCII(string) IS_ASCII_AT((string),0) /* * Check if the character can be printed unescaped. */ #define IS_PRINTABLE_AT(string,offset) \ (((string).pointer[offset] == 0x0A) /* . == #x0A */ \ || ((string).pointer[offset] >= 0x20 /* #x20 <= . <= #x7E */ \ && (string).pointer[offset] <= 0x7E) \ || ((string).pointer[offset] == 0xC2 /* #0xA0 <= . <= #xD7FF */ \ && (string).pointer[offset+1] >= 0xA0) \ || ((string).pointer[offset] > 0xC2 \ && (string).pointer[offset] < 0xED) \ || ((string).pointer[offset] == 0xED \ && (string).pointer[offset+1] < 0xA0) \ || ((string).pointer[offset] == 0xEE) \ || ((string).pointer[offset] == 0xEF /* #xE000 <= . <= #xFFFD */ \ && !((string).pointer[offset+1] == 0xBB /* && . != #xFEFF */ \ && (string).pointer[offset+2] == 0xBF) \ && !((string).pointer[offset+1] == 0xBF \ && ((string).pointer[offset+2] == 0xBE \ || (string).pointer[offset+2] == 0xBF)))) #define IS_PRINTABLE(string) IS_PRINTABLE_AT((string),0) /* * Check if the character at the specified position is NUL. */ #define IS_Z_AT(string,offset) CHECK_AT((string),'\0',(offset)) #define IS_Z(string) IS_Z_AT((string),0) /* * Check if the character at the specified position is BOM. */ #define IS_BOM_AT(string,offset) \ (CHECK_AT((string),'\xEF',(offset)) \ && CHECK_AT((string),'\xBB',(offset)+1) \ && CHECK_AT((string),'\xBF',(offset)+2)) /* BOM (#xFEFF) */ #define IS_BOM(string) IS_BOM_AT(string,0) /* * Check if the character at the specified position is space. 
*/ #define IS_SPACE_AT(string,offset) CHECK_AT((string),' ',(offset)) #define IS_SPACE(string) IS_SPACE_AT((string),0) /* * Check if the character at the specified position is tab. */ #define IS_TAB_AT(string,offset) CHECK_AT((string),'\t',(offset)) #define IS_TAB(string) IS_TAB_AT((string),0) /* * Check if the character at the specified position is blank (space or tab). */ #define IS_BLANK_AT(string,offset) \ (IS_SPACE_AT((string),(offset)) || IS_TAB_AT((string),(offset))) #define IS_BLANK(string) IS_BLANK_AT((string),0) /* * Check if the character at the specified position is a line break. */ #define IS_BREAK_AT(string,offset) \ (CHECK_AT((string),'\r',(offset)) /* CR (#xD)*/ \ || CHECK_AT((string),'\n',(offset)) /* LF (#xA) */ \ || (CHECK_AT((string),'\xC2',(offset)) \ && CHECK_AT((string),'\x85',(offset)+1)) /* NEL (#x85) */ \ || (CHECK_AT((string),'\xE2',(offset)) \ && CHECK_AT((string),'\x80',(offset)+1) \ && CHECK_AT((string),'\xA8',(offset)+2)) /* LS (#x2028) */ \ || (CHECK_AT((string),'\xE2',(offset)) \ && CHECK_AT((string),'\x80',(offset)+1) \ && CHECK_AT((string),'\xA9',(offset)+2))) /* PS (#x2029) */ #define IS_BREAK(string) IS_BREAK_AT((string),0) #define IS_CRLF_AT(string,offset) \ (CHECK_AT((string),'\r',(offset)) && CHECK_AT((string),'\n',(offset)+1)) #define IS_CRLF(string) IS_CRLF_AT((string),0) /* * Check if the character is a line break or NUL. */ #define IS_BREAKZ_AT(string,offset) \ (IS_BREAK_AT((string),(offset)) || IS_Z_AT((string),(offset))) #define IS_BREAKZ(string) IS_BREAKZ_AT((string),0) /* * Check if the character is a line break, space, or NUL. */ #define IS_SPACEZ_AT(string,offset) \ (IS_SPACE_AT((string),(offset)) || IS_BREAKZ_AT((string),(offset))) #define IS_SPACEZ(string) IS_SPACEZ_AT((string),0) /* * Check if the character is a line break, space, tab, or NUL. */ #define IS_BLANKZ_AT(string,offset) \ (IS_BLANK_AT((string),(offset)) || IS_BREAKZ_AT((string),(offset))) #define IS_BLANKZ(string) IS_BLANKZ_AT((string),0) /* * Determine the width of the character. */ #define WIDTH_AT(string,offset) \ (((string).pointer[offset] & 0x80) == 0x00 ? 1 : \ ((string).pointer[offset] & 0xE0) == 0xC0 ? 2 : \ ((string).pointer[offset] & 0xF0) == 0xE0 ? 3 : \ ((string).pointer[offset] & 0xF8) == 0xF0 ? 4 : 0) #define WIDTH(string) WIDTH_AT((string),0) /* * Move the string pointer to the next character. */ #define MOVE(string) ((string).pointer += WIDTH((string))) /* * Copy a character and move the pointers of both strings. */ #define COPY(string_a,string_b) \ ((*(string_b).pointer & 0x80) == 0x00 ? \ (*((string_a).pointer++) = *((string_b).pointer++)) : \ (*(string_b).pointer & 0xE0) == 0xC0 ? \ (*((string_a).pointer++) = *((string_b).pointer++), \ *((string_a).pointer++) = *((string_b).pointer++)) : \ (*(string_b).pointer & 0xF0) == 0xE0 ? \ (*((string_a).pointer++) = *((string_b).pointer++), \ *((string_a).pointer++) = *((string_b).pointer++), \ *((string_a).pointer++) = *((string_b).pointer++)) : \ (*(string_b).pointer & 0xF8) == 0xF0 ? \ (*((string_a).pointer++) = *((string_b).pointer++), \ *((string_a).pointer++) = *((string_b).pointer++), \ *((string_a).pointer++) = *((string_b).pointer++), \ *((string_a).pointer++) = *((string_b).pointer++)) : 0) /* * Stack and queue management. */ YAML_DECLARE(int) yaml_stack_extend(void **start, void **top, void **end); YAML_DECLARE(int) yaml_queue_extend(void **start, void **head, void **tail, void **end); #define STACK_INIT(context,stack,size) \ (((stack).start = yaml_malloc((size)*sizeof(*(stack).start))) ? 
\ ((stack).top = (stack).start, \ (stack).end = (stack).start+(size), \ 1) : \ ((context)->error = YAML_MEMORY_ERROR, \ 0)) #define STACK_DEL(context,stack) \ (yaml_free((stack).start), \ (stack).start = (stack).top = (stack).end = 0) #define STACK_EMPTY(context,stack) \ ((stack).start == (stack).top) #define STACK_LIMIT(context,stack,size) \ ((stack).top - (stack).start < (size) ? \ 1 : \ ((context)->error = YAML_MEMORY_ERROR, \ 0)) #define PUSH(context,stack,value) \ (((stack).top != (stack).end \ || yaml_stack_extend((void **)&(stack).start, \ (void **)&(stack).top, (void **)&(stack).end)) ? \ (*((stack).top++) = value, \ 1) : \ ((context)->error = YAML_MEMORY_ERROR, \ 0)) #define POP(context,stack) \ (*(--(stack).top)) #define QUEUE_INIT(context,queue,size) \ (((queue).start = yaml_malloc((size)*sizeof(*(queue).start))) ? \ ((queue).head = (queue).tail = (queue).start, \ (queue).end = (queue).start+(size), \ 1) : \ ((context)->error = YAML_MEMORY_ERROR, \ 0)) #define QUEUE_DEL(context,queue) \ (yaml_free((queue).start), \ (queue).start = (queue).head = (queue).tail = (queue).end = 0) #define QUEUE_EMPTY(context,queue) \ ((queue).head == (queue).tail) #define ENQUEUE(context,queue,value) \ (((queue).tail != (queue).end \ || yaml_queue_extend((void **)&(queue).start, (void **)&(queue).head, \ (void **)&(queue).tail, (void **)&(queue).end)) ? \ (*((queue).tail++) = value, \ 1) : \ ((context)->error = YAML_MEMORY_ERROR, \ 0)) #define DEQUEUE(context,queue) \ (*((queue).head++)) #define QUEUE_INSERT(context,queue,index,value) \ (((queue).tail != (queue).end \ || yaml_queue_extend((void **)&(queue).start, (void **)&(queue).head, \ (void **)&(queue).tail, (void **)&(queue).end)) ? \ (memmove((queue).head+(index)+1,(queue).head+(index), \ ((queue).tail-(queue).head-(index))*sizeof(*(queue).start)), \ *((queue).head+(index)) = value, \ (queue).tail++, \ 1) : \ ((context)->error = YAML_MEMORY_ERROR, \ 0)) /* * Token initializers. 
*/ #define TOKEN_INIT(token,token_type,token_start_mark,token_end_mark) \ (memset(&(token), 0, sizeof(yaml_token_t)), \ (token).type = (token_type), \ (token).start_mark = (token_start_mark), \ (token).end_mark = (token_end_mark)) #define STREAM_START_TOKEN_INIT(token,token_encoding,start_mark,end_mark) \ (TOKEN_INIT((token),YAML_STREAM_START_TOKEN,(start_mark),(end_mark)), \ (token).data.stream_start.encoding = (token_encoding)) #define STREAM_END_TOKEN_INIT(token,start_mark,end_mark) \ (TOKEN_INIT((token),YAML_STREAM_END_TOKEN,(start_mark),(end_mark))) #define ALIAS_TOKEN_INIT(token,token_value,start_mark,end_mark) \ (TOKEN_INIT((token),YAML_ALIAS_TOKEN,(start_mark),(end_mark)), \ (token).data.alias.value = (token_value)) #define ANCHOR_TOKEN_INIT(token,token_value,start_mark,end_mark) \ (TOKEN_INIT((token),YAML_ANCHOR_TOKEN,(start_mark),(end_mark)), \ (token).data.anchor.value = (token_value)) #define TAG_TOKEN_INIT(token,token_handle,token_suffix,start_mark,end_mark) \ (TOKEN_INIT((token),YAML_TAG_TOKEN,(start_mark),(end_mark)), \ (token).data.tag.handle = (token_handle), \ (token).data.tag.suffix = (token_suffix)) #define SCALAR_TOKEN_INIT(token,token_value,token_length,token_style,start_mark,end_mark) \ (TOKEN_INIT((token),YAML_SCALAR_TOKEN,(start_mark),(end_mark)), \ (token).data.scalar.value = (token_value), \ (token).data.scalar.length = (token_length), \ (token).data.scalar.style = (token_style)) #define VERSION_DIRECTIVE_TOKEN_INIT(token,token_major,token_minor,start_mark,end_mark) \ (TOKEN_INIT((token),YAML_VERSION_DIRECTIVE_TOKEN,(start_mark),(end_mark)), \ (token).data.version_directive.major = (token_major), \ (token).data.version_directive.minor = (token_minor)) #define TAG_DIRECTIVE_TOKEN_INIT(token,token_handle,token_prefix,start_mark,end_mark) \ (TOKEN_INIT((token),YAML_TAG_DIRECTIVE_TOKEN,(start_mark),(end_mark)), \ (token).data.tag_directive.handle = (token_handle), \ (token).data.tag_directive.prefix = (token_prefix)) /* * Event initializers. 
*/ #define EVENT_INIT(event,event_type,event_start_mark,event_end_mark) \ (memset(&(event), 0, sizeof(yaml_event_t)), \ (event).type = (event_type), \ (event).start_mark = (event_start_mark), \ (event).end_mark = (event_end_mark)) #define STREAM_START_EVENT_INIT(event,event_encoding,start_mark,end_mark) \ (EVENT_INIT((event),YAML_STREAM_START_EVENT,(start_mark),(end_mark)), \ (event).data.stream_start.encoding = (event_encoding)) #define STREAM_END_EVENT_INIT(event,start_mark,end_mark) \ (EVENT_INIT((event),YAML_STREAM_END_EVENT,(start_mark),(end_mark))) #define DOCUMENT_START_EVENT_INIT(event,event_version_directive, \ event_tag_directives_start,event_tag_directives_end,event_implicit,start_mark,end_mark) \ (EVENT_INIT((event),YAML_DOCUMENT_START_EVENT,(start_mark),(end_mark)), \ (event).data.document_start.version_directive = (event_version_directive), \ (event).data.document_start.tag_directives.start = (event_tag_directives_start), \ (event).data.document_start.tag_directives.end = (event_tag_directives_end), \ (event).data.document_start.implicit = (event_implicit)) #define DOCUMENT_END_EVENT_INIT(event,event_implicit,start_mark,end_mark) \ (EVENT_INIT((event),YAML_DOCUMENT_END_EVENT,(start_mark),(end_mark)), \ (event).data.document_end.implicit = (event_implicit)) #define ALIAS_EVENT_INIT(event,event_anchor,start_mark,end_mark) \ (EVENT_INIT((event),YAML_ALIAS_EVENT,(start_mark),(end_mark)), \ (event).data.alias.anchor = (event_anchor)) #define SCALAR_EVENT_INIT(event,event_anchor,event_tag,event_value,event_length, \ event_plain_implicit, event_quoted_implicit,event_style,start_mark,end_mark) \ (EVENT_INIT((event),YAML_SCALAR_EVENT,(start_mark),(end_mark)), \ (event).data.scalar.anchor = (event_anchor), \ (event).data.scalar.tag = (event_tag), \ (event).data.scalar.value = (event_value), \ (event).data.scalar.length = (event_length), \ (event).data.scalar.plain_implicit = (event_plain_implicit), \ (event).data.scalar.quoted_implicit = (event_quoted_implicit), \ (event).data.scalar.style = (event_style)) #define SEQUENCE_START_EVENT_INIT(event,event_anchor,event_tag, \ event_implicit,event_style,start_mark,end_mark) \ (EVENT_INIT((event),YAML_SEQUENCE_START_EVENT,(start_mark),(end_mark)), \ (event).data.sequence_start.anchor = (event_anchor), \ (event).data.sequence_start.tag = (event_tag), \ (event).data.sequence_start.implicit = (event_implicit), \ (event).data.sequence_start.style = (event_style)) #define SEQUENCE_END_EVENT_INIT(event,start_mark,end_mark) \ (EVENT_INIT((event),YAML_SEQUENCE_END_EVENT,(start_mark),(end_mark))) #define MAPPING_START_EVENT_INIT(event,event_anchor,event_tag, \ event_implicit,event_style,start_mark,end_mark) \ (EVENT_INIT((event),YAML_MAPPING_START_EVENT,(start_mark),(end_mark)), \ (event).data.mapping_start.anchor = (event_anchor), \ (event).data.mapping_start.tag = (event_tag), \ (event).data.mapping_start.implicit = (event_implicit), \ (event).data.mapping_start.style = (event_style)) #define MAPPING_END_EVENT_INIT(event,start_mark,end_mark) \ (EVENT_INIT((event),YAML_MAPPING_END_EVENT,(start_mark),(end_mark))) /* * Document initializer. 
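These event payloads carry the same anchor/tag/implicit/style fields that Psych exposes to Ruby, both as handler callback arguments and as constructor arguments on the Psych::Nodes classes. A small, illustrative correspondence (a sketch at the Ruby level, not part of the C API):

    require 'psych'

    # Mirrors SCALAR_EVENT_INIT: value, anchor, tag, plain, quoted, style
    foo = Psych::Nodes::Scalar.new('foo', nil, nil, true, false,
                                   Psych::Nodes::Scalar::ANY)

    # Mirrors MAPPING_START_EVENT_INIT: anchor, tag, implicit, style
    map = Psych::Nodes::Mapping.new(nil, nil, true,
                                    Psych::Nodes::Mapping::BLOCK)
    map.children.concat [Psych::Nodes::Scalar.new('key'), foo]

    doc    = Psych::Nodes::Document.new   # defaults mirror DOCUMENT_START_EVENT_INIT's fields
    stream = Psych::Nodes::Stream.new
    doc.children    << map
    stream.children << doc
    stream.to_yaml                        # => "---\nkey: foo\n"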
*/ #define DOCUMENT_INIT(document,document_nodes_start,document_nodes_end, \ document_version_directive,document_tag_directives_start, \ document_tag_directives_end,document_start_implicit, \ document_end_implicit,document_start_mark,document_end_mark) \ (memset(&(document), 0, sizeof(yaml_document_t)), \ (document).nodes.start = (document_nodes_start), \ (document).nodes.end = (document_nodes_end), \ (document).nodes.top = (document_nodes_start), \ (document).version_directive = (document_version_directive), \ (document).tag_directives.start = (document_tag_directives_start), \ (document).tag_directives.end = (document_tag_directives_end), \ (document).start_implicit = (document_start_implicit), \ (document).end_implicit = (document_end_implicit), \ (document).start_mark = (document_start_mark), \ (document).end_mark = (document_end_mark)) /* * Node initializers. */ #define NODE_INIT(node,node_type,node_tag,node_start_mark,node_end_mark) \ (memset(&(node), 0, sizeof(yaml_node_t)), \ (node).type = (node_type), \ (node).tag = (node_tag), \ (node).start_mark = (node_start_mark), \ (node).end_mark = (node_end_mark)) #define SCALAR_NODE_INIT(node,node_tag,node_value,node_length, \ node_style,start_mark,end_mark) \ (NODE_INIT((node),YAML_SCALAR_NODE,(node_tag),(start_mark),(end_mark)), \ (node).data.scalar.value = (node_value), \ (node).data.scalar.length = (node_length), \ (node).data.scalar.style = (node_style)) #define SEQUENCE_NODE_INIT(node,node_tag,node_items_start,node_items_end, \ node_style,start_mark,end_mark) \ (NODE_INIT((node),YAML_SEQUENCE_NODE,(node_tag),(start_mark),(end_mark)), \ (node).data.sequence.items.start = (node_items_start), \ (node).data.sequence.items.end = (node_items_end), \ (node).data.sequence.items.top = (node_items_start), \ (node).data.sequence.style = (node_style)) #define MAPPING_NODE_INIT(node,node_tag,node_pairs_start,node_pairs_end, \ node_style,start_mark,end_mark) \ (NODE_INIT((node),YAML_MAPPING_NODE,(node_tag),(start_mark),(end_mark)), \ (node).data.mapping.pairs.start = (node_pairs_start), \ (node).data.mapping.pairs.end = (node_pairs_end), \ (node).data.mapping.pairs.top = (node_pairs_start), \ (node).data.mapping.style = (node_style)) psych-2.2.4/lib/000077500000000000000000000000001305404671600134215ustar00rootroot00000000000000psych-2.2.4/lib/psych.rb000066400000000000000000000363771305404671600151140ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/versions' case RUBY_ENGINE when 'jruby' require 'psych_jars' org.jruby.ext.psych.PsychLibrary.new.load(JRuby.runtime, false) else require 'psych.so' end require 'psych/nodes' require 'psych/streaming' require 'psych/visitors' require 'psych/handler' require 'psych/tree_builder' require 'psych/parser' require 'psych/omap' require 'psych/set' require 'psych/coder' require 'psych/core_ext' require 'psych/deprecated' require 'psych/stream' require 'psych/json/tree_builder' require 'psych/json/stream' require 'psych/handlers/document_stream' require 'psych/class_loader' ### # = Overview # # Psych is a YAML parser and emitter. # Psych leverages libyaml [Home page: http://pyyaml.org/wiki/LibYAML] # or [HG repo: https://bitbucket.org/xi/libyaml] for its YAML parsing # and emitting capabilities. In addition to wrapping libyaml, Psych also # knows how to serialize and de-serialize most Ruby objects to and from # the YAML format. # # = I NEED TO PARSE OR EMIT YAML RIGHT NOW! 
# # # Parse some YAML # Psych.load("--- foo") # => "foo" # # # Emit some YAML # Psych.dump("foo") # => "--- foo\n...\n" # { :a => 'b'}.to_yaml # => "---\n:a: b\n" # # Got more time on your hands? Keep on reading! # # == YAML Parsing # # Psych provides a range of interfaces for parsing a YAML document ranging from # low level to high level, depending on your parsing needs. At the lowest # level, is an event based parser. Mid level is access to the raw YAML AST, # and at the highest level is the ability to unmarshal YAML to Ruby objects. # # == YAML Emitting # # Psych provides a range of interfaces ranging from low to high level for # producing YAML documents. Very similar to the YAML parsing interfaces, Psych # provides at the lowest level, an event based system, mid-level is building # a YAML AST, and the highest level is converting a Ruby object straight to # a YAML document. # # == High-level API # # === Parsing # # The high level YAML parser provided by Psych simply takes YAML as input and # returns a Ruby data structure. For information on using the high level parser # see Psych.load # # ==== Reading from a string # # Psych.load("--- a") # => 'a' # Psych.load("---\n - a\n - b") # => ['a', 'b'] # # ==== Reading from a file # # Psych.load_file("database.yml") # # ==== Exception handling # # begin # # The second argument changes only the exception contents # Psych.parse("--- `", "file.txt") # rescue Psych::SyntaxError => ex # ex.file # => 'file.txt' # ex.message # => "(file.txt): found character that cannot start any token" # end # # === Emitting # # The high level emitter has the easiest interface. Psych simply takes a Ruby # data structure and converts it to a YAML document. See Psych.dump for more # information on dumping a Ruby data structure. # # ==== Writing to a string # # # Dump an array, get back a YAML string # Psych.dump(['a', 'b']) # => "---\n- a\n- b\n" # # # Dump an array to an IO object # Psych.dump(['a', 'b'], StringIO.new) # => # # # # Dump an array with indentation set # Psych.dump(['a', ['b']], :indentation => 3) # => "---\n- a\n- - b\n" # # # Dump an array to an IO with indentation set # Psych.dump(['a', ['b']], StringIO.new, :indentation => 3) # # ==== Writing to a file # # Currently there is no direct API for dumping Ruby structure to file: # # File.open('database.yml', 'w') do |file| # file.write(Psych.dump(['a', 'b'])) # end # # == Mid-level API # # === Parsing # # Psych provides access to an AST produced from parsing a YAML document. This # tree is built using the Psych::Parser and Psych::TreeBuilder. The AST can # be examined and manipulated freely. Please see Psych::parse_stream, # Psych::Nodes, and Psych::Nodes::Node for more information on dealing with # YAML syntax trees. # # ==== Reading from a string # # # Returns Psych::Nodes::Stream # Psych.parse_stream("---\n - a\n - b") # # # Returns Psych::Nodes::Document # Psych.parse("---\n - a\n - b") # # ==== Reading from a file # # # Returns Psych::Nodes::Stream # Psych.parse_stream(File.read('database.yml')) # # # Returns Psych::Nodes::Document # Psych.parse_file('database.yml') # # ==== Exception handling # # begin # # The second argument changes only the exception contents # Psych.parse("--- `", "file.txt") # rescue Psych::SyntaxError => ex # ex.file # => 'file.txt' # ex.message # => "(file.txt): found character that cannot start any token" # end # # === Emitting # # At the mid level is building an AST. This AST is exactly the same as the AST # used when parsing a YAML document. 
Users can build an AST by hand and the # AST knows how to emit itself as a YAML document. See Psych::Nodes, # Psych::Nodes::Node, and Psych::TreeBuilder for more information on building # a YAML AST. # # ==== Writing to a string # # # We need Psych::Nodes::Stream (not Psych::Nodes::Document) # stream = Psych.parse_stream("---\n - a\n - b") # # stream.to_yaml # => "---\n- a\n- b\n" # # ==== Writing to a file # # # We need Psych::Nodes::Stream (not Psych::Nodes::Document) # stream = Psych.parse_stream(File.read('database.yml')) # # File.open('database.yml', 'w') do |file| # file.write(stream.to_yaml) # end # # == Low-level API # # === Parsing # # The lowest level parser should be used when the YAML input is already known, # and the developer does not want to pay the price of building an AST or # automatic detection and conversion to Ruby objects. See Psych::Parser for # more information on using the event based parser. # # ==== Reading to Psych::Nodes::Stream structure # # parser = Psych::Parser.new(TreeBuilder.new) # => # # parser = Psych.parser # it's an alias for the above # # parser.parse("---\n - a\n - b") # => # # parser.handler # => # # parser.handler.root # => # # # ==== Receiving an events stream # # parser = Psych::Parser.new(Psych::Handlers::Recorder.new) # # parser.parse("---\n - a\n - b") # parser.events # => [list of [event, args] lists] # # event is one of: Psych::Handler::EVENTS # # args are the arguments passed to the event # # === Emitting # # The lowest level emitter is an event based system. Events are sent to a # Psych::Emitter object. That object knows how to convert the events to a YAML # document. This interface should be used when document format is known in # advance or speed is a concern. See Psych::Emitter for more information. # # ==== Writing to a Ruby structure # # Psych.parser.parse("--- a") # => # # # parser.handler.first # => # # parser.handler.first.to_ruby # => ["a"] # # parser.handler.root.first # => # # parser.handler.root.first.to_ruby # => "a" # # # You can instantiate an Emitter manually # Psych::Visitors::ToRuby.new.accept(parser.handler.root.first) # # => "a" module Psych # The version of libyaml Psych is using LIBYAML_VERSION = Psych.libyaml_version.join '.' FALLBACK = Struct.new :to_ruby # :nodoc: ### # Load +yaml+ in to a Ruby data structure. If multiple documents are # provided, the object contained in the first document will be returned. # +filename+ will be used in the exception message if any exception is raised # while parsing. # # Raises a Psych::SyntaxError when a YAML syntax error is detected. # # Example: # # Psych.load("--- a") # => 'a' # Psych.load("---\n - a\n - b") # => ['a', 'b'] # # begin # Psych.load("--- `", "file.txt") # rescue Psych::SyntaxError => ex # ex.file # => 'file.txt' # ex.message # => "(file.txt): found character that cannot start any token" # end def self.load yaml, filename = nil, fallback = false result = parse(yaml, filename, fallback) result ? result.to_ruby : result end ### # Safely load the yaml string in +yaml+. By default, only the following # classes are allowed to be deserialized: # # * TrueClass # * FalseClass # * NilClass # * Numeric # * String # * Array # * Hash # # Recursive data structures are not allowed by default. Arbitrary classes # can be allowed by adding those classes to the +whitelist+. They are # additive. For example, to allow Date deserialization: # # Psych.safe_load(yaml, [Date]) # # Now the Date class can be loaded in addition to the classes listed above. 
# # Aliases can be explicitly allowed by changing the +aliases+ parameter. # For example: # # x = [] # x << x # yaml = Psych.dump x # Psych.safe_load yaml # => raises an exception # Psych.safe_load yaml, [], [], true # => loads the aliases # # A Psych::DisallowedClass exception will be raised if the yaml contains a # class that isn't in the whitelist. # # A Psych::BadAlias exception will be raised if the yaml contains aliases # but the +aliases+ parameter is set to false. def self.safe_load yaml, whitelist_classes = [], whitelist_symbols = [], aliases = false, filename = nil result = parse(yaml, filename) return unless result class_loader = ClassLoader::Restricted.new(whitelist_classes.map(&:to_s), whitelist_symbols.map(&:to_s)) scanner = ScalarScanner.new class_loader if aliases visitor = Visitors::ToRuby.new scanner, class_loader else visitor = Visitors::NoAliasRuby.new scanner, class_loader end visitor.accept result end ### # Parse a YAML string in +yaml+. Returns the Psych::Nodes::Document. # +filename+ is used in the exception message if a Psych::SyntaxError is # raised. # # Raises a Psych::SyntaxError when a YAML syntax error is detected. # # Example: # # Psych.parse("---\n - a\n - b") # => # # # begin # Psych.parse("--- `", "file.txt") # rescue Psych::SyntaxError => ex # ex.file # => 'file.txt' # ex.message # => "(file.txt): found character that cannot start any token" # end # # See Psych::Nodes for more information about YAML AST. def self.parse yaml, filename = nil, fallback = false parse_stream(yaml, filename) do |node| return node end fallback end ### # Parse a file at +filename+. Returns the Psych::Nodes::Document. # # Raises a Psych::SyntaxError when a YAML syntax error is detected. def self.parse_file filename File.open filename, 'r:bom|utf-8' do |f| parse f, filename end end ### # Returns a default parser def self.parser Psych::Parser.new(TreeBuilder.new) end ### # Parse a YAML string in +yaml+. Returns the Psych::Nodes::Stream. # This method can handle multiple YAML documents contained in +yaml+. # +filename+ is used in the exception message if a Psych::SyntaxError is # raised. # # If a block is given, a Psych::Nodes::Document node will be yielded to the # block as it's being parsed. # # Raises a Psych::SyntaxError when a YAML syntax error is detected. # # Example: # # Psych.parse_stream("---\n - a\n - b") # => # # # Psych.parse_stream("--- a\n--- b") do |node| # node # => # # end # # begin # Psych.parse_stream("--- `", "file.txt") # rescue Psych::SyntaxError => ex # ex.file # => 'file.txt' # ex.message # => "(file.txt): found character that cannot start any token" # end # # See Psych::Nodes for more information about YAML AST. def self.parse_stream yaml, filename = nil, &block if block_given? parser = Psych::Parser.new(Handlers::DocumentStream.new(&block)) parser.parse yaml, filename else parser = self.parser parser.parse yaml, filename parser.handler.root end end ### # call-seq: # Psych.dump(o) -> string of yaml # Psych.dump(o, options) -> string of yaml # Psych.dump(o, io) -> io object passed in # Psych.dump(o, io, options) -> io object passed in # # Dump Ruby object +o+ to a YAML string. Optional +options+ may be passed in # to control the output format. If an IO object is passed in, the YAML will # be dumped to that IO object. 
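To make the whitelisting rules of Psych.safe_load concrete, here is a short illustrative session; Date and the :name symbol stand in for whatever your documents actually require:

    require 'psych'
    require 'date'

    Psych.safe_load("--- 2017-01-01")
    # => raises Psych::DisallowedClass (Date is not whitelisted by default)

    Psych.safe_load("--- 2017-01-01", [Date])
    # => #<Date: 2017-01-01 ...>

    # Symbols need both the Symbol class and each individual symbol:
    Psych.safe_load("--- :name", [Symbol], [:name])   # => :name

    # Aliases are rejected unless the fourth argument is true:
    yaml = "---\na: &a [1, 2]\nb: *a\n"
    Psych.safe_load(yaml, [], [], true)               # => {"a"=>[1, 2], "b"=>[1, 2]}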
# # Example: # # # Dump an array, get back a YAML string # Psych.dump(['a', 'b']) # => "---\n- a\n- b\n" # # # Dump an array to an IO object # Psych.dump(['a', 'b'], StringIO.new) # => # # # # Dump an array with indentation set # Psych.dump(['a', ['b']], :indentation => 3) # => "---\n- a\n- - b\n" # # # Dump an array to an IO with indentation set # Psych.dump(['a', ['b']], StringIO.new, :indentation => 3) def self.dump o, io = nil, options = {} if Hash === io options = io io = nil end visitor = Psych::Visitors::YAMLTree.create options visitor << o visitor.tree.yaml io, options end ### # Dump a list of objects as separate documents to a document stream. # # Example: # # Psych.dump_stream("foo\n ", {}) # => "--- ! \"foo\\n \"\n--- {}\n" def self.dump_stream *objects visitor = Psych::Visitors::YAMLTree.create({}) objects.each do |o| visitor << o end visitor.tree.yaml end ### # Dump Ruby +object+ to a JSON string. def self.to_json object visitor = Psych::Visitors::JSONTree.create visitor << object visitor.tree.yaml end ### # Load multiple documents given in +yaml+. Returns the parsed documents # as a list. If a block is given, each document will be converted to Ruby # and passed to the block during parsing # # Example: # # Psych.load_stream("--- foo\n...\n--- bar\n...") # => ['foo', 'bar'] # # list = [] # Psych.load_stream("--- foo\n...\n--- bar\n...") do |ruby| # list << ruby # end # list # => ['foo', 'bar'] # def self.load_stream yaml, filename = nil if block_given? parse_stream(yaml, filename) do |node| yield node.to_ruby end else parse_stream(yaml, filename).children.map { |child| child.to_ruby } end end ### # Load the document contained in +filename+. Returns the yaml contained in # +filename+ as a Ruby object, or if the file is empty, it returns # the specified default return value, which defaults to an empty Hash def self.load_file filename, fallback = false File.open(filename, 'r:bom|utf-8') { |f| self.load f, filename, FALLBACK.new(fallback) } end # :stopdoc: @domain_types = {} def self.add_domain_type domain, type_tag, &block key = ['tag', domain, type_tag].join ':' @domain_types[key] = [key, block] @domain_types["tag:#{type_tag}"] = [key, block] end def self.add_builtin_type type_tag, &block domain = 'yaml.org,2002' key = ['tag', domain, type_tag].join ':' @domain_types[key] = [key, block] end def self.remove_type type_tag @domain_types.delete type_tag end @load_tags = {} @dump_tags = {} def self.add_tag tag, klass @load_tags[tag] = klass.name @dump_tags[klass] = tag end class << self attr_accessor :load_tags attr_accessor :dump_tags attr_accessor :domain_types end # :startdoc: end psych-2.2.4/lib/psych/000077500000000000000000000000001305404671600145475ustar00rootroot00000000000000psych-2.2.4/lib/psych/class_loader.rb000066400000000000000000000036441305404671600175360ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/omap' require 'psych/set' module Psych class ClassLoader # :nodoc: BIG_DECIMAL = 'BigDecimal' COMPLEX = 'Complex' DATE = 'Date' DATE_TIME = 'DateTime' EXCEPTION = 'Exception' OBJECT = 'Object' PSYCH_OMAP = 'Psych::Omap' PSYCH_SET = 'Psych::Set' RANGE = 'Range' RATIONAL = 'Rational' REGEXP = 'Regexp' STRUCT = 'Struct' SYMBOL = 'Symbol' def initialize @cache = CACHE.dup end def load klassname return nil if !klassname || klassname.empty? 
find klassname end def symbolize sym symbol sym.to_sym end constants.each do |const| konst = const_get const define_method(const.to_s.downcase) do load konst end end private def find klassname @cache[klassname] ||= resolve(klassname) end def resolve klassname name = klassname retried = false begin path2class(name) rescue ArgumentError, NameError => ex unless retried name = "Struct::#{name}" retried = ex retry end raise retried end end CACHE = Hash[constants.map { |const| val = const_get const begin [val, ::Object.const_get(val)] rescue nil end }.compact] class Restricted < ClassLoader def initialize classes, symbols @classes = classes @symbols = symbols super() end def symbolize sym return super if @symbols.empty? if @symbols.include? sym super else raise DisallowedClass, 'Symbol' end end private def find klassname if @classes.include? klassname super else raise DisallowedClass, klassname end end end end end psych-2.2.4/lib/psych/coder.rb000066400000000000000000000040631305404671600161730ustar00rootroot00000000000000# frozen_string_literal: false module Psych ### # If an object defines +encode_with+, then an instance of Psych::Coder will # be passed to the method when the object is being serialized. The Coder # automatically assumes a Psych::Nodes::Mapping is being emitted. Other # objects like Sequence and Scalar may be emitted if +seq=+ or +scalar=+ are # called, respectively. class Coder attr_accessor :tag, :style, :implicit, :object attr_reader :type, :seq def initialize tag @map = {} @seq = [] @implicit = false @type = :map @tag = tag @style = Psych::Nodes::Mapping::BLOCK @scalar = nil @object = nil end def scalar *args if args.length > 0 warn "#{caller[0]}: Coder#scalar(a,b,c) is deprecated" if $VERBOSE @tag, @scalar, _ = args @type = :scalar end @scalar end # Emit a map. The coder will be yielded to the block. def map tag = @tag, style = @style @tag = tag @style = style yield self if block_given? @map end # Emit a scalar with +value+ and +tag+ def represent_scalar tag, value self.tag = tag self.scalar = value end # Emit a sequence with +list+ and +tag+ def represent_seq tag, list @tag = tag self.seq = list end # Emit a sequence with +map+ and +tag+ def represent_map tag, map @tag = tag self.map = map end # Emit an arbitrary object +obj+ and +tag+ def represent_object tag, obj @tag = tag @type = :object @object = obj end # Emit a scalar with +value+ def scalar= value @type = :scalar @scalar = value end # Emit a map with +value+ def map= map @type = :map @map = map end def []= k, v @type = :map @map[k] = v end alias :add :[]= def [] k @type = :map @map[k] end # Emit a sequence of +list+ def seq= list @type = :seq @seq = list end end end psych-2.2.4/lib/psych/core_ext.rb000066400000000000000000000014161305404671600167060ustar00rootroot00000000000000# frozen_string_literal: false class Object def self.yaml_tag url Psych.add_tag(url, self) end # FIXME: rename this to "to_yaml" when syck is removed ### # call-seq: to_yaml(options = {}) # # Convert an object to YAML. See Psych.dump for more information on the # available +options+. 
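The Coder above is the object handed to +encode_with+ while dumping; its mirror image on load is +init_with+. A small sketch of a class that round-trips through both hooks (the Point class itself is illustrative):

    require 'psych'

    class Point
      def initialize x, y
        @x, @y = x, y
      end

      # Dumping: fill in the Psych::Coder (a mapping by default).
      def encode_with coder
        coder['x'] = @x
        coder['y'] = @y
      end

      # Loading: read the values back out of the coder.
      def init_with coder
        @x = coder['x']
        @y = coder['y']
      end
    end

    yaml = Psych.dump Point.new(1, 2)  # => "--- !ruby/object:Point\nx: 1\ny: 2\n"
    Psych.load(yaml)                   # => a Point with @x == 1 and @y == 2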
def psych_to_yaml options = {} Psych.dump self, options end remove_method :to_yaml rescue nil alias :to_yaml :psych_to_yaml end class Module def psych_yaml_as url return if caller[0].end_with?('rubytypes.rb') if $VERBOSE warn "#{caller[0]}: yaml_as is deprecated, please use yaml_tag" end Psych.add_tag(url, self) end remove_method :yaml_as rescue nil alias :yaml_as :psych_yaml_as end if defined?(::IRB) require 'psych/y' end psych-2.2.4/lib/psych/deprecated.rb000066400000000000000000000050261305404671600171770ustar00rootroot00000000000000# frozen_string_literal: false require 'date' module Psych DEPRECATED = __FILE__ # :nodoc: module DeprecatedMethods # :nodoc: attr_accessor :taguri attr_accessor :to_yaml_style end def self.quick_emit thing, opts = {}, &block # :nodoc: warn "#{caller[0]}: YAML.quick_emit is deprecated" if $VERBOSE && !caller[0].start_with?(File.dirname(__FILE__)) target = eval 'self', block.binding target.extend DeprecatedMethods metaclass = class << target; self; end metaclass.send(:define_method, :encode_with) do |coder| target.taguri = coder.tag target.to_yaml_style = coder.style block.call coder end target.psych_to_yaml unless opts[:nodump] end # This method is deprecated, use Psych.load_stream instead. def self.load_documents yaml, &block if $VERBOSE warn "#{caller[0]}: load_documents is deprecated, use load_stream" end list = load_stream yaml return list unless block_given? list.each(&block) end def self.detect_implicit thing warn "#{caller[0]}: detect_implicit is deprecated" if $VERBOSE return '' unless String === thing return 'null' if '' == thing ss = ScalarScanner.new(ClassLoader.new) ss.tokenize(thing).class.name.downcase end def self.add_ruby_type type_tag, &block warn "#{caller[0]}: add_ruby_type is deprecated, use add_domain_type" if $VERBOSE domain = 'ruby.yaml.org,2002' key = ['tag', domain, type_tag].join ':' @domain_types[key] = [key, block] end def self.add_private_type type_tag, &block warn "#{caller[0]}: add_private_type is deprecated, use add_domain_type" if $VERBOSE domain = 'x-private' key = [domain, type_tag].join ':' @domain_types[key] = [key, block] end def self.tagurize thing warn "#{caller[0]}: add_private_type is deprecated, use add_domain_type" if $VERBOSE return thing unless String === thing "tag:yaml.org,2002:#{thing}" end def self.read_type_class type, reference warn "#{caller[0]}: read_type_class is deprecated" if $VERBOSE _, _, type, name = type.split ':', 4 reference = name.split('::').inject(reference) do |k,n| k.const_get(n.to_sym) end if name [type, reference] end def self.object_maker klass, hash warn "#{caller[0]}: object_maker is deprecated" if $VERBOSE klass.allocate.tap do |obj| hash.each { |k,v| obj.instance_variable_set(:"@#{k}", v) } end end end class Object undef :to_yaml_properties rescue nil def to_yaml_properties # :nodoc: instance_variables end end psych-2.2.4/lib/psych/exception.rb000066400000000000000000000004111305404671600170660ustar00rootroot00000000000000# frozen_string_literal: false module Psych class Exception < RuntimeError end class BadAlias < Exception end class DisallowedClass < Exception def initialize klass_name super "Tried to load unspecified class: #{klass_name}" end end end psych-2.2.4/lib/psych/handler.rb000066400000000000000000000160631305404671600165170ustar00rootroot00000000000000# frozen_string_literal: false module Psych ### # Psych::Handler is an abstract base class that defines the events used # when dealing with Psych::Parser. 
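As a concrete, illustrative example of the handler contract described here, the subclass below overrides only the event it cares about and inherits no-op implementations for everything else:

    require 'psych'

    class ScalarCounter < Psych::Handler
      attr_reader :count

      def initialize
        @count = 0
        super
      end

      # Called once per scalar event produced by the parser.
      def scalar value, anchor, tag, plain, quoted, style
        @count += 1
      end
    end

    handler = ScalarCounter.new
    Psych::Parser.new(handler).parse("---\nfoo: bar\nbaz: 1\n")
    handler.count  # => 4 ("foo", "bar", "baz", "1")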
Clients who want to use Psych::Parser # should implement a class that inherits from Psych::Handler and define # events that they can handle. # # Psych::Handler defines all events that Psych::Parser can possibly send to # event handlers. # # See Psych::Parser for more details class Handler ### # Configuration options for dumping YAML. class DumperOptions attr_accessor :line_width, :indentation, :canonical def initialize @line_width = 0 @indentation = 2 @canonical = false end end # Default dumping options OPTIONS = DumperOptions.new # Events that a Handler should respond to. EVENTS = [ :alias, :empty, :end_document, :end_mapping, :end_sequence, :end_stream, :scalar, :start_document, :start_mapping, :start_sequence, :start_stream ] ### # Called with +encoding+ when the YAML stream starts. This method is # called once per stream. A stream may contain multiple documents. # # See the constants in Psych::Parser for the possible values of +encoding+. def start_stream encoding end ### # Called when the document starts with the declared +version+, # +tag_directives+, if the document is +implicit+. # # +version+ will be an array of integers indicating the YAML version being # dealt with, +tag_directives+ is a list of tuples indicating the prefix # and suffix of each tag, and +implicit+ is a boolean indicating whether # the document is started implicitly. # # === Example # # Given the following YAML: # # %YAML 1.1 # %TAG ! tag:tenderlovemaking.com,2009: # --- !squee # # The parameters for start_document must be this: # # version # => [1, 1] # tag_directives # => [["!", "tag:tenderlovemaking.com,2009:"]] # implicit # => false def start_document version, tag_directives, implicit end ### # Called with the document ends. +implicit+ is a boolean value indicating # whether or not the document has an implicit ending. # # === Example # # Given the following YAML: # # --- # hello world # # +implicit+ will be true. Given this YAML: # # --- # hello world # ... # # +implicit+ will be false. def end_document implicit end ### # Called when an alias is found to +anchor+. +anchor+ will be the name # of the anchor found. # # === Example # # Here we have an example of an array that references itself in YAML: # # --- &ponies # - first element # - *ponies # # &ponies is the achor, *ponies is the alias. In this case, alias is # called with "ponies". def alias anchor end ### # Called when a scalar +value+ is found. The scalar may have an # +anchor+, a +tag+, be implicitly +plain+ or implicitly +quoted+ # # +value+ is the string value of the scalar # +anchor+ is an associated anchor or nil # +tag+ is an associated tag or nil # +plain+ is a boolean value # +quoted+ is a boolean value # +style+ is an integer idicating the string style # # See the constants in Psych::Nodes::Scalar for the possible values of # +style+ # # === Example # # Here is a YAML document that exercises most of the possible ways this # method can be called: # # --- # - !str "foo" # - &anchor fun # - many # lines # - | # many # newlines # # The above YAML document contains a list with four strings. Here are # the parameters sent to this method in the same order: # # # value anchor tag plain quoted style # ["foo", nil, "!str", false, false, 3 ] # ["fun", "anchor", nil, true, false, 1 ] # ["many lines", nil, nil, true, false, 1 ] # ["many\nnewlines\n", nil, nil, false, true, 4 ] # def scalar value, anchor, tag, plain, quoted, style end ### # Called when a sequence is started. # # +anchor+ is the anchor associated with the sequence or nil. 
# +tag+ is the tag associated with the sequence or nil. # +implicit+ a boolean indicating whether or not the sequence was implicitly # started. # +style+ is an integer indicating the list style. # # See the constants in Psych::Nodes::Sequence for the possible values of # +style+. # # === Example # # Here is a YAML document that exercises most of the possible ways this # method can be called: # # --- # - !!seq [ # a # ] # - &pewpew # - b # # The above YAML document consists of three lists, an outer list that # contains two inner lists. Here is a matrix of the parameters sent # to represent these lists: # # # anchor tag implicit style # [nil, nil, true, 1 ] # [nil, "tag:yaml.org,2002:seq", false, 2 ] # ["pewpew", nil, true, 1 ] def start_sequence anchor, tag, implicit, style end ### # Called when a sequence ends. def end_sequence end ### # Called when a map starts. # # +anchor+ is the anchor associated with the map or +nil+. # +tag+ is the tag associated with the map or +nil+. # +implicit+ is a boolean indicating whether or not the map was implicitly # started. # +style+ is an integer indicating the mapping style. # # See the constants in Psych::Nodes::Mapping for the possible values of # +style+. # # === Example # # Here is a YAML document that exercises most of the possible ways this # method can be called: # # --- # k: !!map { hello: world } # v: &pewpew # hello: world # # The above YAML document consists of three maps, an outer map that contains # two inner maps. Below is a matrix of the parameters sent in order to # represent these three maps: # # # anchor tag implicit style # [nil, nil, true, 1 ] # [nil, "tag:yaml.org,2002:map", false, 2 ] # ["pewpew", nil, true, 1 ] def start_mapping anchor, tag, implicit, style end ### # Called when a map ends def end_mapping end ### # Called when an empty event happens. (Which, as far as I can tell, is # never). def empty end ### # Called when the YAML stream ends def end_stream end ### # Is this handler a streaming handler? def streaming? false end end end psych-2.2.4/lib/psych/handlers/000077500000000000000000000000001305404671600163475ustar00rootroot00000000000000psych-2.2.4/lib/psych/handlers/document_stream.rb000066400000000000000000000010071305404671600220630ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/tree_builder' module Psych module Handlers class DocumentStream < Psych::TreeBuilder # :nodoc: def initialize &block super @block = block end def start_document version, tag_directives, implicit n = Nodes::Document.new version, tag_directives, implicit push n end def end_document implicit_end = !streaming? @last.implicit_end = implicit_end @block.call pop end end end end psych-2.2.4/lib/psych/handlers/recorder.rb000066400000000000000000000015661305404671600205110ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/handler' module Psych module Handlers ### # This handler will capture an event and record the event. Recorder events # are available vial Psych::Handlers::Recorder#events. 
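Psych::Handlers::DocumentStream defined above is what lets Psych.parse_stream yield documents as soon as they finish parsing; it can also be driven directly. A short sketch:

    require 'psych'

    handler = Psych::Handlers::DocumentStream.new do |document|
      # Each Psych::Nodes::Document arrives as soon as it is complete.
      p document.to_ruby
    end

    Psych::Parser.new(handler).parse("--- foo\n--- bar\n")
    # prints "foo" then "bar"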
# # For example: # # recorder = Psych::Handlers::Recorder.new # parser = Psych::Parser.new recorder # parser.parse '--- foo' # # recorder.events # => [list of events] # # # Replay the events # # emitter = Psych::Emitter.new $stdout # recorder.events.each do |m, args| # emitter.send m, *args # end class Recorder < Psych::Handler attr_reader :events def initialize @events = [] super end EVENTS.each do |event| define_method event do |*args| @events << [event, args] end end end end end psych-2.2.4/lib/psych/json/000077500000000000000000000000001305404671600155205ustar00rootroot00000000000000psych-2.2.4/lib/psych/json/ruby_events.rb000066400000000000000000000007511305404671600204150ustar00rootroot00000000000000# frozen_string_literal: false module Psych module JSON module RubyEvents # :nodoc: def visit_Time o formatted = format_time o @emitter.scalar formatted, nil, nil, false, true, Nodes::Scalar::DOUBLE_QUOTED end def visit_DateTime o visit_Time o.to_time end def visit_String o @emitter.scalar o.to_s, nil, nil, false, true, Nodes::Scalar::DOUBLE_QUOTED end alias :visit_Symbol :visit_String end end end psych-2.2.4/lib/psych/json/stream.rb000066400000000000000000000006271305404671600173450ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/json/ruby_events' require 'psych/json/yaml_events' module Psych module JSON class Stream < Psych::Visitors::JSONTree include Psych::JSON::RubyEvents include Psych::Streaming extend Psych::Streaming::ClassMethods class Emitter < Psych::Stream::Emitter # :nodoc: include Psych::JSON::YAMLEvents end end end end psych-2.2.4/lib/psych/json/tree_builder.rb000066400000000000000000000005461305404671600205170ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/json/yaml_events' module Psych module JSON ### # Psych::JSON::TreeBuilder is an event based AST builder. Events are sent # to an instance of Psych::JSON::TreeBuilder and a JSON AST is constructed. class TreeBuilder < Psych::TreeBuilder include Psych::JSON::YAMLEvents end end end psych-2.2.4/lib/psych/json/yaml_events.rb000066400000000000000000000014141305404671600203730ustar00rootroot00000000000000# frozen_string_literal: false module Psych module JSON module YAMLEvents # :nodoc: def start_document version, tag_directives, implicit super(version, tag_directives, !streaming?) end def end_document implicit_end = !streaming? super(implicit_end) end def start_mapping anchor, tag, implicit, style super(anchor, nil, true, Nodes::Mapping::FLOW) end def start_sequence anchor, tag, implicit, style super(anchor, nil, true, Nodes::Sequence::FLOW) end def scalar value, anchor, tag, plain, quoted, style if "tag:yaml.org,2002:null" == tag super('null', nil, nil, true, false, Nodes::Scalar::PLAIN) else super end end end end end psych-2.2.4/lib/psych/nodes.rb000066400000000000000000000045541305404671600162140ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/nodes/node' require 'psych/nodes/stream' require 'psych/nodes/document' require 'psych/nodes/sequence' require 'psych/nodes/scalar' require 'psych/nodes/mapping' require 'psych/nodes/alias' module Psych ### # = Overview # # When using Psych.load to deserialize a YAML document, the document is # translated to an intermediary AST. That intermediary AST is then # translated in to a Ruby object graph. # # In the opposite direction, when using Psych.dump, the Ruby object graph is # translated to an intermediary AST which is then converted to a YAML # document. 
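  #
  # Both translations can also be driven by hand. Here is a small sketch
  # using Psych.parse and Psych::Visitors::YAMLTree (each documented
  # elsewhere in this library):
  #
  #   # YAML document -> AST -> Ruby object graph
  #   ast = Psych.parse "--- foo"
  #   ast.to_ruby # => "foo"
  #
  #   # Ruby object graph -> AST -> YAML document
  #   builder = Psych::Visitors::YAMLTree.create
  #   builder << "foo"
  #   builder.tree.yaml # => "--- foo\n"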
# # Psych::Nodes contains all of the classes that make up the nodes of a YAML # AST. You can manually build an AST and use one of the visitors (see # Psych::Visitors) to convert that AST to either a YAML document or to a # Ruby object graph. # # Here is an example of building an AST that represents a list with one # scalar: # # # Create our nodes # stream = Psych::Nodes::Stream.new # doc = Psych::Nodes::Document.new # seq = Psych::Nodes::Sequence.new # scalar = Psych::Nodes::Scalar.new('foo') # # # Build up our tree # stream.children << doc # doc.children << seq # seq.children << scalar # # The stream is the root of the tree. We can then convert the tree to YAML: # # stream.to_yaml => "---\n- foo\n" # # Or convert it to Ruby: # # stream.to_ruby => [["foo"]] # # == YAML AST Requirements # # A valid YAML AST *must* have one Psych::Nodes::Stream at the root. A # Psych::Nodes::Stream node must have 1 or more Psych::Nodes::Document nodes # as children. # # Psych::Nodes::Document nodes must have one and *only* one child. That child # may be one of: # # * Psych::Nodes::Sequence # * Psych::Nodes::Mapping # * Psych::Nodes::Scalar # # Psych::Nodes::Sequence and Psych::Nodes::Mapping nodes may have many # children, but Psych::Nodes::Mapping nodes should have an even number of # children. # # All of these are valid children for Psych::Nodes::Sequence and # Psych::Nodes::Mapping nodes: # # * Psych::Nodes::Sequence # * Psych::Nodes::Mapping # * Psych::Nodes::Scalar # * Psych::Nodes::Alias # # Psych::Nodes::Scalar and Psych::Nodes::Alias are both terminal nodes and # should not have any children. module Nodes end end psych-2.2.4/lib/psych/nodes/000077500000000000000000000000001305404671600156575ustar00rootroot00000000000000psych-2.2.4/lib/psych/nodes/alias.rb000066400000000000000000000007571305404671600173060ustar00rootroot00000000000000# frozen_string_literal: false module Psych module Nodes ### # This class represents a {YAML Alias}[http://yaml.org/spec/1.1/#alias]. # It points to an +anchor+. # # A Psych::Nodes::Alias is a terminal node and may have no children. class Alias < Psych::Nodes::Node # The anchor this alias links to attr_accessor :anchor # Create a new Alias that points to an +anchor+ def initialize anchor @anchor = anchor end end end end psych-2.2.4/lib/psych/nodes/document.rb000066400000000000000000000034421305404671600200250ustar00rootroot00000000000000# frozen_string_literal: false module Psych module Nodes ### # This represents a YAML Document. This node must be a child of # Psych::Nodes::Stream. A Psych::Nodes::Document must have one child, # and that child may be one of the following: # # * Psych::Nodes::Sequence # * Psych::Nodes::Mapping # * Psych::Nodes::Scalar class Document < Psych::Nodes::Node # The version of the YAML document attr_accessor :version # A list of tag directives for this document attr_accessor :tag_directives # Was this document implicitly created? attr_accessor :implicit # Is the end of the document implicit? attr_accessor :implicit_end ### # Create a new Psych::Nodes::Document object. # # +version+ is a list indicating the YAML version. # +tags_directives+ is a list of tag directive declarations # +implicit+ is a flag indicating whether the document will be implicitly # started. 
# # == Example: # This creates a YAML document object that represents a YAML 1.1 document # with one tag directive, and has an implicit start: # # Psych::Nodes::Document.new( # [1,1], # [["!", "tag:tenderlovemaking.com,2009:"]], # true # ) # # == See Also # See also Psych::Handler#start_document def initialize version = [], tag_directives = [], implicit = false super() @version = version @tag_directives = tag_directives @implicit = implicit @implicit_end = true end ### # Returns the root node. A Document may only have one root node: # http://yaml.org/spec/1.1/#id898031 def root children.first end end end end psych-2.2.4/lib/psych/nodes/mapping.rb000066400000000000000000000030131305404671600176340ustar00rootroot00000000000000# frozen_string_literal: false module Psych module Nodes ### # This class represents a {YAML Mapping}[http://yaml.org/spec/1.1/#mapping]. # # A Psych::Nodes::Mapping node may have 0 or more children, but must have # an even number of children. Here are the valid children a # Psych::Nodes::Mapping node may have: # # * Psych::Nodes::Sequence # * Psych::Nodes::Mapping # * Psych::Nodes::Scalar # * Psych::Nodes::Alias class Mapping < Psych::Nodes::Node # Any Map Style ANY = 0 # Block Map Style BLOCK = 1 # Flow Map Style FLOW = 2 # The optional anchor for this mapping attr_accessor :anchor # The optional tag for this mapping attr_accessor :tag # Is this an implicit mapping? attr_accessor :implicit # The style of this mapping attr_accessor :style ### # Create a new Psych::Nodes::Mapping object. # # +anchor+ is the anchor associated with the map or +nil+. # +tag+ is the tag associated with the map or +nil+. # +implicit+ is a boolean indicating whether or not the map was implicitly # started. # +style+ is an integer indicating the mapping style. # # == See Also # See also Psych::Handler#start_mapping def initialize anchor = nil, tag = nil, implicit = true, style = BLOCK super() @anchor = anchor @tag = tag @implicit = implicit @style = style end end end end psych-2.2.4/lib/psych/nodes/node.rb000066400000000000000000000024171305404671600171350ustar00rootroot00000000000000# frozen_string_literal: false require 'stringio' require 'psych/class_loader' require 'psych/scalar_scanner' module Psych module Nodes ### # The base class for any Node in a YAML parse tree. This class should # never be instantiated. class Node include Enumerable # The children of this node attr_reader :children # An associated tag attr_reader :tag # Create a new Psych::Nodes::Node def initialize @children = [] end ### # Iterate over each node in the tree. Yields each node to +block+ depth # first. def each &block return enum_for :each unless block_given? Visitors::DepthFirst.new(block).accept self end ### # Convert this node to Ruby. # # See also Psych::Visitors::ToRuby def to_ruby Visitors::ToRuby.create.accept(self) end alias :transform :to_ruby ### # Convert this node to YAML. # # See also Psych::Visitors::Emitter def yaml io = nil, options = {} real_io = io || StringIO.new(''.encode('utf-8')) Visitors::Emitter.new(real_io, options).accept self return real_io.string unless io io end alias :to_yaml :yaml end end end psych-2.2.4/lib/psych/nodes/scalar.rb000066400000000000000000000031461305404671600174550ustar00rootroot00000000000000# frozen_string_literal: false module Psych module Nodes ### # This class represents a {YAML Scalar}[http://yaml.org/spec/1.1/#id858081]. # # This node type is a terminal node and should not have any children. 
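    #
    # For example, a plain scalar node can be built by hand (a minimal
    # sketch; see Psych::Handler#scalar for the meaning of each argument):
    #
    #   node = Psych::Nodes::Scalar.new 'foo', nil, nil, true, false,
    #                                   Psych::Nodes::Scalar::PLAIN
    #   node.value # => "foo"
    #   node.plain # => true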
class Scalar < Psych::Nodes::Node # Any style scalar, the emitter chooses ANY = 0 # Plain scalar style PLAIN = 1 # Single quoted style SINGLE_QUOTED = 2 # Double quoted style DOUBLE_QUOTED = 3 # Literal style LITERAL = 4 # Folded style FOLDED = 5 # The scalar value attr_accessor :value # The anchor value (if there is one) attr_accessor :anchor # The tag value (if there is one) attr_accessor :tag # Is this a plain scalar? attr_accessor :plain # Is this scalar quoted? attr_accessor :quoted # The style of this scalar attr_accessor :style ### # Create a new Psych::Nodes::Scalar object. # # +value+ is the string value of the scalar # +anchor+ is an associated anchor or nil # +tag+ is an associated tag or nil # +plain+ is a boolean value # +quoted+ is a boolean value # +style+ is an integer idicating the string style # # == See Also # # See also Psych::Handler#scalar def initialize value, anchor = nil, tag = nil, plain = true, quoted = false, style = ANY @value = value @anchor = anchor @tag = tag @plain = plain @quoted = quoted @style = style end end end end psych-2.2.4/lib/psych/nodes/sequence.rb000066400000000000000000000040021305404671600200100ustar00rootroot00000000000000# frozen_string_literal: false module Psych module Nodes ### # This class represents a # {YAML sequence}[http://yaml.org/spec/1.1/#sequence/syntax]. # # A YAML sequence is basically a list, and looks like this: # # %YAML 1.1 # --- # - I am # - a Sequence # # A YAML sequence may have an anchor like this: # # %YAML 1.1 # --- # &A [ # "This sequence", # "has an anchor" # ] # # A YAML sequence may also have a tag like this: # # %YAML 1.1 # --- # !!seq [ # "This sequence", # "has a tag" # ] # # This class represents a sequence in a YAML document. A # Psych::Nodes::Sequence node may have 0 or more children. Valid children # for this node are: # # * Psych::Nodes::Sequence # * Psych::Nodes::Mapping # * Psych::Nodes::Scalar # * Psych::Nodes::Alias class Sequence < Psych::Nodes::Node # Any Styles, emitter chooses ANY = 0 # Block style sequence BLOCK = 1 # Flow style sequence FLOW = 2 # The anchor for this sequence (if any) attr_accessor :anchor # The tag name for this sequence (if any) attr_accessor :tag # Is this sequence started implicitly? attr_accessor :implicit # The sequence style used attr_accessor :style ### # Create a new object representing a YAML sequence. # # +anchor+ is the anchor associated with the sequence or nil. # +tag+ is the tag associated with the sequence or nil. # +implicit+ a boolean indicating whether or not the sequence was # implicitly started. # +style+ is an integer indicating the list style. # # See Psych::Handler#start_sequence def initialize anchor = nil, tag = nil, implicit = true, style = BLOCK super() @anchor = anchor @tag = tag @implicit = implicit @style = style end end end end psych-2.2.4/lib/psych/nodes/stream.rb000066400000000000000000000017441305404671600175050ustar00rootroot00000000000000# frozen_string_literal: false module Psych module Nodes ### # Represents a YAML stream. This is the root node for any YAML parse # tree. This node must have one or more child nodes. The only valid # child node for a Psych::Nodes::Stream node is Psych::Nodes::Document. 
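    #
    # For example, a stream holding a single document can be assembled by
    # hand and converted back to Ruby (a small sketch mirroring the overview
    # in Psych::Nodes):
    #
    #   stream = Psych::Nodes::Stream.new
    #   doc    = Psych::Nodes::Document.new
    #   doc.children << Psych::Nodes::Scalar.new('foo')
    #   stream.children << doc
    #
    #   stream.to_ruby # => ["foo"]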
class Stream < Psych::Nodes::Node # Encodings supported by Psych (and libyaml) # Any encoding ANY = Psych::Parser::ANY # UTF-8 encoding UTF8 = Psych::Parser::UTF8 # UTF-16LE encoding UTF16LE = Psych::Parser::UTF16LE # UTF-16BE encoding UTF16BE = Psych::Parser::UTF16BE # The encoding used for this stream attr_accessor :encoding ### # Create a new Psych::Nodes::Stream node with an +encoding+ that # defaults to Psych::Nodes::Stream::UTF8. # # See also Psych::Handler#start_stream def initialize encoding = UTF8 super() @encoding = encoding end end end end psych-2.2.4/lib/psych/omap.rb000066400000000000000000000001141305404671600160240ustar00rootroot00000000000000# frozen_string_literal: false module Psych class Omap < ::Hash end end psych-2.2.4/lib/psych/parser.rb000066400000000000000000000032601305404671600163710ustar00rootroot00000000000000# frozen_string_literal: false module Psych ### # YAML event parser class. This class parses a YAML document and calls # events on the handler that is passed to the constructor. The events can # be used for things such as constructing a YAML AST or deserializing YAML # documents. It can even be fed back to Psych::Emitter to emit the same # document that was parsed. # # See Psych::Handler for documentation on the events that Psych::Parser emits. # # Here is an example that prints out ever scalar found in a YAML document: # # # Handler for detecting scalar values # class ScalarHandler < Psych::Handler # def scalar value, anchor, tag, plain, quoted, style # puts value # end # end # # parser = Psych::Parser.new(ScalarHandler.new) # parser.parse(yaml_document) # # Here is an example that feeds the parser back in to Psych::Emitter. The # YAML document is read from STDIN and written back out to STDERR: # # parser = Psych::Parser.new(Psych::Emitter.new($stderr)) # parser.parse($stdin) # # Psych uses Psych::Parser in combination with Psych::TreeBuilder to # construct an AST of the parsed YAML document. class Parser class Mark < Struct.new(:index, :line, :column) end # The handler on which events will be called attr_accessor :handler # Set the encoding for this parser to +encoding+ attr_writer :external_encoding ### # Creates a new Psych::Parser instance with +handler+. YAML events will # be called on +handler+. See Psych::Parser for more details. def initialize handler = Handler.new @handler = handler @external_encoding = ANY end end end psych-2.2.4/lib/psych/scalar_scanner.rb000066400000000000000000000104421305404671600200530ustar00rootroot00000000000000# frozen_string_literal: false require 'strscan' module Psych ### # Scan scalars for built in types class ScalarScanner # Taken from http://yaml.org/type/timestamp.html TIME = /^-?\d{4}-\d{1,2}-\d{1,2}(?:[Tt]|\s+)\d{1,2}:\d\d:\d\d(?:\.\d*)?(?:\s*(?:Z|[-+]\d{1,2}:?(?:\d\d)?))?$/ # Taken from http://yaml.org/type/float.html FLOAT = /^(?:[-+]?([0-9][0-9_,]*)?\.[0-9]*([eE][-+][0-9]+)?(?# base 10) |[-+]?[0-9][0-9_,]*(:[0-5]?[0-9])+\.[0-9_]*(?# base 60) |[-+]?\.(inf|Inf|INF)(?# infinity) |\.(nan|NaN|NAN)(?# not a number))$/x # Taken from http://yaml.org/type/int.html INTEGER = /^(?:[-+]?0b[0-1_]+ (?# base 2) |[-+]?0[0-7_]+ (?# base 8) |[-+]?(?:0|[1-9][0-9_]*) (?# base 10) |[-+]?0x[0-9a-fA-F_]+ (?# base 16))$/x attr_reader :class_loader # Create a new scanner def initialize class_loader @string_cache = {} @symbol_cache = {} @class_loader = class_loader end # Tokenize +string+ returning the Ruby object def tokenize string return nil if string.empty? 
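      # The remainder of this method maps plain scalars to Ruby objects --
      # for example "12" becomes 12, "1.2" becomes 1.2, "true" becomes true
      # and "2017-01-01" becomes a Date -- while anything that stays a String
      # or Symbol is remembered in the caches consulted just below.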
return string if @string_cache.key?(string) return @symbol_cache[string] if @symbol_cache.key?(string) case string # Check for a String type, being careful not to get caught by hash keys, hex values, and # special floats (e.g., -.inf). when /^[^\d\.:-]?[A-Za-z_\s!@#\$%\^&\*\(\)\{\}\<\>\|\/\\~;=]+/, /\n/ if string.length > 5 @string_cache[string] = true return string end case string when /^[^ytonf~]/i @string_cache[string] = true string when '~', /^null$/i nil when /^(yes|true|on)$/i true when /^(no|false|off)$/i false else @string_cache[string] = true string end when TIME begin parse_time string rescue ArgumentError string end when /^\d{4}-(?:1[012]|0\d|\d)-(?:[12]\d|3[01]|0\d|\d)$/ require 'date' begin class_loader.date.strptime(string, '%Y-%m-%d') rescue ArgumentError string end when /^\.inf$/i Float::INFINITY when /^-\.inf$/i -Float::INFINITY when /^\.nan$/i Float::NAN when /^:./ if string =~ /^:(["'])(.*)\1/ @symbol_cache[string] = class_loader.symbolize($2.sub(/^:/, '')) else @symbol_cache[string] = class_loader.symbolize(string.sub(/^:/, '')) end when /^[-+]?[0-9][0-9_]*(:[0-5]?[0-9])+$/ i = 0 string.split(':').each_with_index do |n,e| i += (n.to_i * 60 ** (e - 2).abs) end i when /^[-+]?[0-9][0-9_]*(:[0-5]?[0-9])+\.[0-9_]*$/ i = 0 string.split(':').each_with_index do |n,e| i += (n.to_f * 60 ** (e - 2).abs) end i when FLOAT if string =~ /\A[-+]?\.\Z/ @string_cache[string] = true string else Float(string.gsub(/[,_]|\.([Ee]|$)/, '\1')) end else int = parse_int string.gsub(/[,_]/, '') return int if int @string_cache[string] = true string end end ### # Parse and return an int from +string+ def parse_int string return unless INTEGER === string Integer(string) end ### # Parse and return a Time from +string+ def parse_time string klass = class_loader.load 'Time' date, time = *(string.split(/[ tT]/, 2)) (yy, m, dd) = date.match(/^(-?\d{4})-(\d{1,2})-(\d{1,2})/).captures.map { |x| x.to_i } md = time.match(/(\d+:\d+:\d+)(?:\.(\d*))?\s*(Z|[-+]\d+(:\d\d)?)?/) (hh, mm, ss) = md[1].split(':').map { |x| x.to_i } us = (md[2] ? Rational("0.#{md[2]}") : 0) * 1000000 time = klass.utc(yy, m, dd, hh, mm, ss, us) return time if 'Z' == md[3] return klass.at(time.to_i, us) unless md[3] tz = md[3].match(/^([+\-]?\d{1,2})\:?(\d{1,2})?$/)[1..-1].compact.map { |digit| Integer(digit, 10) } offset = tz.first * 3600 if offset < 0 offset -= ((tz[1] || 0) * 60) else offset += ((tz[1] || 0) * 60) end klass.at((time - offset).to_i, us) end end end psych-2.2.4/lib/psych/set.rb000066400000000000000000000001131305404671600156620ustar00rootroot00000000000000# frozen_string_literal: false module Psych class Set < ::Hash end end psych-2.2.4/lib/psych/stream.rb000066400000000000000000000016341305404671600163730ustar00rootroot00000000000000# frozen_string_literal: false module Psych ### # Psych::Stream is a streaming YAML emitter. It will not buffer your YAML, # but send it straight to an IO. # # Here is an example use: # # stream = Psych::Stream.new($stdout) # stream.start # stream.push({:foo => 'bar'}) # stream.finish # # YAML will be immediately emitted to $stdout with no buffering. # # Psych::Stream#start will take a block and ensure that Psych::Stream#finish # is called, so you can do this form: # # stream = Psych::Stream.new($stdout) # stream.start do |em| # em.push(:foo => 'bar') # end # class Stream < Psych::Visitors::YAMLTree class Emitter < Psych::Emitter # :nodoc: def end_document implicit_end = !streaming? super end def streaming? 
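        # Psych::Stream emits YAML straight to the IO as events arrive, so
        # this emitter reports itself as streaming (the default
        # Psych::Handler#streaming? returns false).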
true end end include Psych::Streaming extend Psych::Streaming::ClassMethods end end psych-2.2.4/lib/psych/streaming.rb000066400000000000000000000012341305404671600170650ustar00rootroot00000000000000# frozen_string_literal: false module Psych module Streaming module ClassMethods ### # Create a new streaming emitter. Emitter will print to +io+. See # Psych::Stream for an example. def new io emitter = const_get(:Emitter).new(io) class_loader = ClassLoader.new ss = ScalarScanner.new class_loader super(emitter, ss, {}) end end ### # Start streaming using +encoding+ def start encoding = Nodes::Stream::UTF8 super.tap { yield self if block_given? } ensure finish if block_given? end private def register target, obj end end end psych-2.2.4/lib/psych/syntax_error.rb000066400000000000000000000011121305404671600176260ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/exception' module Psych class SyntaxError < Psych::Exception attr_reader :file, :line, :column, :offset, :problem, :context def initialize file, line, col, offset, problem, context err = [problem, context].compact.join ' ' filename = file || '' message = "(%s): %s at line %d column %d" % [filename, err, line, col] @file = file @line = line @column = col @offset = offset @problem = problem @context = context super(message) end end end psych-2.2.4/lib/psych/tree_builder.rb000066400000000000000000000041271305404671600175450ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/handler' module Psych ### # This class works in conjunction with Psych::Parser to build an in-memory # parse tree that represents a YAML document. # # == Example # # parser = Psych::Parser.new Psych::TreeBuilder.new # parser.parse('--- foo') # tree = parser.handler.root # # See Psych::Handler for documentation on the event methods used in this # class. class TreeBuilder < Psych::Handler # Returns the root node for the built tree attr_reader :root # Create a new TreeBuilder instance def initialize @stack = [] @last = nil @root = nil end %w{ Sequence Mapping }.each do |node| class_eval %{ def start_#{node.downcase}(anchor, tag, implicit, style) n = Nodes::#{node}.new(anchor, tag, implicit, style) @last.children << n push n end def end_#{node.downcase} pop end } end ### # Handles start_document events with +version+, +tag_directives+, # and +implicit+ styling. # # See Psych::Handler#start_document def start_document version, tag_directives, implicit n = Nodes::Document.new version, tag_directives, implicit @last.children << n push n end ### # Handles end_document events with +version+, +tag_directives+, # and +implicit+ styling. # # See Psych::Handler#start_document def end_document implicit_end = !streaming? 
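      # A TreeBuilder is not a streaming handler, so the document being
      # closed is marked as having an implicit end.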
@last.implicit_end = implicit_end pop end def start_stream encoding @root = Nodes::Stream.new(encoding) push @root end def end_stream pop end def scalar value, anchor, tag, plain, quoted, style s = Nodes::Scalar.new(value,anchor,tag,plain,quoted,style) @last.children << s s end def alias anchor @last.children << Nodes::Alias.new(anchor) end private def push value @stack.push value @last = value end def pop x = @stack.pop @last = @stack.last x end end end psych-2.2.4/lib/psych/versions.rb000066400000000000000000000002731305404671600167460ustar00rootroot00000000000000# frozen_string_literal: false module Psych # The version is Psych you're using VERSION = '2.2.4' if RUBY_ENGINE == 'jruby' DEFAULT_SNAKEYAML_VERSION = '1.18'.freeze end end psych-2.2.4/lib/psych/visitors.rb000066400000000000000000000003551305404671600167610ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/visitors/visitor' require 'psych/visitors/to_ruby' require 'psych/visitors/emitter' require 'psych/visitors/yaml_tree' require 'psych/visitors/json_tree' require 'psych/visitors/depth_first' psych-2.2.4/lib/psych/visitors/000077500000000000000000000000001305404671600164315ustar00rootroot00000000000000psych-2.2.4/lib/psych/visitors/depth_first.rb000066400000000000000000000011631305404671600212720ustar00rootroot00000000000000# frozen_string_literal: false module Psych module Visitors class DepthFirst < Psych::Visitors::Visitor def initialize block @block = block end private def nary o o.children.each { |x| visit x } @block.call o end alias :visit_Psych_Nodes_Stream :nary alias :visit_Psych_Nodes_Document :nary alias :visit_Psych_Nodes_Sequence :nary alias :visit_Psych_Nodes_Mapping :nary def terminal o @block.call o end alias :visit_Psych_Nodes_Scalar :terminal alias :visit_Psych_Nodes_Alias :terminal end end end psych-2.2.4/lib/psych/visitors/emitter.rb000066400000000000000000000027131305404671600204320ustar00rootroot00000000000000# frozen_string_literal: false module Psych module Visitors class Emitter < Psych::Visitors::Visitor def initialize io, options = {} opts = [:indentation, :canonical, :line_width].find_all { |opt| options.key?(opt) } if opts.empty? 
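          # No dumping options were supplied, so hand the IO straight to
          # Psych::Emitter and let it use its default settings.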
@handler = Psych::Emitter.new io else du = Handler::DumperOptions.new opts.each { |option| du.send :"#{option}=", options[option] } @handler = Psych::Emitter.new io, du end end def visit_Psych_Nodes_Stream o @handler.start_stream o.encoding o.children.each { |c| accept c } @handler.end_stream end def visit_Psych_Nodes_Document o @handler.start_document o.version, o.tag_directives, o.implicit o.children.each { |c| accept c } @handler.end_document o.implicit_end end def visit_Psych_Nodes_Scalar o @handler.scalar o.value, o.anchor, o.tag, o.plain, o.quoted, o.style end def visit_Psych_Nodes_Sequence o @handler.start_sequence o.anchor, o.tag, o.implicit, o.style o.children.each { |c| accept c } @handler.end_sequence end def visit_Psych_Nodes_Mapping o @handler.start_mapping o.anchor, o.tag, o.implicit, o.style o.children.each { |c| accept c } @handler.end_mapping end def visit_Psych_Nodes_Alias o @handler.alias o.anchor end end end end psych-2.2.4/lib/psych/visitors/json_tree.rb000066400000000000000000000011141305404671600207430ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/json/ruby_events' module Psych module Visitors class JSONTree < YAMLTree include Psych::JSON::RubyEvents def self.create options = {} emitter = Psych::JSON::TreeBuilder.new class_loader = ClassLoader.new ss = ScalarScanner.new class_loader new(emitter, ss, options) end def accept target if target.respond_to?(:encode_with) dump_coder target else send(@dispatch_cache[target.class], target) end end end end end psych-2.2.4/lib/psych/visitors/to_ruby.rb000066400000000000000000000270061305404671600204460ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/scalar_scanner' require 'psych/class_loader' require 'psych/exception' unless defined?(Regexp::NOENCODING) Regexp::NOENCODING = 32 end module Psych module Visitors ### # This class walks a YAML AST, converting each node to Ruby class ToRuby < Psych::Visitors::Visitor def self.create class_loader = ClassLoader.new scanner = ScalarScanner.new class_loader new(scanner, class_loader) end attr_reader :class_loader def initialize ss, class_loader super() @st = {} @ss = ss @domain_types = Psych.domain_types @class_loader = class_loader end def accept target result = super return result if @domain_types.empty? || !target.tag key = target.tag.sub(/^[!\/]*/, '').sub(/(,\d+)\//, '\1:') key = "tag:#{key}" unless key =~ /^(?:tag:|x-private)/ if @domain_types.key? key value, block = @domain_types[key] return block.call value, result end result end def deserialize o if klass = resolve_class(Psych.load_tags[o.tag]) instance = klass.allocate if instance.respond_to?(:init_with) coder = Psych::Coder.new(o.tag) coder.scalar = o.value instance.init_with coder end return instance end return o.value if o.quoted return @ss.tokenize(o.value) unless o.tag case o.tag when '!binary', 'tag:yaml.org,2002:binary' o.value.unpack('m').first when /^!(?:str|ruby\/string)(?::(.*))?$/, 'tag:yaml.org,2002:str' klass = resolve_class($1) if klass klass.allocate.replace o.value else o.value end when '!ruby/object:BigDecimal' require 'bigdecimal' unless defined? BigDecimal class_loader.big_decimal._load o.value when "!ruby/object:DateTime" class_loader.date_time require 'date' unless defined? 
DateTime @ss.parse_time(o.value).to_datetime when '!ruby/encoding' ::Encoding.find o.value when "!ruby/object:Complex" class_loader.complex Complex(o.value) when "!ruby/object:Rational" class_loader.rational Rational(o.value) when "!ruby/class", "!ruby/module" resolve_class o.value when "tag:yaml.org,2002:float", "!float" Float(@ss.tokenize(o.value)) when "!ruby/regexp" klass = class_loader.regexp o.value =~ /^\/(.*)\/([mixn]*)$/m source = $1 options = 0 lang = nil ($2 || '').split('').each do |option| case option when 'x' then options |= Regexp::EXTENDED when 'i' then options |= Regexp::IGNORECASE when 'm' then options |= Regexp::MULTILINE when 'n' then options |= Regexp::NOENCODING else lang = option end end klass.new(*[source, options, lang].compact) when "!ruby/range" klass = class_loader.range args = o.value.split(/([.]{2,3})/, 2).map { |s| accept Nodes::Scalar.new(s) } args.push(args.delete_at(1) == '...') klass.new(*args) when /^!ruby\/sym(bol)?:?(.*)?$/ class_loader.symbolize o.value else @ss.tokenize o.value end end private :deserialize def visit_Psych_Nodes_Scalar o register o, deserialize(o) end def visit_Psych_Nodes_Sequence o if klass = resolve_class(Psych.load_tags[o.tag]) instance = klass.allocate if instance.respond_to?(:init_with) coder = Psych::Coder.new(o.tag) coder.seq = o.children.map { |c| accept c } instance.init_with coder end return instance end case o.tag when nil register_empty(o) when '!omap', 'tag:yaml.org,2002:omap' map = register(o, Psych::Omap.new) o.children.each { |a| map[accept(a.children.first)] = accept a.children.last } map when /^!(?:seq|ruby\/array):(.*)$/ klass = resolve_class($1) list = register(o, klass.allocate) o.children.each { |c| list.push accept c } list else register_empty(o) end end def visit_Psych_Nodes_Mapping o if Psych.load_tags[o.tag] return revive(resolve_class(Psych.load_tags[o.tag]), o) end return revive_hash(register(o, {}), o) unless o.tag case o.tag when /^!ruby\/struct:?(.*)?$/ klass = resolve_class($1) if $1 if klass s = register(o, klass.allocate) members = {} struct_members = s.members.map { |x| class_loader.symbolize x } o.children.each_slice(2) do |k,v| member = accept(k) value = accept(v) if struct_members.include?(class_loader.symbolize(member)) s.send("#{member}=", value) else members[member.to_s.sub(/^@/, '')] = value end end init_with(s, members, o) else klass = class_loader.struct members = o.children.map { |c| accept c } h = Hash[*members] s = klass.new(*h.map { |k,v| class_loader.symbolize k }).new(*h.map { |k,v| v }) register(o, s) s end when /^!ruby\/object:?(.*)?$/ name = $1 || 'Object' if name == 'Complex' class_loader.complex h = Hash[*o.children.map { |c| accept c }] register o, Complex(h['real'], h['image']) elsif name == 'Rational' class_loader.rational h = Hash[*o.children.map { |c| accept c }] register o, Rational(h['numerator'], h['denominator']) elsif name == 'Hash' revive_hash(register(o, {}), o) else obj = revive((resolve_class(name) || class_loader.object), o) obj end when /^!(?:str|ruby\/string)(?::(.*))?$/, 'tag:yaml.org,2002:str' klass = resolve_class($1) members = {} string = nil o.children.each_slice(2) do |k,v| key = accept k value = accept v if key == 'str' if klass string = klass.allocate.replace value else string = value end register(o, string) else members[key] = value end end init_with(string, members.map { |k,v| [k.to_s.sub(/^@/, ''),v] }, o) when /^!ruby\/array:(.*)$/ klass = resolve_class($1) list = register(o, klass.allocate) members = Hash[o.children.map { |c| accept c 
}.each_slice(2).to_a] list.replace members['internal'] members['ivars'].each do |ivar, v| list.instance_variable_set ivar, v end list when '!ruby/range' klass = class_loader.range h = Hash[*o.children.map { |c| accept c }] register o, klass.new(h['begin'], h['end'], h['excl']) when /^!ruby\/exception:?(.*)?$/ h = Hash[*o.children.map { |c| accept c }] e = build_exception((resolve_class($1) || class_loader.exception), h.delete('message')) init_with(e, h, o) when '!set', 'tag:yaml.org,2002:set' set = class_loader.psych_set.new @st[o.anchor] = set if o.anchor o.children.each_slice(2) do |k,v| set[accept(k)] = accept(v) end set when /^!ruby\/hash-with-ivars(?::(.*))?$/ hash = $1 ? resolve_class($1).allocate : {} register o, hash o.children.each_slice(2) do |key, value| case key.value when 'elements' revive_hash hash, value when 'ivars' value.children.each_slice(2) do |k,v| hash.instance_variable_set accept(k), accept(v) end end end hash when /^!map:(.*)$/, /^!ruby\/hash:(.*)$/ revive_hash register(o, resolve_class($1).allocate), o when '!omap', 'tag:yaml.org,2002:omap' map = register(o, class_loader.psych_omap.new) o.children.each_slice(2) do |l,r| map[accept(l)] = accept r end map when /^!ruby\/marshalable:(.*)$/ name = $1 klass = resolve_class(name) obj = register(o, klass.allocate) if obj.respond_to?(:init_with) init_with(obj, revive_hash({}, o), o) elsif obj.respond_to?(:marshal_load) marshal_data = o.children.map(&method(:accept)) obj.marshal_load(marshal_data) obj else raise ArgumentError, "Cannot deserialize #{name}" end else revive_hash(register(o, {}), o) end end def visit_Psych_Nodes_Document o accept o.root end def visit_Psych_Nodes_Stream o o.children.map { |c| accept c } end def visit_Psych_Nodes_Alias o @st.fetch(o.anchor) { raise BadAlias, "Unknown alias: #{o.anchor}" } end private def register node, object @st[node.anchor] = object if node.anchor object end def register_empty object list = register(object, []) object.children.each { |c| list.push accept c } list end SHOVEL = '<<' def revive_hash hash, o o.children.each_slice(2) { |k,v| key = accept(k) val = accept(v) if key == SHOVEL && k.tag != "tag:yaml.org,2002:str" case v when Nodes::Alias, Nodes::Mapping begin hash.merge! val rescue TypeError hash[key] = val end when Nodes::Sequence begin h = {} val.reverse_each do |value| h.merge! value end hash.merge! 
h rescue TypeError hash[key] = val end else hash[key] = val end else hash[key] = val end } hash end def merge_key hash, key, val end def revive klass, node s = register(node, klass.allocate) init_with(s, revive_hash({}, node), node) end def init_with o, h, node c = Psych::Coder.new(node.tag) c.map = h if o.respond_to?(:init_with) o.init_with c elsif o.respond_to?(:yaml_initialize) if $VERBOSE warn "Implementing #{o.class}#yaml_initialize is deprecated, please implement \"init_with(coder)\"" end o.yaml_initialize c.tag, c.map else h.each { |k,v| o.instance_variable_set(:"@#{k}", v) } end o end # Convert +klassname+ to a Class def resolve_class klassname class_loader.load klassname end end class NoAliasRuby < ToRuby def visit_Psych_Nodes_Alias o raise BadAlias, "Unknown alias: #{o.anchor}" end end end end psych-2.2.4/lib/psych/visitors/visitor.rb000066400000000000000000000005501305404671600204550ustar00rootroot00000000000000# frozen_string_literal: false module Psych module Visitors class Visitor def accept target visit target end private DISPATCH = Hash.new do |hash, klass| hash[klass] = "visit_#{klass.name.gsub('::', '_')}" end def visit target send DISPATCH[target.class], target end end end end psych-2.2.4/lib/psych/visitors/yaml_tree.rb000066400000000000000000000422451305404671600207460ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/tree_builder' require 'psych/scalar_scanner' require 'psych/class_loader' module Psych module Visitors ### # YAMLTree builds a YAML ast given a Ruby object. For example: # # builder = Psych::Visitors::YAMLTree.new # builder << { :foo => 'bar' } # builder.tree # => # true) end end rescue # public_method or source_location might be overridden, # and it's OK to skip it since it's only to emit a warning end end if target.respond_to?(:encode_with) dump_coder target else send(@dispatch_cache[target.class], target) end end def visit_Psych_Omap o seq = @emitter.start_sequence(nil, 'tag:yaml.org,2002:omap', false, Nodes::Sequence::BLOCK) register(o, seq) o.each { |k,v| visit_Hash k => v } @emitter.end_sequence end def visit_Encoding o tag = "!ruby/encoding" @emitter.scalar o.name, nil, tag, false, false, Nodes::Scalar::ANY end def visit_Object o tag = Psych.dump_tags[o.class] unless tag klass = o.class == Object ? 
nil : o.class.name tag = ['!ruby/object', klass].compact.join(':') end map = @emitter.start_mapping(nil, tag, false, Nodes::Mapping::BLOCK) register(o, map) dump_ivars o @emitter.end_mapping end def visit_Struct o tag = ['!ruby/struct', o.class.name].compact.join(':') register o, @emitter.start_mapping(nil, tag, false, Nodes::Mapping::BLOCK) o.members.each do |member| @emitter.scalar member.to_s, nil, nil, true, false, Nodes::Scalar::ANY accept o[member] end dump_ivars o @emitter.end_mapping end def visit_Exception o tag = ['!ruby/exception', o.class.name].join ':' @emitter.start_mapping nil, tag, false, Nodes::Mapping::BLOCK { 'message' => private_iv_get(o, 'mesg'), 'backtrace' => private_iv_get(o, 'backtrace'), }.each do |k,v| next unless v @emitter.scalar k, nil, nil, true, false, Nodes::Scalar::ANY accept v end dump_ivars o @emitter.end_mapping end def visit_NameError o tag = ['!ruby/exception', o.class.name].join ':' @emitter.start_mapping nil, tag, false, Nodes::Mapping::BLOCK { 'message' => o.message.to_s, 'backtrace' => private_iv_get(o, 'backtrace'), }.each do |k,v| next unless v @emitter.scalar k, nil, nil, true, false, Nodes::Scalar::ANY accept v end dump_ivars o @emitter.end_mapping end def visit_Regexp o register o, @emitter.scalar(o.inspect, nil, '!ruby/regexp', false, false, Nodes::Scalar::ANY) end def visit_DateTime o formatted = if o.offset.zero? o.strftime("%Y-%m-%d %H:%M:%S.%9N Z".freeze) else o.strftime("%Y-%m-%d %H:%M:%S.%9N %:z".freeze) end tag = '!ruby/object:DateTime' register o, @emitter.scalar(formatted, nil, tag, false, false, Nodes::Scalar::ANY) end def visit_Time o formatted = format_time o register o, @emitter.scalar(formatted, nil, nil, true, false, Nodes::Scalar::ANY) end def visit_Rational o register o, @emitter.start_mapping(nil, '!ruby/object:Rational', false, Nodes::Mapping::BLOCK) [ 'denominator', o.denominator.to_s, 'numerator', o.numerator.to_s ].each do |m| @emitter.scalar m, nil, nil, true, false, Nodes::Scalar::ANY end @emitter.end_mapping end def visit_Complex o register o, @emitter.start_mapping(nil, '!ruby/object:Complex', false, Nodes::Mapping::BLOCK) ['real', o.real.to_s, 'image', o.imag.to_s].each do |m| @emitter.scalar m, nil, nil, true, false, Nodes::Scalar::ANY end @emitter.end_mapping end def visit_Integer o @emitter.scalar o.to_s, nil, nil, true, false, Nodes::Scalar::ANY end alias :visit_TrueClass :visit_Integer alias :visit_FalseClass :visit_Integer alias :visit_Date :visit_Integer def visit_Float o if o.nan? @emitter.scalar '.nan', nil, nil, true, false, Nodes::Scalar::ANY elsif o.infinite? @emitter.scalar((o.infinite? > 0 ? 
'.inf' : '-.inf'), nil, nil, true, false, Nodes::Scalar::ANY) else @emitter.scalar o.to_s, nil, nil, true, false, Nodes::Scalar::ANY end end def visit_BigDecimal o @emitter.scalar o._dump, nil, '!ruby/object:BigDecimal', false, false, Nodes::Scalar::ANY end def visit_String o plain = true quote = true style = Nodes::Scalar::PLAIN tag = nil if binary?(o) o = [o].pack('m0') tag = '!binary' # FIXME: change to below when syck is removed #tag = 'tag:yaml.org,2002:binary' style = Nodes::Scalar::LITERAL plain = false quote = false elsif o =~ /\n(?!\Z)/ # match \n except blank line at the end of string style = Nodes::Scalar::LITERAL elsif o == '<<' style = Nodes::Scalar::SINGLE_QUOTED tag = 'tag:yaml.org,2002:str' plain = false quote = false elsif @line_width && o.length > @line_width style = Nodes::Scalar::FOLDED elsif o =~ /^[^[:word:]][^"]*$/ style = Nodes::Scalar::DOUBLE_QUOTED elsif not String === @ss.tokenize(o) or /\A0[0-7]*[89]/ =~ o style = Nodes::Scalar::SINGLE_QUOTED end is_primitive = o.class == ::String ivars = find_ivars o, is_primitive if ivars.empty? unless is_primitive tag = "!ruby/string:#{o.class}" plain = false quote = false end @emitter.scalar o, nil, tag, plain, quote, style else maptag = '!ruby/string' maptag << ":#{o.class}" unless o.class == ::String register o, @emitter.start_mapping(nil, maptag, false, Nodes::Mapping::BLOCK) @emitter.scalar 'str', nil, nil, true, false, Nodes::Scalar::ANY @emitter.scalar o, nil, tag, plain, quote, style dump_ivars o @emitter.end_mapping end end def visit_Module o raise TypeError, "can't dump anonymous module: #{o}" unless o.name register o, @emitter.scalar(o.name, nil, '!ruby/module', false, false, Nodes::Scalar::SINGLE_QUOTED) end def visit_Class o raise TypeError, "can't dump anonymous class: #{o}" unless o.name register o, @emitter.scalar(o.name, nil, '!ruby/class', false, false, Nodes::Scalar::SINGLE_QUOTED) end def visit_Range o register o, @emitter.start_mapping(nil, '!ruby/range', false, Nodes::Mapping::BLOCK) ['begin', o.begin, 'end', o.end, 'excl', o.exclude_end?].each do |m| accept m end @emitter.end_mapping end def visit_Hash o if o.class == ::Hash register(o, @emitter.start_mapping(nil, nil, true, Psych::Nodes::Mapping::BLOCK)) o.each do |k,v| accept k accept v end @emitter.end_mapping else visit_hash_subclass o end end def visit_Psych_Set o register(o, @emitter.start_mapping(nil, '!set', false, Psych::Nodes::Mapping::BLOCK)) o.each do |k,v| accept k accept v end @emitter.end_mapping end def visit_Array o if o.class == ::Array register o, @emitter.start_sequence(nil, nil, true, Nodes::Sequence::BLOCK) o.each { |c| accept c } @emitter.end_sequence else visit_array_subclass o end end def visit_NilClass o @emitter.scalar('', nil, 'tag:yaml.org,2002:null', true, false, Nodes::Scalar::ANY) end def visit_Symbol o if o.empty? @emitter.scalar "", nil, '!ruby/symbol', false, false, Nodes::Scalar::ANY else @emitter.scalar ":#{o}", nil, nil, true, false, Nodes::Scalar::ANY end end def visit_BasicObject o tag = Psych.dump_tags[o.class] tag ||= "!ruby/marshalable:#{o.class.name}" map = @emitter.start_mapping(nil, tag, false, Nodes::Mapping::BLOCK) register(o, map) o.marshal_dump.each(&method(:accept)) @emitter.end_mapping end private # FIXME: Remove the index and count checks in Psych 3.0 NULL = "\x00" BINARY_RANGE = "\x00-\x7F" WS_RANGE = "^ -~\t\r\n" def binary? string (string.encoding == Encoding::ASCII_8BIT && !string.ascii_only?) 
|| string.index(NULL) || string.count(BINARY_RANGE, WS_RANGE).fdiv(string.length) > 0.3 end def visit_array_subclass o tag = "!ruby/array:#{o.class}" ivars = o.instance_variables if ivars.empty? node = @emitter.start_sequence(nil, tag, false, Nodes::Sequence::BLOCK) register o, node o.each { |c| accept c } @emitter.end_sequence else node = @emitter.start_mapping(nil, tag, false, Nodes::Sequence::BLOCK) register o, node # Dump the internal list accept 'internal' @emitter.start_sequence(nil, nil, true, Nodes::Sequence::BLOCK) o.each { |c| accept c } @emitter.end_sequence # Dump the ivars accept 'ivars' @emitter.start_mapping(nil, nil, true, Nodes::Sequence::BLOCK) ivars.each do |ivar| accept ivar accept o.instance_variable_get ivar end @emitter.end_mapping @emitter.end_mapping end end def visit_hash_subclass o ivars = o.instance_variables if ivars.any? tag = "!ruby/hash-with-ivars:#{o.class}" node = @emitter.start_mapping(nil, tag, false, Psych::Nodes::Mapping::BLOCK) register(o, node) # Dump the elements accept 'elements' @emitter.start_mapping nil, nil, true, Nodes::Mapping::BLOCK o.each do |k,v| accept k accept v end @emitter.end_mapping # Dump the ivars accept 'ivars' @emitter.start_mapping nil, nil, true, Nodes::Mapping::BLOCK o.instance_variables.each do |ivar| accept ivar accept o.instance_variable_get ivar end @emitter.end_mapping @emitter.end_mapping else tag = "!ruby/hash:#{o.class}" node = @emitter.start_mapping(nil, tag, false, Psych::Nodes::Mapping::BLOCK) register(o, node) o.each do |k,v| accept k accept v end @emitter.end_mapping end end def dump_list o end def format_time time if time.utc? time.strftime("%Y-%m-%d %H:%M:%S.%9N Z") else time.strftime("%Y-%m-%d %H:%M:%S.%9N %:z") end end # FIXME: remove this method once "to_yaml_properties" is removed def find_ivars target, is_primitive=false begin loc = target.method(:to_yaml_properties).source_location.first unless loc.start_with?(Psych::DEPRECATED) || loc.end_with?('rubytypes.rb') if $VERBOSE warn "#{loc}: to_yaml_properties is deprecated, please implement \"encode_with(coder)\"" end return target.to_yaml_properties end rescue # public_method or source_location might be overridden, # and it's OK to skip it since it's only to emit a warning. end is_primitive ? [] : target.instance_variables end def register target, yaml_obj @st.register target, yaml_obj yaml_obj end def dump_coder o @coders << o tag = Psych.dump_tags[o.class] unless tag klass = o.class == Object ? nil : o.class.name tag = ['!ruby/object', klass].compact.join(':') end c = Psych::Coder.new(tag) o.encode_with(c) emit_coder c, o end def emit_coder c, o case c.type when :scalar @emitter.scalar c.scalar, nil, c.tag, c.tag.nil?, false, Nodes::Scalar::ANY when :seq @emitter.start_sequence nil, c.tag, c.tag.nil?, Nodes::Sequence::BLOCK c.seq.each do |thing| accept thing end @emitter.end_sequence when :map register o, @emitter.start_mapping(nil, c.tag, c.implicit, c.style) c.map.each do |k,v| accept k accept v end @emitter.end_mapping when :object accept c.object end end def dump_ivars target ivars = find_ivars target ivars.each do |iv| @emitter.scalar("#{iv.to_s.sub(/^@/, '')}", nil, nil, true, false, Nodes::Scalar::ANY) accept target.instance_variable_get(iv) end end end end end psych-2.2.4/lib/psych/y.rb000066400000000000000000000002771305404671600153520ustar00rootroot00000000000000# frozen_string_literal: false module Kernel ### # An alias for Psych.dump_stream meant to be used with IRB. 
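  #
  # For example, inside an IRB session:
  #
  #   y({ 'foo' => 'bar' }, [1, 2, 3])
  #
  # prints the YAML stream for both objects to $stdout.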
def y *objects puts Psych.dump_stream(*objects) end private :y end psych-2.2.4/lib/psych_jars.rb000066400000000000000000000002261305404671600161130ustar00rootroot00000000000000# frozen_string_literal: false require 'psych.jar' require 'jar-dependencies' require_jar('org.yaml', 'snakeyaml', Psych::DEFAULT_SNAKEYAML_VERSION) psych-2.2.4/psych.gemspec000066400000000000000000000070441305404671600153530ustar00rootroot00000000000000# -*- encoding: utf-8 -*- Gem::Specification.new do |s| s.name = "psych" s.version = "2.2.4" s.authors = ["Aaron Patterson", "SHIBATA Hiroshi", "Charles Oliver Nutter"] s.email = ["aaron@tenderlovemaking.com", "hsbt@ruby-lang.org", "headius@headius.com"] s.date = "2016-11-14" s.summary = "Psych is a YAML parser and emitter" s.description = <<-DESCRIPTION Psych is a YAML parser and emitter. Psych leverages libyaml[http://pyyaml.org/wiki/LibYAML] for its YAML parsing and emitting capabilities. In addition to wrapping libyaml, Psych also knows how to serialize and de-serialize most Ruby objects to and from the YAML format. DESCRIPTION s.homepage = "https://github.com/ruby/psych" s.licenses = ["MIT"] s.require_paths = ["lib"] # for ruby core repository. It was generated by `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) } s.files = [".gitignore", ".travis.yml", "CHANGELOG.rdoc", "Gemfile", "Mavenfile", "README.md", "Rakefile", "bin/console", "bin/setup", "ext/psych/.gitignore", "ext/psych/depend", "ext/psych/extconf.rb", "ext/psych/psych.c", "ext/psych/psych.h", "ext/psych/psych_emitter.c", "ext/psych/psych_emitter.h", "ext/psych/psych_parser.c", "ext/psych/psych_parser.h", "ext/psych/psych_to_ruby.c", "ext/psych/psych_to_ruby.h", "ext/psych/psych_yaml_tree.c", "ext/psych/psych_yaml_tree.h", "ext/psych/yaml/LICENSE", "ext/psych/yaml/api.c", "ext/psych/yaml/config.h", "ext/psych/yaml/dumper.c", "ext/psych/yaml/emitter.c", "ext/psych/yaml/loader.c", "ext/psych/yaml/parser.c", "ext/psych/yaml/reader.c", "ext/psych/yaml/scanner.c", "ext/psych/yaml/writer.c", "ext/psych/yaml/yaml.h", "ext/psych/yaml/yaml_private.h", "lib/psych.rb", "lib/psych/class_loader.rb", "lib/psych/coder.rb", "lib/psych/core_ext.rb", "lib/psych/deprecated.rb", "lib/psych/exception.rb", "lib/psych/handler.rb", "lib/psych/handlers/document_stream.rb", "lib/psych/handlers/recorder.rb", "lib/psych/json/ruby_events.rb", "lib/psych/json/stream.rb", "lib/psych/json/tree_builder.rb", "lib/psych/json/yaml_events.rb", "lib/psych/nodes.rb", "lib/psych/nodes/alias.rb", "lib/psych/nodes/document.rb", "lib/psych/nodes/mapping.rb", "lib/psych/nodes/node.rb", "lib/psych/nodes/scalar.rb", "lib/psych/nodes/sequence.rb", "lib/psych/nodes/stream.rb", "lib/psych/omap.rb", "lib/psych/parser.rb", "lib/psych/scalar_scanner.rb", "lib/psych/set.rb", "lib/psych/stream.rb", "lib/psych/streaming.rb", "lib/psych/syntax_error.rb", "lib/psych/tree_builder.rb", "lib/psych/versions.rb", "lib/psych/visitors.rb","lib/psych/visitors/depth_first.rb", "lib/psych/visitors/emitter.rb", "lib/psych/visitors/json_tree.rb", "lib/psych/visitors/to_ruby.rb", "lib/psych/visitors/visitor.rb", "lib/psych/visitors/yaml_tree.rb", "lib/psych/y.rb", "psych.gemspec"] s.rdoc_options = ["--main", "README.md"] s.extra_rdoc_files = ["CHANGELOG.rdoc", "README.md"] s.required_ruby_version = Gem::Requirement.new(">= 1.9.2") s.rubygems_version = "2.5.1" s.required_rubygems_version = Gem::Requirement.new(">= 0") s.add_development_dependency 'rake-compiler', ">= 0.4.1" s.add_development_dependency 'minitest', "~> 5.0" if RUBY_ENGINE == 
'jruby' s.platform = 'java' s.files.concat ["ext/java/PsychEmitter.java", "ext/java/PsychLibrary.java", "ext/java/PsychParser.java", "ext/java/PsychToRuby.java", "ext/java/PsychYamlTree.java", "lib/psych_jars.rb", "lib/psych.jar"] s.requirements = "jar org.yaml:snakeyaml, 1.18" s.add_dependency 'jar-dependencies', '>= 0.1.7' s.add_development_dependency 'ruby-maven' else s.extensions = ["ext/psych/extconf.rb"] end end psych-2.2.4/test/000077500000000000000000000000001305404671600136325ustar00rootroot00000000000000psych-2.2.4/test/psych/000077500000000000000000000000001305404671600147605ustar00rootroot00000000000000psych-2.2.4/test/psych/handlers/000077500000000000000000000000001305404671600165605ustar00rootroot00000000000000psych-2.2.4/test/psych/handlers/test_recorder.rb000066400000000000000000000011421305404671600217470ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/helper' require 'psych/handlers/recorder' module Psych module Handlers class TestRecorder < TestCase def test_replay yaml = "--- foo\n...\n" output = StringIO.new recorder = Psych::Handlers::Recorder.new parser = Psych::Parser.new recorder parser.parse yaml assert_equal 5, recorder.events.length emitter = Psych::Emitter.new output recorder.events.each do |m, args| emitter.send m, *args end assert_equal yaml, output.string end end end end psych-2.2.4/test/psych/helper.rb000066400000000000000000000061601305404671600165670ustar00rootroot00000000000000# frozen_string_literal: false require 'minitest/autorun' require 'stringio' require 'tempfile' require 'date' require 'psych' module Psych superclass = if defined?(Minitest::Test) Minitest::Test else MiniTest::Unit::TestCase end class TestCase < superclass def self.suppress_warning verbose, $VERBOSE = $VERBOSE, nil yield ensure $VERBOSE = verbose end def with_default_external(enc) verbose, $VERBOSE = $VERBOSE, nil origenc, Encoding.default_external = Encoding.default_external, enc $VERBOSE = verbose yield ensure verbose, $VERBOSE = $VERBOSE, nil Encoding.default_external = origenc $VERBOSE = verbose end def with_default_internal(enc) verbose, $VERBOSE = $VERBOSE, nil origenc, Encoding.default_internal = Encoding.default_internal, enc $VERBOSE = verbose yield ensure verbose, $VERBOSE = $VERBOSE, nil Encoding.default_internal = origenc $VERBOSE = verbose end # # Convert between Psych and the object to verify correct parsing and # emitting # def assert_to_yaml( obj, yaml ) assert_equal( obj, Psych::load( yaml ) ) assert_equal( obj, Psych::parse( yaml ).transform ) assert_equal( obj, Psych::load( obj.psych_to_yaml ) ) assert_equal( obj, Psych::parse( obj.psych_to_yaml ).transform ) assert_equal( obj, Psych::load( obj.psych_to_yaml( :UseVersion => true, :UseHeader => true, :SortKeys => true ) )) end # # Test parser only # def assert_parse_only( obj, yaml ) assert_equal( obj, Psych::load( yaml ) ) assert_equal( obj, Psych::parse( yaml ).transform ) end def assert_cycle( obj ) v = Visitors::YAMLTree.create v << obj assert_equal(obj, Psych.load(v.tree.yaml)) assert_equal( obj, Psych::load(Psych.dump(obj))) assert_equal( obj, Psych::load( obj.psych_to_yaml ) ) end # # Make a time with the time zone # def mktime( year, mon, day, hour, min, sec, usec, zone = "Z" ) usec = Rational(usec.to_s) * 1000000 val = Time::utc( year.to_i, mon.to_i, day.to_i, hour.to_i, min.to_i, sec.to_i, usec ) if zone != "Z" hour = zone[0,3].to_i * 3600 min = zone[3,2].to_i * 60 ofs = (hour + min) val = Time.at( val.tv_sec - ofs, val.tv_nsec / 1000.0 ) end return val end end end # backport 
so that tests will run on 2.0.0 unless Tempfile.respond_to? :create def Tempfile.create(basename, *rest) tmpfile = nil Dir::Tmpname.create(basename, *rest) do |tmpname, n, opts| mode = File::RDWR|File::CREAT|File::EXCL perm = 0600 if opts mode |= opts.delete(:mode) || 0 opts[:perm] = perm perm = nil else opts = perm end tmpfile = File.open(tmpname, mode, opts) end if block_given? begin yield tmpfile ensure tmpfile.close if !tmpfile.closed? File.unlink tmpfile end else tmpfile end end end psych-2.2.4/test/psych/json/000077500000000000000000000000001305404671600157315ustar00rootroot00000000000000psych-2.2.4/test/psych/json/test_stream.rb000066400000000000000000000050311305404671600206070ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/helper' module Psych module JSON class TestStream < TestCase def setup @io = StringIO.new @stream = Psych::JSON::Stream.new(@io) @stream.start end def test_explicit_documents @io = StringIO.new @stream = Psych::JSON::Stream.new(@io) @stream.start @stream.push({ 'foo' => 'bar' }) assert !@stream.finished?, 'stream not finished' @stream.finish assert @stream.finished?, 'stream finished' assert_match(/^---/, @io.string) assert_match(/\.\.\.$/, @io.string) end def test_null @stream.push(nil) assert_match(/^--- null/, @io.string) end def test_string @stream.push "foo" assert_match(/(["])foo\1/, @io.string) end def test_symbol @stream.push :foo assert_match(/(["])foo\1/, @io.string) end def test_int @stream.push 10 assert_match(/^--- 10/, @io.string) end def test_float @stream.push 1.2 assert_match(/^--- 1.2/, @io.string) end def test_hash hash = { 'one' => 'two' } @stream.push hash json = @io.string assert_match(/}$/, json) assert_match(/^--- \{/, json) assert_match(/["]one['"]/, json) assert_match(/["]two['"]/, json) end def test_list_to_json list = %w{ one two } @stream.push list json = @io.string assert_match(/\]$/, json) assert_match(/^--- \[/, json) assert_match(/["]one["]/, json) assert_match(/["]two["]/, json) end class Foo; end def test_json_dump_exclude_tag @stream << Foo.new json = @io.string refute_match('Foo', json) end class Bar def encode_with coder coder.represent_seq 'omg', %w{ a b c } end end def test_json_list_dump_exclude_tag @stream << Bar.new json = @io.string refute_match('omg', json) end def test_time time = Time.utc(2010, 10, 10) @stream.push({'a' => time }) json = @io.string assert_match "{\"a\": \"2010-10-10 00:00:00.000000000 Z\"}\n", json end def test_datetime time = Time.new(2010, 10, 10).to_datetime @stream.push({'a' => time }) json = @io.string assert_match "{\"a\": \"#{time.strftime("%Y-%m-%d %H:%M:%S.%9N %:z")}\"}\n", json end end end end psych-2.2.4/test/psych/nodes/000077500000000000000000000000001305404671600160705ustar00rootroot00000000000000psych-2.2.4/test/psych/nodes/test_enumerable.rb000066400000000000000000000016561305404671600216030ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/helper' module Psych module Nodes class TestEnumerable < TestCase def test_includes_enumerable yaml = '--- hello' assert_equal 3, Psych.parse_stream(yaml).to_a.length end def test_returns_enumerator yaml = '--- hello' assert_equal 3, Psych.parse_stream(yaml).each.map { |x| x }.length end def test_scalar assert_equal 3, calls('--- hello').length end def test_sequence assert_equal 4, calls("---\n- hello").length end def test_mapping assert_equal 5, calls("---\nhello: world").length end def test_alias assert_equal 5, calls("--- &yay\n- foo\n- *yay\n").length end private def calls yaml calls = [] 
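        # Walk the parse tree depth-first and record every node yielded so
        # the tests above can count how many nodes a document produces.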
Psych.parse_stream(yaml).each do |node| calls << node end calls end end end end psych-2.2.4/test/psych/test_alias_and_anchor.rb000066400000000000000000000037361305404671600216220ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' class ObjectWithInstanceVariables attr_accessor :var1, :var2 end class SubStringWithInstanceVariables < String attr_accessor :var1 end module Psych class TestAliasAndAnchor < TestCase def test_mri_compatibility yaml = < 'b' }, 'foo'] end def test_another_subclass_with_attributes y = Y.new.tap {|y| y.val = 1} y << "foo" << "bar" y = Psych.load Psych.dump y assert_equal %w{foo bar}, y assert_equal Y, y.class assert_equal 1, y.val end def test_subclass yaml = Psych.dump X.new assert_match X.name, yaml list = X.new list << 1 assert_equal X, list.class assert_equal 1, list.first end def test_subclass_with_attributes y = Psych.load Psych.dump Y.new.tap {|y| y.val = 1} assert_equal Y, y.class assert_equal 1, y.val end def test_backwards_with_syck x = Psych.load "--- !seq:#{X.name} []\n\n" assert_equal X, x.class end def test_self_referential @list << @list assert_cycle(@list) end def test_cycle assert_cycle(@list) end end end psych-2.2.4/test/psych/test_boolean.rb000066400000000000000000000017271305404671600177720ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych ### # Test booleans from YAML spec: # http://yaml.org/type/bool.html class TestBoolean < TestCase %w{ yes Yes YES true True TRUE on On ON }.each do |truth| define_method(:"test_#{truth}") do assert_equal true, Psych.load("--- #{truth}") end end %w{ no No NO false False FALSE off Off OFF }.each do |truth| define_method(:"test_#{truth}") do assert_equal false, Psych.load("--- #{truth}") end end ### # YAML spec says "y" and "Y" may be used as true, but Syck treats them # as literal strings def test_y assert_equal "y", Psych.load("--- y") assert_equal "Y", Psych.load("--- Y") end ### # YAML spec says "n" and "N" may be used as false, but Syck treats them # as literal strings def test_n assert_equal "n", Psych.load("--- n") assert_equal "N", Psych.load("--- N") end end end psych-2.2.4/test/psych/test_class.rb000066400000000000000000000011351305404671600174510ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestClass < TestCase module Foo end def test_cycle_anonymous_class assert_raises(::TypeError) do assert_cycle(Class.new) end end def test_cycle_anonymous_module assert_raises(::TypeError) do assert_cycle(Module.new) end end def test_cycle assert_cycle(TestClass) end def test_dump Psych.dump TestClass end def test_cycle_module assert_cycle(Foo) end def test_dump_module Psych.dump Foo end end end psych-2.2.4/test/psych/test_coder.rb000066400000000000000000000111621305404671600174410ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestCoder < TestCase class InitApi attr_accessor :implicit attr_accessor :style attr_accessor :tag attr_accessor :a, :b, :c def initialize @a = 1 @b = 2 @c = 3 end def init_with coder @a = coder['aa'] @b = coder['bb'] @implicit = coder.implicit @tag = coder.tag @style = coder.style end def encode_with coder coder['aa'] = @a coder['bb'] = @b end end class TaggingCoder < InitApi def encode_with coder super coder.tag = coder.tag.sub(/!/, '!hello') coder.implicit = false coder.style = Psych::Nodes::Mapping::FLOW end end class ScalarCoder def encode_with coder coder.scalar = "foo" end end class Represent 
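      # Note on this fixture (added comment; the emitted document shown is an
      # assumption, only the /foo/ match is asserted by the tests below):
      # yaml_tag registers the custom 'foo' tag for this class, and
      # represent_scalar emits the object as a single tagged scalar, so a dump
      # looks roughly like
      #
      #   --- !<foo> bar
      #
      # and Psych.load resolves the registered tag back to this class.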
yaml_tag 'foo' def encode_with coder coder.represent_scalar 'foo', 'bar' end end class RepresentWithInit yaml_tag name attr_accessor :str def init_with coder @str = coder.scalar end def encode_with coder coder.represent_scalar self.class.name, 'bar' end end class RepresentWithSeq yaml_tag name attr_accessor :seq def init_with coder @seq = coder.seq end def encode_with coder coder.represent_seq self.class.name, %w{ foo bar } end end class RepresentWithMap yaml_tag name attr_accessor :map def init_with coder @map = coder.map end def encode_with coder coder.represent_map self.class.name, { "string" => 'a', :symbol => 'b' } end end class RepresentWithObject def encode_with coder coder.represent_object self.class.name, 20 end end class Referential attr_reader :a def initialize @a = self end def encode_with(c) c['a'] = @a end def init_with(c) @a = c['a'] end end def test_self_referential x = Referential.new copy = Psych.load Psych.dump x assert_equal copy, copy.a end def test_represent_with_object thing = Psych.load(Psych.dump(RepresentWithObject.new)) assert_equal 20, thing end def test_json_dump_exclude_tag refute_match('TestCoder::InitApi', Psych.to_json(InitApi.new)) end def test_map_takes_block coder = Psych::Coder.new 'foo' tag = coder.tag style = coder.style coder.map { |map| map.add 'foo', 'bar' } assert_equal 'bar', coder['foo'] assert_equal tag, coder.tag assert_equal style, coder.style end def test_map_with_tag coder = Psych::Coder.new 'foo' coder.map('hello') { |map| map.add 'foo', 'bar' } assert_equal 'bar', coder['foo'] assert_equal 'hello', coder.tag end def test_map_with_tag_and_style coder = Psych::Coder.new 'foo' coder.map('hello', 'world') { |map| map.add 'foo', 'bar' } assert_equal 'bar', coder['foo'] assert_equal 'hello', coder.tag assert_equal 'world', coder.style end def test_represent_map thing = Psych.load(Psych.dump(RepresentWithMap.new)) assert_equal({ "string" => 'a', :symbol => 'b' }, thing.map) end def test_represent_sequence thing = Psych.load(Psych.dump(RepresentWithSeq.new)) assert_equal %w{ foo bar }, thing.seq end def test_represent_with_init thing = Psych.load(Psych.dump(RepresentWithInit.new)) assert_equal 'bar', thing.str end def test_represent! 
assert_match(/foo/, Psych.dump(Represent.new)) assert_instance_of(Represent, Psych.load(Psych.dump(Represent.new))) end def test_scalar_coder foo = Psych.load(Psych.dump(ScalarCoder.new)) assert_equal 'foo', foo end def test_load_dumped_tagging foo = InitApi.new bar = Psych.load(Psych.dump(foo)) assert_equal false, bar.implicit assert_equal "!ruby/object:Psych::TestCoder::InitApi", bar.tag assert_equal Psych::Nodes::Mapping::BLOCK, bar.style end def test_dump_with_tag foo = TaggingCoder.new assert_match(/hello/, Psych.dump(foo)) assert_match(/\{aa/, Psych.dump(foo)) end def test_dump_encode_with foo = InitApi.new assert_match(/aa/, Psych.dump(foo)) end def test_dump_init_with foo = InitApi.new bar = Psych.load(Psych.dump(foo)) assert_equal foo.a, bar.a assert_equal foo.b, bar.b assert_nil bar.c end end end psych-2.2.4/test/psych/test_date_time.rb000066400000000000000000000013651305404671600203040ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' require 'date' module Psych class TestDateTime < TestCase def test_negative_year time = Time.utc -1, 12, 16 assert_cycle time end def test_new_datetime assert_cycle DateTime.new end def test_invalid_date assert_cycle "2013-10-31T10:40:07-000000000000033" end def test_string_tag dt = DateTime.now yaml = Psych.dump dt assert_match(/DateTime/, yaml) end def test_round_trip dt = DateTime.now assert_cycle dt end def test_alias_with_time t = Time.now h = {:a => t, :b => t} yaml = Psych.dump h assert_match('&', yaml) assert_match('*', yaml) end end end psych-2.2.4/test/psych/test_deprecated.rb000066400000000000000000000124101305404671600204420ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestDeprecated < TestCase def teardown $VERBOSE = @orig_verbose Psych.domain_types.clear end class QuickEmitter attr_reader :name attr_reader :value def initialize @name = 'hello!!' @value = 'Friday!' end def to_yaml opts = {} Psych.quick_emit object_id, opts do |out| out.map taguri, to_yaml_style do |map| map.add 'name', @name map.add 'value', nil end end end end def setup @qe = QuickEmitter.new @orig_verbose, $VERBOSE = $VERBOSE, false end def test_quick_emit qe2 = Psych.load @qe.to_yaml assert_equal @qe.name, qe2.name assert_instance_of QuickEmitter, qe2 assert_nil qe2.value end def test_recursive_quick_emit hash = { :qe => @qe } hash2 = Psych.load Psych.dump hash qe = hash2[:qe] assert_equal @qe.name, qe.name assert_instance_of QuickEmitter, qe assert_nil qe.value end class QuickEmitterEncodeWith attr_reader :name attr_reader :value def initialize @name = 'hello!!' @value = 'Friday!' end def encode_with coder coder.map do |map| map.add 'name', @name map.add 'value', nil end end def to_yaml opts = {} raise end end ### # An object that defines both to_yaml and encode_with should only call # encode_with. def test_recursive_quick_emit_encode_with qeew = QuickEmitterEncodeWith.new hash = { :qe => qeew } hash2 = Psych.load Psych.dump hash qe = hash2[:qe] assert_equal qeew.name, qe.name assert_instance_of QuickEmitterEncodeWith, qe assert_nil qe.value end class YamlInit attr_reader :name attr_reader :value def initialize @name = 'hello!!' @value = 'Friday!' end def yaml_initialize tag, vals vals.each { |ivar, val| instance_variable_set "@#{ivar}", 'TGIF!' 
} end end def test_yaml_initialize hash = { :yi => YamlInit.new } hash2 = Psych.load Psych.dump hash yi = hash2[:yi] assert_equal 'TGIF!', yi.name assert_equal 'TGIF!', yi.value assert_instance_of YamlInit, yi end class YamlInitAndInitWith attr_reader :name attr_reader :value def initialize @name = 'shaners' @value = 'Friday!' end def init_with coder coder.map.each { |ivar, val| instance_variable_set "@#{ivar}", 'TGIF!' } end def yaml_initialize tag, vals raise end end ### # An object that implements both yaml_initialize and init_with should not # receive the yaml_initialize call. def test_yaml_initialize_and_init_with hash = { :yi => YamlInitAndInitWith.new } hash2 = Psych.load Psych.dump hash yi = hash2[:yi] assert_equal 'TGIF!', yi.name assert_equal 'TGIF!', yi.value assert_instance_of YamlInitAndInitWith, yi end def test_coder_scalar coder = Psych::Coder.new 'foo' coder.scalar('tag', 'some string', :plain) assert_equal 'tag', coder.tag assert_equal 'some string', coder.scalar assert_equal :scalar, coder.type end class YamlAs TestCase.suppress_warning do psych_yaml_as 'helloworld' # this should be yaml_as but to avoid syck end end def test_yaml_as assert_match(/helloworld/, Psych.dump(YamlAs.new)) end def test_ruby_type types = [] appender = lambda { |*args| types << args } Psych.add_ruby_type('foo', &appender) Psych.load <<-eoyml - !ruby.yaml.org,2002/foo bar eoyml assert_equal [["tag:ruby.yaml.org,2002:foo", "bar"]], types end def test_detect_implicit assert_equal '', Psych.detect_implicit(nil) assert_equal '', Psych.detect_implicit(Object.new) assert_equal '', Psych.detect_implicit(1.2) assert_equal 'null', Psych.detect_implicit('') assert_equal 'string', Psych.detect_implicit('foo') end def test_private_type types = [] Psych.add_private_type('foo') { |*args| types << args } Psych.load <<-eoyml - !x-private:foo bar eoyml assert_equal [["x-private:foo", "bar"]], types end def test_tagurize assert_nil Psych.tagurize nil assert_equal Psych, Psych.tagurize(Psych) assert_equal 'tag:yaml.org,2002:foo', Psych.tagurize('foo') end def test_read_type_class things = Psych.read_type_class 'tag:yaml.org,2002:int:Psych::TestDeprecated::QuickEmitter', Object assert_equal 'int', things.first assert_equal Psych::TestDeprecated::QuickEmitter, things.last end def test_read_type_class_no_class things = Psych.read_type_class 'tag:yaml.org,2002:int', Object assert_equal 'int', things.first assert_equal Object, things.last end def test_object_maker thing = Psych.object_maker(Object, { 'a' => 'b', 'c' => 'd' }) assert_instance_of(Object, thing) assert_equal 'b', thing.instance_variable_get(:@a) assert_equal 'd', thing.instance_variable_get(:@c) end end end psych-2.2.4/test/psych/test_document.rb000066400000000000000000000020361305404671600201630ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestDocument < TestCase def setup super @stream = Psych.parse_stream(<<-eoyml) %YAML 1.1 %TAG ! tag:tenderlovemaking.com,2009: --- !fun eoyml @doc = @stream.children.first end def test_parse_tag assert_equal([['!', 'tag:tenderlovemaking.com,2009:']], @doc.tag_directives) end def test_emit_tag assert_match('%TAG ! tag:tenderlovemaking.com,2009:', @stream.yaml) end def test_emit_multitag @doc.tag_directives << ['!!', 'foo.com,2009:'] yaml = @stream.yaml assert_match('%TAG ! tag:tenderlovemaking.com,2009:', yaml) assert_match('%TAG !! 
foo.com,2009:', yaml) end def test_emit_bad_tag assert_raises(RuntimeError) do @doc.tag_directives = [['!']] @stream.yaml end end def test_parse_version assert_equal([1,1], @doc.version) end def test_emit_version assert_match('%YAML 1.1', @stream.yaml) end end end psych-2.2.4/test/psych/test_emitter.rb000066400000000000000000000053401305404671600200170ustar00rootroot00000000000000# -*- coding: utf-8 -*- # frozen_string_literal: false require_relative 'helper' module Psych class TestEmitter < TestCase def setup super @out = StringIO.new('') @emitter = Psych::Emitter.new @out end def test_line_width @emitter.line_width = 10 assert_equal 10, @emitter.line_width end def test_set_canonical @emitter.canonical = true assert_equal true, @emitter.canonical @emitter.canonical = false assert_equal false, @emitter.canonical end def test_indentation_set assert_equal 2, @emitter.indentation @emitter.indentation = 5 assert_equal 5, @emitter.indentation end def test_emit_utf_8 @emitter.start_stream Psych::Nodes::Stream::UTF8 @emitter.start_document [], [], false @emitter.scalar '日本語', nil, nil, false, true, 1 @emitter.end_document true @emitter.end_stream assert_match('日本語', @out.string) end def test_start_stream_arg_error assert_raises(TypeError) do @emitter.start_stream 'asdfasdf' end end def test_start_doc_arg_error @emitter.start_stream Psych::Nodes::Stream::UTF8 [ [nil, [], false], [[nil, nil], [], false], [[], 'foo', false], [[], ['foo'], false], [[], [nil,nil], false], [[1,1], [[nil, "tag:TALOS"]], 0], ].each do |args| assert_raises(TypeError) do @emitter.start_document(*args) end end end def test_scalar_arg_error @emitter.start_stream Psych::Nodes::Stream::UTF8 @emitter.start_document [], [], false [ [:foo, nil, nil, false, true, 1], ['foo', Object.new, nil, false, true, 1], ['foo', nil, Object.new, false, true, 1], ['foo', nil, nil, false, true, :foo], [nil, nil, nil, false, true, 1], ].each do |args| assert_raises(TypeError) do @emitter.scalar(*args) end end end def test_start_sequence_arg_error @emitter.start_stream Psych::Nodes::Stream::UTF8 @emitter.start_document [], [], false assert_raises(TypeError) do @emitter.start_sequence(nil, Object.new, true, 1) end assert_raises(TypeError) do @emitter.start_sequence(nil, nil, true, :foo) end end def test_resizing_tags @emitter.start_stream Psych::Nodes::Stream::UTF8 tags = [] version = [1,1] obj = Object.new obj.instance_variable_set(:@tags, tags) def obj.to_str (1..10).map{|x| @tags.push(["AAAA","BBBB"])} return "x" end tags.push([obj, "tag:TALOS"]) @emitter.start_document(version, tags, 0) assert(true) end end end psych-2.2.4/test/psych/test_encoding.rb000066400000000000000000000152531305404671600201400ustar00rootroot00000000000000# -*- coding: utf-8 -*- # frozen_string_literal: false require_relative 'helper' module Psych class TestEncoding < TestCase class EncodingCatcher < Handler attr_reader :strings def initialize @strings = [] end (Handler.instance_methods(true) - Object.instance_methods).each do |m| class_eval %{ def #{m} *args @strings += args.flatten.find_all { |a| String === a } end } end end def setup super @buffer = StringIO.new @handler = EncodingCatcher.new @parser = Psych::Parser.new @handler @utf8 = Encoding.find('UTF-8') @emitter = Psych::Emitter.new @buffer end def test_dump_load_encoding_object assert_cycle Encoding::US_ASCII assert_cycle Encoding::UTF_8 end def test_transcode_shiftjis str = "こんにちは!" loaded = Psych.load("--- こんにちは!".encode('SHIFT_JIS')) assert_equal str, loaded end def test_transcode_utf16le str = "こんにちは!" 
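      # Sketch of the behaviour exercised here (added comment; the encoding
      # result is assumed rather than asserted by this test): Psych transcodes
      # UTF-16 input to UTF-8 before parsing, so the loaded string compares
      # equal to the UTF-8 literal above, e.g.
      #
      #   Psych.load("--- こんにちは!".encode('UTF-16LE')).encoding
      #   # => #<Encoding:UTF-8>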
loaded = Psych.load("--- こんにちは!".encode('UTF-16LE')) assert_equal str, loaded end def test_transcode_utf16be str = "こんにちは!" loaded = Psych.load("--- こんにちは!".encode('UTF-16BE')) assert_equal str, loaded end def test_io_shiftjis Tempfile.create(['shiftjis', 'yml'], :encoding => 'SHIFT_JIS') {|t| t.write '--- こんにちは!' t.close # If the external encoding isn't utf8, utf16le, or utf16be, we cannot # process the file. File.open(t.path, 'r', :encoding => 'SHIFT_JIS') do |f| assert_raises Psych::SyntaxError do Psych.load(f) end end } end def test_io_utf16le Tempfile.create(['utf16le', 'yml']) {|t| t.binmode t.write '--- こんにちは!'.encode('UTF-16LE') t.close File.open(t.path, 'rb', :encoding => 'UTF-16LE') do |f| assert_equal "こんにちは!", Psych.load(f) end } end def test_io_utf16be Tempfile.create(['utf16be', 'yml']) {|t| t.binmode t.write '--- こんにちは!'.encode('UTF-16BE') t.close File.open(t.path, 'rb', :encoding => 'UTF-16BE') do |f| assert_equal "こんにちは!", Psych.load(f) end } end def test_io_utf8 Tempfile.create(['utf8', 'yml']) {|t| t.binmode t.write '--- こんにちは!'.encode('UTF-8') t.close File.open(t.path, 'rb', :encoding => 'UTF-8') do |f| assert_equal "こんにちは!", Psych.load(f) end } end def test_emit_alias @emitter.start_stream Psych::Parser::UTF8 @emitter.start_document [], [], true e = assert_raises(RuntimeError) do @emitter.alias 'ドラえもん'.encode('EUC-JP') end assert_match(/alias value/, e.message) end def test_to_yaml_is_valid with_default_external(Encoding::US_ASCII) do with_default_internal(nil) do s = "こんにちは!" # If no encoding is specified, use UTF-8 assert_equal Encoding::UTF_8, Psych.dump(s).encoding assert_equal s, Psych.load(Psych.dump(s)) end end end def test_start_mapping foo = 'foo' bar = 'バー' @emitter.start_stream Psych::Parser::UTF8 @emitter.start_document [], [], true @emitter.start_mapping( foo.encode('Shift_JIS'), bar.encode('UTF-16LE'), false, Nodes::Sequence::ANY) @emitter.end_mapping @emitter.end_document false @emitter.end_stream @parser.parse @buffer.string assert_encodings @utf8, @handler.strings assert_equal [foo, bar], @handler.strings end def test_start_sequence foo = 'foo' bar = 'バー' @emitter.start_stream Psych::Parser::UTF8 @emitter.start_document [], [], true @emitter.start_sequence( foo.encode('Shift_JIS'), bar.encode('UTF-16LE'), false, Nodes::Sequence::ANY) @emitter.end_sequence @emitter.end_document false @emitter.end_stream @parser.parse @buffer.string assert_encodings @utf8, @handler.strings assert_equal [foo, bar], @handler.strings end def test_doc_tag_encoding key = '鍵' @emitter.start_stream Psych::Parser::UTF8 @emitter.start_document( [1, 1], [['!'.encode('EUC-JP'), key.encode('EUC-JP')]], true ) @emitter.scalar 'foo', nil, nil, true, false, Nodes::Scalar::ANY @emitter.end_document false @emitter.end_stream @parser.parse @buffer.string assert_encodings @utf8, @handler.strings assert_equal key, @handler.strings[1] end def test_emitter_encoding str = "壁に耳あり、障子に目あり" thing = Psych.load Psych.dump str.encode('EUC-JP') assert_equal str, thing end def test_default_internal with_default_internal(Encoding::EUC_JP) do str = "壁に耳あり、障子に目あり" assert_equal @utf8, str.encoding @parser.parse str assert_encodings Encoding::EUC_JP, @handler.strings assert_equal str, @handler.strings.first.encode('UTF-8') end end def test_scalar @parser.parse("--- a") assert_encodings @utf8, @handler.strings end def test_alias @parser.parse(<<-eoyml) %YAML 1.1 --- !!seq [ !!str "Without properties", &A !!str "Anchored", !!str "Tagged", *A, !!str "", ] eoyml assert_encodings @utf8, @handler.strings end def 
test_list_anchor list = %w{ a b } list << list @parser.parse(Psych.dump(list)) assert_encodings @utf8, @handler.strings end def test_map_anchor h = {} h['a'] = h @parser.parse(Psych.dump(h)) assert_encodings @utf8, @handler.strings end def test_map_tag @parser.parse(<<-eoyml) %YAML 1.1 --- !!map { a : b } eoyml assert_encodings @utf8, @handler.strings end def test_doc_tag @parser.parse(<<-eoyml) %YAML 1.1 %TAG ! tag:tenderlovemaking.com,2009: --- !fun eoyml assert_encodings @utf8, @handler.strings end def test_dump_non_ascii_string_to_file Tempfile.create(['utf8', 'yml'], :encoding => 'UTF-8') do |t| h = {'one' => 'いち'} Psych.dump(h, t) t.close assert_equal h, Psych.load_file(t.path) end end private def assert_encodings encoding, strings strings.each do |str| assert_equal encoding, str.encoding, str end end end end psych-2.2.4/test/psych/test_exception.rb000066400000000000000000000070501305404671600203440ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestException < TestCase class Wups < Exception attr_reader :foo, :bar def initialize *args super @foo = 1 @bar = 2 end end def setup super @wups = Wups.new end def test_naming_exception err = String.xxx rescue $! new_err = Psych.load(Psych.dump(err)) assert_equal err.message, new_err.message end def test_load_takes_file ex = assert_raises(Psych::SyntaxError) do Psych.load '--- `' end assert_nil ex.file ex = assert_raises(Psych::SyntaxError) do Psych.load '--- `', 'meow' end assert_equal 'meow', ex.file end def test_psych_parse_stream_takes_file ex = assert_raises(Psych::SyntaxError) do Psych.parse_stream '--- `' end assert_nil ex.file assert_match '()', ex.message ex = assert_raises(Psych::SyntaxError) do Psych.parse_stream '--- `', 'omg!' end assert_equal 'omg!', ex.file assert_match 'omg!', ex.message end def test_load_stream_takes_file ex = assert_raises(Psych::SyntaxError) do Psych.load_stream '--- `' end assert_nil ex.file assert_match '()', ex.message ex = assert_raises(Psych::SyntaxError) do Psych.load_stream '--- `', 'omg!' end assert_equal 'omg!', ex.file end def test_parse_file_exception Tempfile.create(['parsefile', 'yml']) {|t| t.binmode t.write '--- `' t.close ex = assert_raises(Psych::SyntaxError) do Psych.parse_file t.path end assert_equal t.path, ex.file } end def test_load_file_exception Tempfile.create(['loadfile', 'yml']) {|t| t.binmode t.write '--- `' t.close ex = assert_raises(Psych::SyntaxError) do Psych.load_file t.path end assert_equal t.path, ex.file } end def test_psych_parse_takes_file ex = assert_raises(Psych::SyntaxError) do Psych.parse '--- `' end assert_match '()', ex.message assert_nil ex.file ex = assert_raises(Psych::SyntaxError) do Psych.parse '--- `', 'omg!' 
end assert_match 'omg!', ex.message end def test_attributes e = assert_raises(Psych::SyntaxError) { Psych.load '--- `foo' } assert_nil e.file assert_equal 1, e.line assert_equal 5, e.column # FIXME: offset isn't being set correctly by libyaml # assert_equal 5, e.offset assert e.problem assert e.context end def test_convert w = Psych.load(Psych.dump(@wups)) assert_equal @wups, w assert_equal 1, w.foo assert_equal 2, w.bar end def test_to_yaml_properties class << @wups def to_yaml_properties [:@foo] end end w = Psych.load(Psych.dump(@wups)) assert_equal @wups, w assert_equal 1, w.foo assert_nil w.bar end def test_psych_syntax_error Tempfile.create(['parsefile', 'yml']) do |t| t.binmode t.write '--- `' t.close begin Psych.parse_file t.path rescue StandardError assert true # count assertion ensure return unless $! ancestors = $!.class.ancestors.inspect flunk "Psych::SyntaxError not rescued by StandardError: #{ancestors}" end end end end end psych-2.2.4/test/psych/test_hash.rb000066400000000000000000000037441305404671600172770ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestHash < TestCase class X < Hash end class HashWithCustomInit < Hash attr_reader :obj def initialize(obj) @obj = obj end end class HashWithCustomInitNoIvar < Hash def initialize(obj) # *shrug* end end def setup super @hash = { :a => 'b' } end def test_referenced_hash_with_ivar a = [1,2,3,4,5] t1 = [HashWithCustomInit.new(a)] t1 << t1.first assert_cycle t1 end def test_custom_initialized a = [1,2,3,4,5] t1 = HashWithCustomInit.new(a) t2 = Psych.load(Psych.dump(t1)) assert_equal t1, t2 assert_cycle t1 end def test_custom_initialize_no_ivar t1 = HashWithCustomInitNoIvar.new(nil) t2 = Psych.load(Psych.dump(t1)) assert_equal t1, t2 assert_cycle t1 end def test_hash_subclass_with_ivars x = X.new x[:a] = 'b' x.instance_variable_set :@foo, 'bar' dup = Psych.load Psych.dump x assert_cycle x assert_equal 'bar', dup.instance_variable_get(:@foo) assert_equal X, dup.class end def test_load_with_class_syck_compatibility hash = Psych.load "--- !ruby/object:Hash\n:user_id: 7\n:username: Lucas\n" assert_equal({ user_id: 7, username: 'Lucas'}, hash) end def test_empty_subclass assert_match "!ruby/hash:#{X}", Psych.dump(X.new) x = Psych.load Psych.dump X.new assert_equal X, x.class end def test_map x = Psych.load "--- !map:#{X} { }\n" assert_equal X, x.class end def test_self_referential @hash['self'] = @hash assert_cycle(@hash) end def test_cycles assert_cycle(@hash) end def test_ref_append hash = Psych.load(<<-eoyml) --- foo: &foo hello: world bar: <<: *foo eoyml assert_equal({"foo"=>{"hello"=>"world"}, "bar"=>{"hello"=>"world"}}, hash) end end end psych-2.2.4/test/psych/test_json_tree.rb000066400000000000000000000027711305404671600203430ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestJSONTree < TestCase def test_string assert_match(/"foo"/, Psych.to_json("foo")) end def test_symbol assert_match(/"foo"/, Psych.to_json(:foo)) end def test_nil assert_match(/^null/, Psych.to_json(nil)) end def test_int assert_match(/^10/, Psych.to_json(10)) end def test_float assert_match(/^1.2/, Psych.to_json(1.2)) end def test_hash hash = { 'one' => 'two' } json = Psych.to_json(hash) assert_match(/}$/, json) assert_match(/^\{/, json) assert_match(/['"]one['"]/, json) assert_match(/['"]two['"]/, json) end class Bar def encode_with coder coder.represent_seq 'omg', %w{ a b c } end end def test_json_list_dump_exclude_tag json = Psych.to_json 
Bar.new refute_match('omg', json) end def test_list_to_json list = %w{ one two } json = Psych.to_json(list) assert_match(/\]$/, json) assert_match(/^\[/, json) assert_match(/"one"/, json) assert_match(/"two"/, json) end def test_time time = Time.utc(2010, 10, 10) assert_equal "{\"a\": \"2010-10-10 00:00:00.000000000 Z\"}\n", Psych.to_json({'a' => time }) end def test_datetime time = Time.new(2010, 10, 10).to_datetime assert_equal "{\"a\": \"#{time.strftime("%Y-%m-%d %H:%M:%S.%9N %:z")}\"}\n", Psych.to_json({'a' => time }) end end end psych-2.2.4/test/psych/test_marshalable.rb000066400000000000000000000022201305404671600206130ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' require 'delegate' module Psych class TestMarshalable < TestCase def test_objects_defining_marshal_dump_and_marshal_load_can_be_dumped sd = SimpleDelegator.new(1) loaded = Psych.load(Psych.dump(sd)) assert_instance_of(SimpleDelegator, loaded) assert_equal(sd, loaded) end class PsychCustomMarshalable < BasicObject attr_reader :foo def initialize(foo) @foo = foo end def marshal_dump [foo] end def mashal_load(data) @foo = data[0] end def init_with(coder) @foo = coder['foo'] end def encode_with(coder) coder['foo'] = 2 end def respond_to?(method) [:marshal_dump, :marshal_load, :init_with, :encode_with].include?(method) end def class PsychCustomMarshalable end end def test_init_with_takes_priority_over_marshal_methods obj = PsychCustomMarshalable.new(1) loaded = Psych.load(Psych.dump(obj)) assert(PsychCustomMarshalable === loaded) assert_equal(2, loaded.foo) end end end psych-2.2.4/test/psych/test_merge_keys.rb000066400000000000000000000065031305404671600205020ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestMergeKeys < TestCase class Product attr_reader :bar end def test_merge_key_with_bare_hash doc = Psych.load <<-eodoc map: <<: hello: world eodoc hash = { "map" => { "hello" => "world" } } assert_equal hash, doc end def test_roundtrip_with_chevron_key h = {} v = { 'a' => h, '<<' => h } assert_cycle v end def test_explicit_string doc = Psych.load <<-eoyml a: &me { hello: world } b: { !!str '<<': *me } eoyml expected = { "a" => { "hello" => "world" }, "b" => { "<<" => { "hello" => "world" } } } assert_equal expected, doc end def test_mergekey_with_object s = <<-eoyml foo: &foo bar: 10 product: !ruby/object:#{Product.name} <<: *foo eoyml hash = Psych.load s assert_equal({"bar" => 10}, hash["foo"]) product = hash["product"] assert_equal 10, product.bar end def test_merge_nil yaml = <<-eoyml defaults: &defaults development: <<: *defaults eoyml assert_equal({'<<' => nil }, Psych.load(yaml)['development']) end def test_merge_array yaml = <<-eoyml foo: &hello - 1 baz: <<: *hello eoyml assert_equal({'<<' => [1]}, Psych.load(yaml)['baz']) end def test_merge_is_not_partial yaml = <<-eoyml default: &default hello: world foo: &hello - 1 baz: <<: [*hello, *default] eoyml doc = Psych.load yaml refute doc['baz'].key? 
'hello' assert_equal({'<<' => [[1], {"hello"=>"world"}]}, Psych.load(yaml)['baz']) end def test_merge_seq_nil yaml = <<-eoyml foo: &hello baz: <<: [*hello] eoyml assert_equal({'<<' => [nil]}, Psych.load(yaml)['baz']) end def test_bad_seq_merge yaml = <<-eoyml defaults: &defaults [1, 2, 3] development: <<: *defaults eoyml assert_equal({'<<' => [1,2,3]}, Psych.load(yaml)['development']) end def test_missing_merge_key yaml = <<-eoyml bar: << : *foo eoyml exp = assert_raises(Psych::BadAlias) { Psych.load yaml } assert_match 'foo', exp.message end # [ruby-core:34679] def test_merge_key yaml = <<-eoyml foo: &foo hello: world bar: << : *foo baz: boo eoyml hash = { "foo" => { "hello" => "world"}, "bar" => { "hello" => "world", "baz" => "boo" } } assert_equal hash, Psych.load(yaml) end def test_multiple_maps yaml = <<-eoyaml --- - &CENTER { x: 1, y: 2 } - &LEFT { x: 0, y: 2 } - &BIG { r: 10 } - &SMALL { r: 1 } # All the following maps are equal: - # Merge multiple maps << : [ *CENTER, *BIG ] label: center/big eoyaml hash = { 'x' => 1, 'y' => 2, 'r' => 10, 'label' => 'center/big' } assert_equal hash, Psych.load(yaml)[4] end def test_override yaml = <<-eoyaml --- - &CENTER { x: 1, y: 2 } - &LEFT { x: 0, y: 2 } - &BIG { r: 10 } - &SMALL { r: 1 } # All the following maps are equal: - # Override << : [ *BIG, *LEFT, *SMALL ] x: 1 label: center/big eoyaml hash = { 'x' => 1, 'y' => 2, 'r' => 10, 'label' => 'center/big' } assert_equal hash, Psych.load(yaml)[4] end end end psych-2.2.4/test/psych/test_nil.rb000066400000000000000000000005741305404671600171340ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestNil < TestCase def test_nil yml = Psych.dump nil assert_match(/--- \n(?:\.\.\.\n)?/, yml) assert_nil Psych.load(yml) end def test_array_nil yml = Psych.dump [nil] assert_equal "---\n- \n", yml assert_equal [nil], Psych.load(yml) end end end psych-2.2.4/test/psych/test_null.rb000066400000000000000000000004611305404671600173170ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych ### # Test null from YAML spec: # http://yaml.org/type/null.html class TestNull < TestCase def test_null_list assert_equal [nil] * 5, Psych.load(<<-eoyml) --- - ~ - null - - Null - NULL eoyml end end end psych-2.2.4/test/psych/test_numeric.rb000066400000000000000000000017341305404671600200130ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' require 'bigdecimal' module Psych ### # Test numerics from YAML spec: # http://yaml.org/type/float.html # http://yaml.org/type/int.html class TestNumeric < TestCase def setup @old_debug = $DEBUG $DEBUG = true end def teardown $DEBUG = @old_debug end def test_load_float_with_dot assert_equal 1.0, Psych.load('--- 1.') end def test_non_float_with_0 str = Psych.load('--- 090') assert_equal '090', str end def test_big_decimal_tag decimal = BigDecimal("12.34") assert_match "!ruby/object:BigDecimal", Psych.dump(decimal) end def test_big_decimal_round_trip decimal = BigDecimal("12.34") assert_cycle decimal end def test_does_not_attempt_numeric str = Psych.load('--- 4 roses') assert_equal '4 roses', str str = Psych.load('--- 1.1.1') assert_equal '1.1.1', str end end end psych-2.2.4/test/psych/test_object.rb000066400000000000000000000014611305404671600176140ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class Tagged yaml_tag '!foo' attr_accessor :baz def initialize @baz = 'bar' end end class Foo attr_accessor :parent 
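    # Illustrative note (added comment; the anchor name is a placeholder, real
    # dumps use object_id-based anchors): self-referential objects round-trip
    # because the emitter writes an anchor for the first occurrence and an
    # alias for each later reference, roughly
    #
    #   --- &1 !ruby/object:Psych::Foo
    #   parent: *1
    #
    # which is what lets test_cyclic_references below load the object back
    # pointing at itself.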
def initialize parent @parent = parent end end class TestObject < TestCase def test_dump_with_tag tag = Tagged.new assert_match('foo', Psych.dump(tag)) end def test_tag_round_trip tag = Tagged.new tag2 = Psych.load(Psych.dump(tag)) assert_equal tag.baz, tag2.baz assert_instance_of(Tagged, tag2) end def test_cyclic_references foo = Foo.new(nil) foo.parent = foo loaded = Psych.load Psych.dump foo assert_instance_of(Foo, loaded) assert_equal loaded, loaded.parent end end end psych-2.2.4/test/psych/test_object_references.rb000066400000000000000000000031121305404671600220100ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestObjectReferences < TestCase def test_range_has_references assert_reference_trip 1..2 end def test_module_has_references assert_reference_trip Psych end def test_class_has_references assert_reference_trip TestObjectReferences end def test_rational_has_references assert_reference_trip Rational('1.2') end def test_complex_has_references assert_reference_trip Complex(1, 2) end def test_datetime_has_references assert_reference_trip DateTime.now end def test_struct_has_references assert_reference_trip Struct.new(:foo).new(1) end def assert_reference_trip obj yml = Psych.dump([obj, obj]) assert_match(/\*-?\d+/, yml) data = Psych.load yml assert_equal data.first.object_id, data.last.object_id end def test_float_references data = Psych.load <<-eoyml ---\s - &name 1.2 - *name eoyml assert_equal data.first, data.last assert_equal data.first.object_id, data.last.object_id end def test_binary_references data = Psych.load <<-eoyml --- - &name !binary |- aGVsbG8gd29ybGQh - *name eoyml assert_equal data.first, data.last assert_equal data.first.object_id, data.last.object_id end def test_regexp_references data = Psych.load <<-eoyml ---\s - &name !ruby/regexp /pattern/i - *name eoyml assert_equal data.first, data.last assert_equal data.first.object_id, data.last.object_id end end end psych-2.2.4/test/psych/test_omap.rb000066400000000000000000000031641305404671600173040ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestOmap < TestCase def test_parse_as_map o = Psych.load "--- !!omap\na: 1\nb: 2" assert_kind_of Psych::Omap, o assert_equal 1, o['a'] assert_equal 2, o['b'] end def test_self_referential map = Psych::Omap.new map['foo'] = 'bar' map['self'] = map assert_equal(map, Psych.load(Psych.dump(map))) end def test_keys map = Psych::Omap.new map['foo'] = 'bar' assert_equal 'bar', map['foo'] end def test_order map = Psych::Omap.new map['a'] = 'b' map['b'] = 'c' assert_equal [%w{a b}, %w{b c}], map.to_a end def test_square list = [["a", "b"], ["b", "c"]] map = Psych::Omap[*list.flatten] assert_equal list, map.to_a assert_equal 'b', map['a'] assert_equal 'c', map['b'] end def test_dump map = Psych::Omap['a', 'b', 'c', 'd'] yaml = Psych.dump(map) assert_match('!omap', yaml) assert_match('- a: b', yaml) assert_match('- c: d', yaml) end def test_round_trip list = [["a", "b"], ["b", "c"]] map = Psych::Omap[*list.flatten] assert_cycle(map) end def test_load list = [["a", "b"], ["c", "d"]] map = Psych.load(<<-eoyml) --- !omap - a: b - c: d eoyml assert_equal list, map.to_a end # NOTE: This test will not work with Syck def test_load_shorthand list = [["a", "b"], ["c", "d"]] map = Psych.load(<<-eoyml) --- !!omap - a: b - c: d eoyml assert_equal list, map.to_a end end end psych-2.2.4/test/psych/test_parser.rb000066400000000000000000000213601305404671600176420ustar00rootroot00000000000000# 
coding: utf-8 # frozen_string_literal: false require_relative 'helper' module Psych class TestParser < TestCase class EventCatcher < Handler attr_accessor :parser attr_reader :calls, :marks def initialize @parser = nil @calls = [] @marks = [] end (Handler.instance_methods(true) - Object.instance_methods).each do |m| class_eval %{ def #{m} *args super @marks << @parser.mark if @parser @calls << [:#{m}, args] end } end end def setup super @handler = EventCatcher.new @parser = Psych::Parser.new @handler @handler.parser = @parser end def test_ast_roundtrip parser = Psych.parser parser.parse('null') ast = parser.handler.root assert_match(/^null/, ast.yaml) end def test_exception_memory_leak yaml = <<-eoyaml %YAML 1.1 %TAG ! tag:tenderlovemaking.com,2009: --- &ponies - first element - *ponies - foo: bar ... eoyaml [:start_stream, :start_document, :end_document, :alias, :scalar, :start_sequence, :end_sequence, :start_mapping, :end_mapping, :end_stream].each do |method| klass = Class.new(Psych::Handler) do define_method(method) do |*args| raise end end parser = Psych::Parser.new klass.new 2.times { assert_raises(RuntimeError, method.to_s) do parser.parse yaml end } end end def test_multiparse 3.times do @parser.parse '--- foo' end end def test_filename ex = assert_raises(Psych::SyntaxError) do @parser.parse '--- `', 'omg!' end assert_match 'omg!', ex.message end def test_line_numbers assert_equal 0, @parser.mark.line @parser.parse "---\n- hello\n- world" line_calls = @handler.marks.map(&:line).zip(@handler.calls.map(&:first)) assert_equal [[0, :start_stream], [0, :start_document], [1, :start_sequence], [2, :scalar], [3, :scalar], [3, :end_sequence], [3, :end_document], [3, :end_stream]], line_calls assert_equal 3, @parser.mark.line end def test_column_numbers assert_equal 0, @parser.mark.column @parser.parse "---\n- hello\n- world" col_calls = @handler.marks.map(&:column).zip(@handler.calls.map(&:first)) assert_equal [[0, :start_stream], [3, :start_document], [1, :start_sequence], [0, :scalar], [0, :scalar], [0, :end_sequence], [0, :end_document], [0, :end_stream]], col_calls assert_equal 0, @parser.mark.column end def test_index_numbers assert_equal 0, @parser.mark.index @parser.parse "---\n- hello\n- world" idx_calls = @handler.marks.map(&:index).zip(@handler.calls.map(&:first)) assert_equal [[0, :start_stream], [3, :start_document], [5, :start_sequence], [12, :scalar], [19, :scalar], [19, :end_sequence], [19, :end_document], [19, :end_stream]], idx_calls assert_equal 19, @parser.mark.index end def test_bom tadpole = 'おたまじゃくし' # BOM + text yml = "\uFEFF#{tadpole}".encode('UTF-16LE') @parser.parse yml assert_equal tadpole, @parser.handler.calls[2][1].first end def test_external_encoding tadpole = 'おたまじゃくし' @parser.external_encoding = Psych::Parser::UTF16LE @parser.parse tadpole.encode 'UTF-16LE' assert_equal tadpole, @parser.handler.calls[2][1].first end def test_bogus_io o = Object.new def o.external_encoding; nil end def o.read len; self end assert_raises(TypeError) do @parser.parse o end end def test_parse_io @parser.parse StringIO.new("--- a") assert_called :start_stream assert_called :scalar assert_called :end_stream end def test_syntax_error assert_raises(Psych::SyntaxError) do @parser.parse("---\n\"foo\"\n\"bar\"\n") end end def test_syntax_error_twice assert_raises(Psych::SyntaxError) do @parser.parse("---\n\"foo\"\n\"bar\"\n") end assert_raises(Psych::SyntaxError) do @parser.parse("---\n\"foo\"\n\"bar\"\n") end end def test_syntax_error_has_path_for_string e = 
assert_raises(Psych::SyntaxError) do @parser.parse("---\n\"foo\"\n\"bar\"\n") end assert_match '():', e.message end def test_syntax_error_has_path_for_io io = StringIO.new "---\n\"foo\"\n\"bar\"\n" def io.path; "hello!"; end e = assert_raises(Psych::SyntaxError) do @parser.parse(io) end assert_match "(#{io.path}):", e.message end def test_mapping_end @parser.parse("---\n!!map { key: value }") assert_called :end_mapping end def test_mapping_tag @parser.parse("---\n!!map { key: value }") assert_called :start_mapping, ["tag:yaml.org,2002:map", false, Nodes::Mapping::FLOW] end def test_mapping_anchor @parser.parse("---\n&A { key: value }") assert_called :start_mapping, ['A', true, Nodes::Mapping::FLOW] end def test_mapping_block @parser.parse("---\n key: value") assert_called :start_mapping, [true, Nodes::Mapping::BLOCK] end def test_mapping_start @parser.parse("---\n{ key: value }") assert_called :start_mapping assert_called :start_mapping, [true, Nodes::Mapping::FLOW] end def test_sequence_end @parser.parse("---\n&A [1, 2]") assert_called :end_sequence end def test_sequence_start_anchor @parser.parse("---\n&A [1, 2]") assert_called :start_sequence, ["A", true, Nodes::Sequence::FLOW] end def test_sequence_start_tag @parser.parse("---\n!!seq [1, 2]") assert_called :start_sequence, ["tag:yaml.org,2002:seq", false, Nodes::Sequence::FLOW] end def test_sequence_start_flow @parser.parse("---\n[1, 2]") assert_called :start_sequence, [true, Nodes::Sequence::FLOW] end def test_sequence_start_block @parser.parse("---\n - 1\n - 2") assert_called :start_sequence, [true, Nodes::Sequence::BLOCK] end def test_literal_scalar @parser.parse(<<-eoyml) %YAML 1.1 --- "literal\n\ \ttext\n" eoyml assert_called :scalar, ['literal text ', false, true, Nodes::Scalar::DOUBLE_QUOTED] end def test_scalar @parser.parse("--- foo\n") assert_called :scalar, ['foo', true, false, Nodes::Scalar::PLAIN] end def test_scalar_with_tag @parser.parse("---\n!!str foo\n") assert_called :scalar, ['foo', 'tag:yaml.org,2002:str', false, false, Nodes::Scalar::PLAIN] end def test_scalar_with_anchor @parser.parse("---\n&A foo\n") assert_called :scalar, ['foo', 'A', true, false, Nodes::Scalar::PLAIN] end def test_scalar_plain_implicit @parser.parse("---\n&A foo\n") assert_called :scalar, ['foo', 'A', true, false, Nodes::Scalar::PLAIN] end def test_alias @parser.parse(<<-eoyml) %YAML 1.1 --- !!seq [ !!str "Without properties", &A !!str "Anchored", !!str "Tagged", *A, !!str "", ] eoyml assert_called :alias, ['A'] end def test_end_stream @parser.parse("--- foo\n") assert_called :end_stream end def test_start_stream @parser.parse("--- foo\n") assert_called :start_stream end def test_end_document_implicit @parser.parse("\"foo\"\n") assert_called :end_document, [true] end def test_end_document_explicit @parser.parse("\"foo\"\n...") assert_called :end_document, [false] end def test_start_document_version @parser.parse("%YAML 1.1\n---\n\"foo\"\n") assert_called :start_document, [[1,1], [], false] end def test_start_document_tag @parser.parse("%TAG !yaml! tag:yaml.org,2002\n---\n!yaml!str \"foo\"\n") assert_called :start_document, [[], [['!yaml!', 'tag:yaml.org,2002']], false] end def assert_called call, with = nil, parser = @parser if with call = parser.handler.calls.find { |x| x.first == call && x.last.compact == with } assert(call, "#{[call,with].inspect} not in #{parser.handler.calls.inspect}" ) else assert parser.handler.calls.any? 
{ |x| x.first == call } end end end end psych-2.2.4/test/psych/test_psych.rb000066400000000000000000000106631305404671600175000ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' require 'stringio' require 'tempfile' class TestPsych < Psych::TestCase def teardown Psych.domain_types.clear end def test_line_width_invalid assert_raises(ArgumentError) { Psych.dump('x', { :line_width => -2 }) } end def test_line_width_no_limit data = { 'a' => 'a b' * 50} expected = "---\na: #{'a b' * 50}\n" assert_equal(expected, Psych.dump(data, { :line_width => -1 })) end def test_line_width_limit yml = Psych.dump('123456 7', { :line_width => 5 }) assert_match(/^\s*7/, yml) end def test_indent yml = Psych.dump({:a => {'b' => 'c'}}, {:indentation => 5}) assert_match(/^[ ]{5}b/, yml) end def test_canonical yml = Psych.dump({:a => {'b' => 'c'}}, {:canonical => true}) assert_match(/\? "b/, yml) end def test_header yml = Psych.dump({:a => {'b' => 'c'}}, {:header => true}) assert_match(/YAML/, yml) end def test_version_array yml = Psych.dump({:a => {'b' => 'c'}}, {:version => [1,1]}) assert_match(/1.1/, yml) end def test_version_string yml = Psych.dump({:a => {'b' => 'c'}}, {:version => '1.1'}) assert_match(/1.1/, yml) end def test_version_bool yml = Psych.dump({:a => {'b' => 'c'}}, {:version => true}) assert_match(/1.1/, yml) end def test_load_argument_error assert_raises(TypeError) do Psych.load nil end end def test_non_existing_class_on_deserialize e = assert_raises(ArgumentError) do Psych.load("--- !ruby/object:NonExistent\nfoo: 1") end assert_equal 'undefined class/module NonExistent', e.message end def test_dump_stream things = [22, "foo \n", {}] stream = Psych.dump_stream(*things) assert_equal things, Psych.load_stream(stream) end def test_dump_file hash = {'hello' => 'TGIF!'} Tempfile.create('fun.yml') do |io| assert_equal io, Psych.dump(hash, io) io.rewind assert_equal Psych.dump(hash), io.read end end def test_dump_io hash = {'hello' => 'TGIF!'} stringio = StringIO.new '' assert_equal stringio, Psych.dump(hash, stringio) assert_equal Psych.dump(hash), stringio.string end def test_simple assert_equal 'foo', Psych.load("--- foo\n") end def test_libyaml_version assert Psych.libyaml_version assert_equal Psych.libyaml_version.join('.'), Psych::LIBYAML_VERSION end def test_load_documents docs = Psych.load_documents("--- foo\n...\n--- bar\n...") assert_equal %w{ foo bar }, docs end def test_parse_stream docs = Psych.parse_stream("--- foo\n...\n--- bar\n...") assert_equal %w{ foo bar }, docs.children.map { |x| x.transform } end def test_add_builtin_type got = nil Psych.add_builtin_type 'omap' do |type, val| got = val end Psych.load('--- !!omap hello') assert_equal 'hello', got ensure Psych.remove_type 'omap' end def test_domain_types got = nil Psych.add_domain_type 'foo.bar,2002', 'foo' do |type, val| got = val end Psych.load('--- !foo.bar,2002/foo hello') assert_equal 'hello', got Psych.load("--- !foo.bar,2002/foo\n- hello\n- world") assert_equal %w{ hello world }, got Psych.load("--- !foo.bar,2002/foo\nhello: world") assert_equal({ 'hello' => 'world' }, got) end def test_load_file Tempfile.create(['yikes', 'yml']) {|t| t.binmode t.write('--- hello world') t.close assert_equal 'hello world', Psych.load_file(t.path) } end def test_load_file_with_fallback Tempfile.create(['empty', 'yml']) {|t| assert_equal Hash.new, Psych.load_file(t.path, Hash.new) } end def test_parse_file Tempfile.create(['yikes', 'yml']) {|t| t.binmode t.write('--- hello world') t.close assert_equal 'hello 
world', Psych.parse_file(t.path).transform } end def test_degenerate_strings assert_equal false, Psych.load(' ') assert_equal false, Psych.parse(' ') assert_equal false, Psych.load('') assert_equal false, Psych.parse('') end def test_callbacks types = [] appender = lambda { |*args| types << args } Psych.add_builtin_type('foo', &appender) Psych.add_domain_type('example.com,2002', 'foo', &appender) Psych.load <<-eoyml - !tag:yaml.org,2002:foo bar - !tag:example.com,2002:foo bar eoyml assert_equal [ ["tag:yaml.org,2002:foo", "bar"], ["tag:example.com,2002:foo", "bar"] ], types end end psych-2.2.4/test/psych/test_safe_load.rb000066400000000000000000000044611305404671600202660ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/helper' module Psych class TestSafeLoad < TestCase class Foo; end [1, 2.2, {}, [], "foo"].each do |obj| define_method(:"test_basic_#{obj.class}") do assert_safe_cycle obj end end def test_no_recursion x = [] x << x assert_raises(Psych::BadAlias) do Psych.safe_load Psych.dump(x) end end def test_explicit_recursion x = [] x << x assert_equal(x, Psych.safe_load(Psych.dump(x), [], [], true)) end def test_symbol_whitelist yml = Psych.dump :foo assert_raises(Psych::DisallowedClass) do Psych.safe_load yml end assert_equal(:foo, Psych.safe_load(yml, [Symbol], [:foo])) end def test_symbol assert_raises(Psych::DisallowedClass) do assert_safe_cycle :foo end assert_raises(Psych::DisallowedClass) do Psych.safe_load '--- !ruby/symbol foo', [] end assert_safe_cycle :foo, [Symbol] assert_safe_cycle :foo, %w{ Symbol } assert_equal :foo, Psych.safe_load('--- !ruby/symbol foo', [Symbol]) end def test_foo assert_raises(Psych::DisallowedClass) do Psych.safe_load '--- !ruby/object:Foo {}', [Foo] end assert_raises(Psych::DisallowedClass) do assert_safe_cycle Foo.new end assert_kind_of(Foo, Psych.safe_load(Psych.dump(Foo.new), [Foo])) end X = Struct.new(:x) def test_struct_depends_on_sym assert_safe_cycle(X.new, [X, Symbol]) assert_raises(Psych::DisallowedClass) do cycle X.new, [X] end end def test_anon_struct assert Psych.safe_load(<<-eoyml, [Struct, Symbol]) --- !ruby/struct foo: bar eoyml assert_raises(Psych::DisallowedClass) do Psych.safe_load(<<-eoyml, [Struct]) --- !ruby/struct foo: bar eoyml end assert_raises(Psych::DisallowedClass) do Psych.safe_load(<<-eoyml, [Symbol]) --- !ruby/struct foo: bar eoyml end end private def cycle object, whitelist = [] Psych.safe_load(Psych.dump(object), whitelist) end def assert_safe_cycle object, whitelist = [] other = cycle object, whitelist assert_equal object, other end end end psych-2.2.4/test/psych/test_scalar.rb000066400000000000000000000003371305404671600176140ustar00rootroot00000000000000# -*- coding: utf-8 -*- # frozen_string_literal: false require_relative 'helper' module Psych class TestScalar < TestCase def test_utf_8 assert_equal "日本語", Psych.load("--- 日本語") end end end psych-2.2.4/test/psych/test_scalar_scanner.rb000066400000000000000000000053151305404671600213260ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' require 'date' module Psych class TestScalarScanner < TestCase attr_reader :ss def setup super @ss = Psych::ScalarScanner.new ClassLoader.new end def test_scan_time { '2001-12-15T02:59:43.1Z' => Time.utc(2001, 12, 15, 02, 59, 43, 100000), '2001-12-14t21:59:43.10-05:00' => Time.utc(2001, 12, 15, 02, 59, 43, 100000), '2001-12-14 21:59:43.10 -5' => Time.utc(2001, 12, 15, 02, 59, 43, 100000), '2001-12-15 2:59:43.10' => Time.utc(2001, 12, 15, 02, 59, 43, 100000), '2011-02-24 
11:17:06 -0800' => Time.utc(2011, 02, 24, 19, 17, 06) }.each do |time_str, time| assert_equal time, @ss.tokenize(time_str) end end def test_scan_bad_time [ '2001-12-15T02:59:73.1Z', '2001-12-14t90:59:43.10-05:00', '2001-92-14 21:59:43.10 -5', '2001-12-15 92:59:43.10', '2011-02-24 81:17:06 -0800', ].each do |time_str| assert_equal time_str, @ss.tokenize(time_str) end end def test_scan_bad_dates x = '2000-15-01' assert_equal x, @ss.tokenize(x) x = '2000-10-51' assert_equal x, @ss.tokenize(x) x = '2000-10-32' assert_equal x, @ss.tokenize(x) end def test_scan_good_edge_date x = '2000-1-31' assert_equal Date.strptime(x, '%Y-%m-%d'), @ss.tokenize(x) end def test_scan_bad_edge_date x = '2000-11-31' assert_equal x, @ss.tokenize(x) end def test_scan_date date = '1980-12-16' token = @ss.tokenize date assert_equal 1980, token.year assert_equal 12, token.month assert_equal 16, token.day end def test_scan_inf assert_equal(1 / 0.0, ss.tokenize('.inf')) end def test_scan_minus_inf assert_equal(-1 / 0.0, ss.tokenize('-.inf')) end def test_scan_nan assert ss.tokenize('.nan').nan? end def test_scan_float_with_exponent_but_no_fraction assert_equal(0.0, ss.tokenize('0.E+0')) end def test_scan_null assert_equal nil, ss.tokenize('null') assert_equal nil, ss.tokenize('~') assert_equal nil, ss.tokenize('') end def test_scan_symbol assert_equal :foo, ss.tokenize(':foo') end def test_scan_sexagesimal_float assert_equal 685230.15, ss.tokenize('190:20:30.15') end def test_scan_sexagesimal_int assert_equal 685230, ss.tokenize('190:20:30') end def test_scan_float assert_equal 1.2, ss.tokenize('1.2') end def test_scan_true assert_equal true, ss.tokenize('true') end def test_scan_strings_starting_with_underscores assert_equal "_100", ss.tokenize('_100') end end end psych-2.2.4/test/psych/test_serialize_subclasses.rb000066400000000000000000000015051305404671600225630ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestSerializeSubclasses < TestCase class SomeObject def initialize one, two @one = one @two = two end def == other @one == other.instance_eval { @one } && @two == other.instance_eval { @two } end end def test_some_object so = SomeObject.new('foo', [1,2,3]) assert_equal so, Psych.load(Psych.dump(so)) end class StructSubclass < Struct.new(:foo) def initialize foo, bar super(foo) @bar = bar end def == other super(other) && @bar == other.instance_eval{ @bar } end end def test_struct_subclass so = StructSubclass.new('foo', [1,2,3]) assert_equal so, Psych.load(Psych.dump(so)) end end end psych-2.2.4/test/psych/test_set.rb000066400000000000000000000016201305404671600171360ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestSet < TestCase def setup super @set = Psych::Set.new @set['foo'] = 'bar' @set['bar'] = 'baz' end def test_dump assert_match(/!set/, Psych.dump(@set)) end def test_roundtrip assert_cycle(@set) end ### # FIXME: Syck should also support !!set as shorthand def test_load_from_yaml loaded = Psych.load(<<-eoyml) --- !set foo: bar bar: baz eoyml assert_equal(@set, loaded) end def test_loaded_class assert_instance_of(Psych::Set, Psych.load(Psych.dump(@set))) end def test_set_shorthand loaded = Psych.load(<<-eoyml) --- !!set foo: bar bar: baz eoyml assert_instance_of(Psych::Set, loaded) end def test_set_self_reference @set['self'] = @set assert_cycle(@set) end end end psych-2.2.4/test/psych/test_stream.rb000066400000000000000000000045051305404671600176430ustar00rootroot00000000000000# 
frozen_string_literal: false require_relative 'helper' module Psych class TestStream < TestCase def test_parse_partial rb = Psych.parse("--- foo\n...\n--- `").to_ruby assert_equal 'foo', rb end def test_load_partial rb = Psych.load("--- foo\n...\n--- `") assert_equal 'foo', rb end def test_parse_stream_yields_documents list = [] Psych.parse_stream("--- foo\n...\n--- bar") do |doc| list << doc.to_ruby end assert_equal %w{ foo bar }, list end def test_parse_stream_break list = [] Psych.parse_stream("--- foo\n...\n--- `") do |doc| list << doc.to_ruby break end assert_equal %w{ foo }, list end def test_load_stream_yields_documents list = [] Psych.load_stream("--- foo\n...\n--- bar") do |ruby| list << ruby end assert_equal %w{ foo bar }, list end def test_load_stream_break list = [] Psych.load_stream("--- foo\n...\n--- `") do |ruby| list << ruby break end assert_equal %w{ foo }, list end def test_explicit_documents io = StringIO.new stream = Psych::Stream.new(io) stream.start stream.push({ 'foo' => 'bar' }) assert !stream.finished?, 'stream not finished' stream.finish assert stream.finished?, 'stream finished' assert_match(/^---/, io.string) assert_match(/\.\.\.$/, io.string) end def test_start_takes_block io = StringIO.new stream = Psych::Stream.new(io) stream.start do |emitter| emitter.push({ 'foo' => 'bar' }) end assert stream.finished?, 'stream finished' assert_match(/^---/, io.string) assert_match(/\.\.\.$/, io.string) end def test_no_backreferences io = StringIO.new stream = Psych::Stream.new(io) stream.start do |emitter| x = { 'foo' => 'bar' } emitter.push x emitter.push x end assert stream.finished?, 'stream finished' assert_match(/^---/, io.string) assert_match(/\.\.\.$/, io.string) assert_equal 2, io.string.scan('---').length assert_equal 2, io.string.scan('...').length assert_equal 2, io.string.scan('foo').length assert_equal 2, io.string.scan('bar').length end end end psych-2.2.4/test/psych/test_string.rb000066400000000000000000000137261305404671600176630ustar00rootroot00000000000000# encoding: UTF-8 # frozen_string_literal: false require_relative 'helper' module Psych class TestString < TestCase class X < String end class Y < String attr_accessor :val end class Z < String def initialize force_encoding Encoding::US_ASCII end end def test_string_with_newline assert_equal "1\n2", Psych.load("--- ! 
'1\n\n 2'\n") end def test_no_doublequotes_with_special_characters assert_equal 2, Psych.dump(%Q{<%= ENV["PATH"] %>}).count('"') end def test_no_quotes_when_start_with_non_ascii_character yaml = Psych.dump 'Český non-ASCII'.encode(Encoding::UTF_8) assert_match(/---\s*[^"'!]+$/, yaml) end def test_doublequotes_when_there_is_a_single str = "@123'abc" yaml = Psych.dump str assert_match /---\s*"/, yaml assert_equal str, Psych.load(yaml) end def test_plain_when_shorten_than_line_width_and_no_final_line_break str = "Lorem ipsum" yaml = Psych.dump str, line_width: 12 assert_match /---\s*[^>|]+\n/, yaml assert_equal str, Psych.load(yaml) end def test_plain_when_shorten_than_line_width_and_with_final_line_break str = "Lorem ipsum\n" yaml = Psych.dump str, line_width: 12 assert_match /---\s*[^>|]+\n/, yaml assert_equal str, Psych.load(yaml) end def test_folded_when_longer_than_line_width_and_with_final_line_break str = "Lorem ipsum dolor sit\n" yaml = Psych.dump str, line_width: 12 assert_match /---\s*>\n(.*\n){2}\Z/, yaml assert_equal str, Psych.load(yaml) end # http://yaml.org/spec/1.2/2009-07-21/spec.html#id2593651 def test_folded_strip_when_longer_than_line_width_and_no_newlines str = "Lorem ipsum dolor sit amet, consectetur" yaml = Psych.dump str, line_width: 12 assert_match /---\s*>-\n(.*\n){3}\Z/, yaml assert_equal str, Psych.load(yaml) end def test_literal_when_inner_and_final_line_break [ "Lorem ipsum\ndolor\n", "Lorem ipsum\nZolor\n", ].each do |str| yaml = Psych.dump str, line_width: 12 assert_match /---\s*\|\n(.*\n){2}\Z/, yaml assert_equal str, Psych.load(yaml) end end # http://yaml.org/spec/1.2/2009-07-21/spec.html#id2593651 def test_literal_strip_when_inner_line_break_and_no_final_line_break [ "Lorem ipsum\ndolor", "Lorem ipsum\nZolor", ].each do |str| yaml = Psych.dump str, line_width: 12 assert_match /---\s*\|-\n(.*\n){2}\Z/, yaml assert_equal str, Psych.load(yaml) end end def test_cycle_x str = X.new 'abc' assert_cycle str end def test_dash_dot assert_cycle '-.' assert_cycle '+.' end def test_float_with_no_fractional_before_exponent assert_cycle '0.E+0' end def test_string_subclass_with_anchor y = Psych.load <<-eoyml --- body: string: &70121654388580 !ruby/string str: ! 'foo' x: body: *70121654388580 eoyml assert_equal({"body"=>{"string"=>"foo", "x"=>{"body"=>"foo"}}}, y) end def test_self_referential_string y = Psych.load <<-eoyml --- string: &70121654388580 !ruby/string str: ! 'foo' body: *70121654388580 eoyml assert_equal({"string"=>"foo"}, y) value = y['string'] assert_equal value, value.instance_variable_get(:@body) end def test_another_subclass_with_attributes y = Psych.load Psych.dump Y.new("foo").tap {|y| y.val = 1} assert_equal "foo", y assert_equal Y, y.class assert_equal 1, y.val end def test_backwards_with_syck x = Psych.load "--- !str:#{X.name} foo\n\n" assert_equal X, x.class assert_equal 'foo', x end def test_empty_subclass assert_match "!ruby/string:#{X}", Psych.dump(X.new) x = Psych.load Psych.dump X.new assert_equal X, x.class end def test_empty_character_subclass assert_match "!ruby/string:#{Z}", Psych.dump(Z.new) x = Psych.load Psych.dump Z.new assert_equal Z, x.class end def test_subclass_with_attributes y = Psych.load Psych.dump Y.new.tap {|y| y.val = 1} assert_equal Y, y.class assert_equal 1, y.val end def test_string_with_base_60 yaml = Psych.dump '01:03:05' assert_match "'01:03:05'", yaml assert_equal '01:03:05', Psych.load(yaml) end def test_nonascii_string_as_binary string = "hello \x80 world!" 
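      # Sketch (added comment; exact layout assumed, the test only checks for
      # /binary/): strings that are not valid UTF-8 are dumped with the
      # !binary tag and base64-encoded data, e.g.
      #
      #   Psych.dump("\x80".force_encoding('ascii-8bit'))
      #   # => "--- !binary |-\n  gA==\n"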
string.force_encoding 'ascii-8bit' yml = Psych.dump string assert_match(/binary/, yml) assert_equal string, Psych.load(yml) end def test_binary_string_null string = "\x00" yml = Psych.dump string assert_match(/binary/, yml) assert_equal string, Psych.load(yml) end def test_binary_string string = binary_string yml = Psych.dump string assert_match(/binary/, yml) assert_equal string, Psych.load(yml) end def test_non_binary_string string = binary_string(0.29) yml = Psych.dump string refute_match(/binary/, yml) assert_equal string, Psych.load(yml) end def test_ascii_only_8bit_string string = "abc".encode(Encoding::ASCII_8BIT) yml = Psych.dump string refute_match(/binary/, yml) assert_equal string, Psych.load(yml) end def test_string_with_ivars food = "is delicious" ivar = "on rock and roll" food.instance_variable_set(:@we_built_this_city, ivar) Psych.load Psych.dump food assert_equal ivar, food.instance_variable_get(:@we_built_this_city) end def test_binary string = [0, 123,22, 44, 9, 32, 34, 39].pack('C*') assert_cycle string end def test_float_confusion assert_cycle '1.' end def binary_string percentage = 0.31, length = 100 string = '' (percentage * length).to_i.times do |i| string << "\b" end string << 'a' * (length - string.length) string end end end psych-2.2.4/test/psych/test_struct.rb000066400000000000000000000017541305404671600176770ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' class PsychStructWithIvar < Struct.new(:foo) attr_reader :bar def initialize *args super @bar = 'hello' end end module Psych class TestStruct < TestCase class StructSubclass < Struct.new(:foo) def initialize foo, bar super(foo) @bar = bar end end def test_self_referential_struct ss = StructSubclass.new(nil, 'foo') ss.foo = ss loaded = Psych.load(Psych.dump(ss)) assert_instance_of(StructSubclass, loaded.foo) assert_equal(ss, loaded) end def test_roundtrip thing = PsychStructWithIvar.new('bar') struct = Psych.load(Psych.dump(thing)) assert_equal 'hello', struct.bar assert_equal 'bar', struct.foo end def test_load obj = Psych.load(<<-eoyml) --- !ruby/struct:PsychStructWithIvar :foo: bar :@bar: hello eoyml assert_equal 'hello', obj.bar assert_equal 'bar', obj.foo end end end psych-2.2.4/test/psych/test_symbol.rb000066400000000000000000000006351305404671600176550ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestSymbol < TestCase def test_cycle_empty assert_cycle :'' end def test_cycle_colon assert_cycle :':' end def test_cycle assert_cycle :a end def test_stringy assert_cycle :"1" end def test_load_quoted assert_equal :"1", Psych.load("--- :'1'\n") end end end psych-2.2.4/test/psych/test_tainted.rb000066400000000000000000000055151305404671600200020ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestStringTainted < TestCase class Tainted < Handler attr_reader :tc def initialize tc @tc = tc end def start_document version, tags, implicit tags.flatten.each do |tag| assert_taintedness tag end end def alias name assert_taintedness name end def scalar value, anchor, tag, plain, quoted, style assert_taintedness value assert_taintedness tag if tag assert_taintedness anchor if anchor end def start_sequence anchor, tag, implicit, style assert_taintedness tag if tag assert_taintedness anchor if anchor end def start_mapping anchor, tag, implicit, style assert_taintedness tag if tag assert_taintedness anchor if anchor end def assert_taintedness thing, message = "'#{thing}' 
should be tainted" tc.assert thing.tainted?, message end end class Untainted < Tainted def assert_taintedness thing, message = "'#{thing}' should not be tainted" tc.assert !thing.tainted?, message end end def setup handler = Tainted.new self @parser = Psych::Parser.new handler end def test_tags_are_tainted assert_taintedness "%TAG !yaml! tag:yaml.org,2002:\n---\n!yaml!str \"foo\"" end def test_alias assert_taintedness "--- &ponies\n- foo\n- *ponies" end def test_scalar assert_taintedness "--- ponies" end def test_anchor assert_taintedness "--- &hi ponies" end def test_scalar_tag assert_taintedness "--- !str ponies" end def test_seq_start_tag assert_taintedness "--- !!seq [ a ]" end def test_seq_start_anchor assert_taintedness "--- &zomg [ a ]" end def test_seq_mapping_tag assert_taintedness "--- !!map { a: b }" end def test_seq_mapping_anchor assert_taintedness "--- &himom { a: b }" end def assert_taintedness string @parser.parse string.taint end end class TestStringUntainted < TestStringTainted def setup handler = Untainted.new self @parser = Psych::Parser.new handler end def assert_taintedness string @parser.parse string end end class TestStringIOUntainted < TestStringTainted def setup handler = Untainted.new self @parser = Psych::Parser.new handler end def assert_taintedness string @parser.parse StringIO.new(string) end end class TestIOTainted < TestStringTainted def assert_taintedness string Tempfile.create(['something', 'yml']) {|t| t.binmode t.write string t.close File.open(t.path, 'r:bom|utf-8') { |f| @parser.parse f } } end end end psych-2.2.4/test/psych/test_to_yaml_properties.rb000066400000000000000000000026171305404671600222720ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestToYamlProperties < Psych::TestCase class Foo attr_accessor :a, :b, :c def initialize @a = 1 @b = 2 @c = 3 end def to_yaml_properties [:@a, :@b] end end def test_object_dump_yaml_properties foo = Psych.load(Psych.dump(Foo.new)) assert_equal 1, foo.a assert_equal 2, foo.b assert_nil foo.c end class Bar < Struct.new(:foo, :bar) attr_reader :baz def initialize *args super @baz = 'hello' end def to_yaml_properties [] end end def test_struct_dump_yaml_properties bar = Psych.load(Psych.dump(Bar.new('a', 'b'))) assert_equal 'a', bar.foo assert_equal 'b', bar.bar assert_nil bar.baz end def test_string_dump string = "okonomiyaki" class << string def to_yaml_properties [:@tastes] end end string.instance_variable_set(:@tastes, 'delicious') v = Psych.load Psych.dump string assert_equal 'delicious', v.instance_variable_get(:@tastes) end def test_string_load_syck str = Psych.load("--- !str \nstr: okonomiyaki\n:@tastes: delicious\n") assert_equal 'okonomiyaki', str assert_equal 'delicious', str.instance_variable_get(:@tastes) end end end psych-2.2.4/test/psych/test_tree_builder.rb000066400000000000000000000035411305404671600210140ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' module Psych class TestTreeBuilder < TestCase def setup super @parser = Psych::Parser.new TreeBuilder.new @parser.parse(<<-eoyml) %YAML 1.1 --- - foo - { bar : &A !!str baz, boo : *A } - *A eoyml @tree = @parser.handler.root end def test_stream assert_instance_of Nodes::Stream, @tree end def test_documents assert_equal 1, @tree.children.length assert_instance_of Nodes::Document, @tree.children.first doc = @tree.children.first assert_equal [1,1], doc.version assert_equal [], doc.tag_directives assert_equal false, doc.implicit end def test_sequence doc = 
@tree.children.first assert_equal 1, doc.children.length seq = doc.children.first assert_instance_of Nodes::Sequence, seq assert_nil seq.anchor assert_nil seq.tag assert_equal true, seq.implicit assert_equal Nodes::Sequence::BLOCK, seq.style end def test_scalar doc = @tree.children.first seq = doc.children.first assert_equal 3, seq.children.length scalar = seq.children.first assert_instance_of Nodes::Scalar, scalar assert_equal 'foo', scalar.value assert_nil scalar.anchor assert_nil scalar.tag assert_equal true, scalar.plain assert_equal false, scalar.quoted assert_equal Nodes::Scalar::PLAIN, scalar.style end def test_mapping doc = @tree.children.first seq = doc.children.first map = seq.children[1] assert_instance_of Nodes::Mapping, map end def test_alias doc = @tree.children.first seq = doc.children.first assert_equal 3, seq.children.length al = seq.children[2] assert_instance_of Nodes::Alias, al assert_equal 'A', al.anchor end end end psych-2.2.4/test/psych/test_yaml.rb000066400000000000000000001006741305404671600173160ustar00rootroot00000000000000# -*- coding: us-ascii; mode: ruby; ruby-indent-level: 4; tab-width: 4 -*- # frozen_string_literal: false # vim:sw=4:ts=4 # $Id$ # require_relative 'helper' require 'ostruct' # [ruby-core:01946] module Psych_Tests StructTest = Struct::new( :c ) end class Psych_Unit_Tests < Psych::TestCase def teardown Psych.domain_types.clear end def test_y_method assert_raises(NoMethodError) do OpenStruct.new.y 1 end end def test_syck_compat time = Time.utc(2010, 10, 10) yaml = Psych.dump time assert_match "2010-10-10 00:00:00.000000000 Z", yaml end def test_multiline_regexp assert_cycle(Regexp.new("foo\nbar")) end # [ruby-core:34969] def test_regexp_with_n assert_cycle(Regexp.new('',0,'n')) end # # Tests modified from 00basic.t in Psych.pm # def test_basic_map # Simple map assert_parse_only( { 'one' => 'foo', 'three' => 'baz', 'two' => 'bar' }, < 'simple string', 2 => 42, 3 => '1 Single Quoted String', 4 => 'Psych\'s Double "Quoted" String', 5 => "A block\n with several\n lines.\n", 6 => "A \"chomped\" block", 7 => "A folded\n string\n", 8 => ": started string" }, < A folded string 8: ": started string" EOY ) end # # Test the specification examples # - Many examples have been changes because of whitespace problems that # caused the two to be inequivalent, or keys to be sorted wrong # def test_spec_simple_implicit_sequence # Simple implicit sequence assert_to_yaml( [ 'Mark McGwire', 'Sammy Sosa', 'Ken Griffey' ], < 65, 'avg' => 0.278, 'rbi' => 147 }, < [ 'Boston Red Sox', 'Detroit Tigers', 'New York Yankees' ], 'national' => [ 'New York Mets', 'Chicago Cubs', 'Atlanta Braves' ] }, < 'Mark McGwire', 'hr' => 65, 'avg' => 0.278}, {'name' => 'Sammy Sosa', 'hr' => 63, 'avg' => 0.288} ], < { 'hr' => 65, 'avg' => 0.278 }, 'Sammy Sosa' => { 'hr' => 63, 'avg' => 0.288 } }, < [ 'Mark McGwire', 'Sammy Sosa' ], 'rbi' => [ 'Sammy Sosa', 'Ken Griffey' ] }, < [ 'Mark McGwire', 'Sammy Sosa' ], 'rbi' => [ 'Sammy Sosa', 'Ken Griffey' ] }, <"EDI", "departure"=>"LAX", "fareref"=>"DOGMA", "currency"=>"GBP"}, {"arrival"=>"MEL", "departure"=>"SYD", "fareref"=>"MADF", "currency"=>"AUD"}, {"arrival"=>"MCO", "departure"=>"JFK", "fareref"=>"DFSF", "currency"=>"USD"}], <["fareref", "currency", "departure", "arrival"], "FARES"=>[{"arrival"=>"EDI", "departure"=>"LAX", "fareref"=>"DOGMA", "currency"=>"GBP"}, {"arrival"=>"MEL", "departure"=>"SYD", "fareref"=>"MADF", "currency"=>"AUD"}, {"arrival"=>"MCO", "departure"=>"JFK", "fareref"=>"DFSF", "currency"=>"USD"}]}, < [ Date.new( 2001, 7, 23 ) 
], [ 'New York Yankees', 'Atlanta Braves' ] => [ Date.new( 2001, 7, 2 ), Date.new( 2001, 8, 12 ), Date.new( 2001, 8, 14 ) ] }, < [ Date.new( 2001, 7, 2 ), Date.new( 2001, 8, 12 ), Date.new( 2001, 8, 14 ) ], [ 'Detroit Tigers', 'Chicago Cubs' ] => [ Date.new( 2001, 7, 23 ) ] }, < 34843, 'date' => Date.new( 2001, 1, 23 ), 'bill-to' => 'Chris Dumars', 'product' => [ { 'item' => 'Super Hoop', 'quantity' => 1 }, { 'item' => 'Basketball', 'quantity' => 4 }, { 'item' => 'Big Shoes', 'quantity' => 1 } ] }, < nil }, [ { 'five' => [ 'six' ] } ], [ 'seven' ] ], [ 'eight', 'nine' ] ], < Mark McGwire\'s year was crippled by a knee injury. EOY ) end def test_spec_preserve_indent # Preserve indented spaces assert_parse_only( "Sammy Sosa completed another fine season with great stats.\n\n 63 Home Runs\n 0.288 Batting Average\n\nWhat a year!\n", < Sammy Sosa completed another fine season with great stats. 63 Home Runs 0.288 Batting Average What a year! EOY ) end def test_spec_indentation_determines_scope assert_parse_only( { 'name' => 'Mark McGwire', 'accomplishment' => "Mark set a major league home run record in 1998.\n", 'stats' => "65 Home Runs\n0.278 Batting Average\n" }, < Mark set a major league home run record in 1998. stats: | 65 Home Runs 0.278 Batting Average EOY ) end def test_spec_multiline_scalars # Multiline flow scalars assert_parse_only( { 'plain' => 'This unquoted scalar spans many lines.', 'quoted' => "So does this quoted scalar.\n" }, < 12345, 'decimal' => 12345, 'octal' => '014'.oct, 'hexadecimal' => '0xC'.hex }, < 685230, 'decimal' => 685230, 'octal' => 02472256, 'hexadecimal' => 0x0A74AE, 'sexagesimal' => 685230 }, < 1230.15, 'exponential' => 1230.15, 'fixed' => 1230.15, 'negative infinity' => -1.0/0.0 }, < nil, true => true, false => false, 'string' => '12345' }, < 'Chris', 'family' => 'Dumars', 'address' => { 'lines' => "458 Walkman Dr.\nSuite #292\n", 'city' => 'Royal Oak', 'state' => 'MI', 'postal' => 48046 } } assert_parse_only( { 'invoice' => 34843, 'date' => Date.new( 2001, 1, 23 ), 'bill-to' => id001, 'ship-to' => id001, 'product' => [ { 'sku' => 'BL394D', 'quantity' => 4, 'description' => 'Basketball', 'price' => 450.00 }, { 'sku' => 'BL4438H', 'quantity' => 1, 'description' => 'Super Hoop', 'price' => 2392.00 } ], 'tax' => 251.42, 'total' => 4443.52, 'comments' => "Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338.\n" }, < Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338. EOY ) end def test_spec_log_file doc_ct = 0 Psych::load_documents( < This is an error message for the log file --- Time: 2001-11-23 15:02:31 -05:00 User: ed Warning: > A slightly different error message. 
--- Date: 2001-11-23 15:03:17 -05:00 User: ed Fatal: > Unknown variable "bar" Stack: - file: TopClass.py line: 23 code: | x = MoreObject("345\\n") - file: MoreClass.py line: 58 code: |- foo = bar EOY ) { |doc| case doc_ct when 0 assert_equal( doc, { 'Time' => mktime( 2001, 11, 23, 15, 01, 42, 00, "-05:00" ), 'User' => 'ed', 'Warning' => "This is an error message for the log file\n" } ) when 1 assert_equal( doc, { 'Time' => mktime( 2001, 11, 23, 15, 02, 31, 00, "-05:00" ), 'User' => 'ed', 'Warning' => "A slightly different error message.\n" } ) when 2 assert_equal( doc, { 'Date' => mktime( 2001, 11, 23, 15, 03, 17, 00, "-05:00" ), 'User' => 'ed', 'Fatal' => "Unknown variable \"bar\"\n", 'Stack' => [ { 'file' => 'TopClass.py', 'line' => 23, 'code' => "x = MoreObject(\"345\\n\")\n" }, { 'file' => 'MoreClass.py', 'line' => 58, 'code' => "foo = bar" } ] } ) end doc_ct += 1 } assert_equal( doc_ct, 3 ) end def test_spec_root_fold y = Psych::load( < 34843, 'date' => Date.new( 2001, 1, 23 ), 'total' => 4443.52 } ) end def test_spec_oneline_docs doc_ct = 0 Psych::load_documents( < { "customers"=> [ { "given"=>"Chris", "type"=>"domain customer", "family"=>"Dumars" } ], "type"=>"domain invoice" } }, <"contains three lines of text.\nThe third one starts with a\n# character. This isn't a comment.\n"}, < 12, 'also int' => 12, 'string' => '12' }, < [ 'ONE: value', 'ONE: value' ], 'different' => [ 'TWO: value' ] }, < 'This scalar has an anchor.', 'override' => a001, 'alias' => a001 }, < The alias node below is a repeated use of this value. alias : *A001 EOY ) end def test_spec_explicit_families Psych.add_domain_type( "somewhere.com,2002", 'type' ) { |type, val| "SOMEWHERE: #{val}" } assert_parse_only( { 'not-date' => '2002-04-28', 'picture' => "GIF89a\f\000\f\000\204\000\000\377\377\367\365\365\356\351\351\345fff\000\000\000\347\347\347^^^\363\363\355\216\216\216\340\340\340\237\237\237\223\223\223\247\247\247\236\236\236i^\020' \202\n\001\000;", 'hmm' => "SOMEWHERE: family above is short for\nhttp://somewhere.com/type\n" }, <7, "center"=>{"x"=>73, "y"=>129}, "TYPE"=>"Shape: graph/circle"}, {"finish"=>{"x"=>89, "y"=>102}, "TYPE"=>"Shape: graph/line", "start"=>{"x"=>73, "y"=>129}}, {"TYPE"=>"Shape: graph/text", "value"=>"Pretty vector drawing.", "start"=>{"x"=>73, "y"=>129}, "color"=>16772795}, "Shape Container"]], < [], 'in-line' => [ 'one', 'two', 'three', 'four', 'five' ], 'nested' => [ 'First item in top sequence', [ 'Subordinate sequence entry' ], "A multi-line sequence entry\n", 'Sixth item in top sequence' ] }, < A multi-line sequence entry - Sixth item in top sequence EOY ) end def test_spec_builtin_map # Assortment of mappings assert_parse_only( { 'empty' => {}, 'in-line' => { 'one' => 1, 'two' => 2 }, 'spanning' => { 'one' => 1, 'two' => 2 }, 'nested' => { 'first' => 'First entry', 'second' => { 'key' => 'Subordinate mapping' }, 'third' => [ 'Subordinate sequence', {}, 'Previous mapping is empty.', { 'A key' => 'value pair in a sequence.', 'A second' => 'key:value pair.' }, 'The previous entry is equal to the following one.', { 'A key' => 'value pair in a sequence.', 'A second' => 'key:value pair.' } ], 12.0 => 'This key is a float.', "?\n" => 'This key had to be protected.', "\a" => 'This key had to be escaped.', "This is a multi-line folded key\n" => "Whose value is also multi-line.\n", [ 'This key', 'is a sequence' ] => [ 'With a sequence value.' ] } }, < ? : This key had to be protected. "\\a" : This key had to be escaped. ? > This is a multi-line folded key : > Whose value is also multi-line. 
? - This key - is a sequence : - With a sequence value. # The following parses correctly, # but Ruby 1.6.* fails the comparison! # ? # This: key # is a: mapping # : # with a: mapping value. EOY ) end def test_spec_builtin_literal_blocks # Assortment of literal scalar blocks assert_parse_only( {"both are equal to"=>" This has no newline.", "is equal to"=>"The \\ ' \" characters may be\nfreely used. Leading white\n space is significant.\n\nLine breaks are significant.\nThus this value contains one\nempty line and ends with a\nsingle line break, but does\nnot start with one.\n", "also written as"=>" This has no newline.", "indented and chomped"=>" This has no newline.", "empty"=>"", "literal"=>"The \\ ' \" characters may be\nfreely used. Leading white\n space is significant.\n\nLine breaks are significant.\nThus this value contains one\nempty line and ends with a\nsingle line break, but does\nnot start with one.\n"}, < str1, 'same as "clipped" above' => str1, 'stripped' => str2, 'same as "stripped" above' => str2, 'kept' => str3, 'same as "kept" above' => str3 }, <"a single quote ' must be escaped.", "second"=>"! : \\ etc. can be used freely.", "is same as"=>"this contains six spaces\nand one line break", "empty"=>"", "span"=>"this contains six spaces\nand one line break"}, <"this contains four spaces", "third"=>"a \" or a \\ must be escaped.", "second"=>"! : etc. can be used freely.", "empty"=>"", "fourth"=>"this value ends with an LF.\n", "span"=>"this contains four spaces"}, < mktime( 2001, 12, 14, 21, 59, 43, ".10", "-05:00" ), "canonical" => mktime( 2001, 12, 15, 2, 59, 43, ".10" ), "date (noon UTC)" => Date.new( 2002, 12, 14), "valid iso8601" => mktime( 2001, 12, 14, 21, 59, 43, ".10", "-05:00" ) }, < arrow_gif, 'base64' => arrow_gif, 'description' => "The binary value above is a tiny arrow encoded as a gif image.\n" }, < /George McFly/i }, < 2, :UseVersion => 0 ) # y.add( # { 'hi' => 'hello', 'map' => # { 'good' => 'two' }, # 'time' => Time.now, # 'try' => /^po(.*)$/, # 'bye' => 'goodbye' # } # ) # y.add( { 'po' => 'nil', 'oper' => 90 } ) # y.add( { 'hi' => 'wow!', 'bye' => 'wow!' } ) # y.add( { [ 'Red Socks', 'Boston' ] => [ 'One', 'Two', 'Three' ] } ) # y.add( [ true, false, false ] ) #end # # Test YPath choices parsing # #def test_ypath_parsing # assert_path_segments( "/*/((one|three)/name|place)|//place", # [ ["*", "one", "name"], # ["*", "three", "name"], # ["*", "place"], # ["/", "place"] ] # ) #end # # Tests from Tanaka Akira on [ruby-core] # def test_akira # Commas in plain scalars [ruby-core:1066] assert_to_yaml( {"A"=>"A,","B"=>"B"}, <2, "2"=>3}, <"b"}] * 2, <"b", "c"=>"d"} } # Psych::load( a.to_yaml ) end # # Test Time.now cycle # def test_time_now_cycle # # From Minero Aoki [ruby-core:2305] # #require 'yaml' t = Time.now t = Time.at(t.tv_sec, t.tv_usec) 5.times do assert_cycle(t) end end # # Test Range cycle # def test_range_cycle # # From Minero Aoki [ruby-core:02306] # assert_cycle("a".."z") # # From Nobu Nakada [ruby-core:02311] # assert_cycle(0..1) assert_cycle(1.0e20 .. 
2.0e20) assert_cycle("0".."1") assert_cycle(".."..."...") assert_cycle(".rb"..".pl") assert_cycle(".rb"...".pl") assert_cycle('"'...".") assert_cycle("'"...".") end # # Circular references # def test_circular_references a = []; a[0] = a; a[1] = a inspect_str = "[[...], [...]]" assert_equal( inspect_str, Psych::load(Psych.dump(a)).inspect ) end # # Test Symbol cycle # def test_symbol_cycle # # From Aaron Schrab [ruby-Bugs:2535] # assert_cycle(:"^foo") end # # Test Numeric cycle # class NumericTest < Numeric def initialize(value) @value = value end def ==(other) @value == other.instance_eval{ @value } end end def test_numeric_cycle assert_cycle(1) # Fixnum assert_cycle(111111111111111111111111111111111) # Bignum assert_cycle(NumericTest.new(3)) # Subclass of Numeric end # # Test empty map/seq in map cycle # def test_empty_map_key # # empty seq as key # assert_cycle({[]=>""}) # # empty map as key # assert_cycle({{}=>""}) end # # contributed by riley lynch [ruby-Bugs-8548] # def test_object_id_collision omap = Psych::Omap.new 1000.times { |i| omap["key_#{i}"] = { "value" => i } } raise "id collision in ordered map" if Psych.dump(omap) =~ /id\d+/ end def test_date_out_of_range Psych::load('1900-01-01T00:00:00+00:00') end def test_normal_exit Psych.load("2000-01-01 00:00:00.#{"0"*1000} +00:00\n") # '[ruby-core:13735]' end def test_multiline_string_uses_literal_style yaml = Psych.dump("multi\nline\nstring") assert_match("|", yaml) end def test_string_starting_with_non_word_character_uses_double_quotes_without_exclamation_mark yaml = Psych.dump("@123'abc") refute_match("!", yaml) end def test_string_dump_with_colon yaml = Psych.dump 'x: foo' refute_match '!', yaml end def test_string_dump_starting_with_star yaml = Psych.dump '*foo' refute_match '!', yaml end end psych-2.2.4/test/psych/test_yamldbm.rb000066400000000000000000000117731305404671600200020ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' require 'tmpdir' begin require 'yaml/dbm' rescue LoadError end module Psych ::Psych::DBM = ::YAML::DBM unless defined?(::Psych::DBM) class YAMLDBMTest < TestCase def setup @dir = Dir.mktmpdir("rubytest-file") File.chown(-1, Process.gid, @dir) @yamldbm_file = make_tmp_filename("yamldbm") @yamldbm = YAML::DBM.new(@yamldbm_file) end def teardown @yamldbm.clear @yamldbm.close FileUtils.remove_entry_secure @dir end def make_tmp_filename(prefix) @dir + "/" + prefix + File.basename(__FILE__) + ".#{$$}.test" end def test_store @yamldbm.store('a','b') @yamldbm.store('c','d') assert_equal 'b', @yamldbm['a'] assert_equal 'd', @yamldbm['c'] assert_nil @yamldbm['e'] end def test_store_using_carret @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' assert_equal 'b', @yamldbm['a'] assert_equal 'd', @yamldbm['c'] assert_nil @yamldbm['e'] end def test_to_a @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' assert_equal([['a','b'],['c','d']], @yamldbm.to_a.sort) end def test_to_hash @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' assert_equal({'a'=>'b','c'=>'d'}, @yamldbm.to_hash) end def test_has_value? @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' assert_equal true, @yamldbm.has_value?('b') assert_equal true, @yamldbm.has_value?('d') assert_equal false, @yamldbm.has_value?('f') end # Note: # YAML::DBM#index makes warning from internal of ::DBM#index. # It says 'DBM#index is deprecated; use DBM#key', but DBM#key # behaves not same as DBM#index. 
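    # (Hedged sketch, not in the original file; variable names are illustrative.)
    # YAML::DBM serializes every value with #to_yaml before handing it to the
    # underlying ::DBM, so the raw database record for the Ruby string 'b' is
    # the YAML text "--- b\n":
    #
    #   ydbm['a'] = 'b'     # raw DBM stores "--- b\n"
    #   ydbm.key('b')       # => 'a'  (see test_key below)
    #
    # A plain ::DBM#key lookup would compare against that serialized text rather
    # than the Ruby value, which is presumably the mismatch the note above describes.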
# # def test_index # @yamldbm['a'] = 'b' # @yamldbm['c'] = 'd' # assert_equal 'a', @yamldbm.index('b') # assert_equal 'c', @yamldbm.index('d') # assert_nil @yamldbm.index('f') # end def test_key @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' assert_equal 'a', @yamldbm.key('b') assert_equal 'c', @yamldbm.key('d') assert_nil @yamldbm.key('f') end def test_fetch assert_equal('bar', @yamldbm['foo']='bar') assert_equal('bar', @yamldbm.fetch('foo')) assert_nil @yamldbm.fetch('bar') assert_equal('baz', @yamldbm.fetch('bar', 'baz')) assert_equal('foobar', @yamldbm.fetch('bar') {|key| 'foo' + key }) end def test_shift @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' assert_equal([['a','b'], ['c','d']], [@yamldbm.shift, @yamldbm.shift].sort) assert_nil @yamldbm.shift end def test_invert @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' assert_equal({'b'=>'a','d'=>'c'}, @yamldbm.invert) end def test_update @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' @yamldbm.update({'c'=>'d','e'=>'f'}) assert_equal 'b', @yamldbm['a'] assert_equal 'd', @yamldbm['c'] assert_equal 'f', @yamldbm['e'] end def test_replace @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' @yamldbm.replace({'c'=>'d','e'=>'f'}) assert_nil @yamldbm['a'] assert_equal 'd', @yamldbm['c'] assert_equal 'f', @yamldbm['e'] end def test_delete @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' assert_equal 'b', @yamldbm.delete('a') assert_nil @yamldbm['a'] assert_equal 'd', @yamldbm['c'] assert_nil @yamldbm.delete('e') end def test_delete_if @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' @yamldbm['e'] = 'f' @yamldbm.delete_if {|k,v| k == 'a'} assert_nil @yamldbm['a'] assert_equal 'd', @yamldbm['c'] assert_equal 'f', @yamldbm['e'] @yamldbm.delete_if {|k,v| v == 'd'} assert_nil @yamldbm['c'] assert_equal 'f', @yamldbm['e'] @yamldbm.delete_if {|k,v| false } assert_equal 'f', @yamldbm['e'] end def test_reject @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' @yamldbm['e'] = 'f' assert_equal({'c'=>'d','e'=>'f'}, @yamldbm.reject {|k,v| k == 'a'}) assert_equal({'a'=>'b','e'=>'f'}, @yamldbm.reject {|k,v| v == 'd'}) assert_equal({'a'=>'b','c'=>'d','e'=>'f'}, @yamldbm.reject {false}) end def test_values assert_equal [], @yamldbm.values @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' assert_equal ['b','d'], @yamldbm.values.sort end def test_values_at @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' assert_equal ['b','d'], @yamldbm.values_at('a','c') end def test_selsct @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' @yamldbm['e'] = 'f' assert_equal(['b','d'], @yamldbm.select('a','c')) end def test_selsct_with_block @yamldbm['a'] = 'b' @yamldbm['c'] = 'd' @yamldbm['e'] = 'f' assert_equal([['a','b']], @yamldbm.select {|k,v| k == 'a'}) assert_equal([['c','d']], @yamldbm.select {|k,v| v == 'd'}) assert_equal([], @yamldbm.select {false}) end end end if defined?(YAML::DBM) && defined?(Psych) psych-2.2.4/test/psych/test_yamlstore.rb000066400000000000000000000043001305404671600203600ustar00rootroot00000000000000# frozen_string_literal: false require_relative 'helper' require 'yaml/store' require 'tmpdir' module Psych Psych::Store = YAML::Store unless defined?(Psych::Store) class YAMLStoreTest < TestCase def setup @dir = Dir.mktmpdir("rubytest-file") File.chown(-1, Process.gid, @dir) @yamlstore_file = make_tmp_filename("yamlstore") @yamlstore = YAML::Store.new(@yamlstore_file) end def teardown FileUtils.remove_entry_secure @dir end def make_tmp_filename(prefix) @dir + "/" + prefix + File.basename(__FILE__) + ".#{$$}.test" end def test_opening_new_file_in_readonly_mode_should_result_in_empty_values @yamlstore.transaction(true) do assert_nil @yamlstore[:foo] 
assert_nil @yamlstore[:bar] end end def test_opening_new_file_in_readwrite_mode_should_result_in_empty_values @yamlstore.transaction do assert_nil @yamlstore[:foo] assert_nil @yamlstore[:bar] end end def test_data_should_be_loaded_correctly_when_in_readonly_mode @yamlstore.transaction do @yamlstore[:foo] = "bar" end @yamlstore.transaction(true) do assert_equal "bar", @yamlstore[:foo] end end def test_data_should_be_loaded_correctly_when_in_readwrite_mode @yamlstore.transaction do @yamlstore[:foo] = "bar" end @yamlstore.transaction do assert_equal "bar", @yamlstore[:foo] end end def test_changes_after_commit_are_discarded @yamlstore.transaction do @yamlstore[:foo] = "bar" @yamlstore.commit @yamlstore[:foo] = "baz" end @yamlstore.transaction(true) do assert_equal "bar", @yamlstore[:foo] end end def test_changes_are_not_written_on_abort @yamlstore.transaction do @yamlstore[:foo] = "bar" @yamlstore.abort end @yamlstore.transaction(true) do assert_nil @yamlstore[:foo] end end def test_writing_inside_readonly_transaction_raises_error assert_raises(PStore::Error) do @yamlstore.transaction(true) do @yamlstore[:foo] = "bar" end end end end end if defined?(Psych) psych-2.2.4/test/psych/visitors/000077500000000000000000000000001305404671600166425ustar00rootroot00000000000000psych-2.2.4/test/psych/visitors/test_depth_first.rb000066400000000000000000000023261305404671600225440ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/helper' module Psych module Visitors class TestDepthFirst < TestCase class Collector < Struct.new(:calls) def initialize(calls = []) super end def call obj calls << obj end end def test_scalar collector = Collector.new visitor = Visitors::DepthFirst.new collector visitor.accept Psych.parse_stream '--- hello' assert_equal 3, collector.calls.length end def test_sequence collector = Collector.new visitor = Visitors::DepthFirst.new collector visitor.accept Psych.parse_stream "---\n- hello" assert_equal 4, collector.calls.length end def test_mapping collector = Collector.new visitor = Visitors::DepthFirst.new collector visitor.accept Psych.parse_stream "---\nhello: world" assert_equal 5, collector.calls.length end def test_alias collector = Collector.new visitor = Visitors::DepthFirst.new collector visitor.accept Psych.parse_stream "--- &yay\n- foo\n- *yay\n" assert_equal 5, collector.calls.length end end end end psych-2.2.4/test/psych/visitors/test_emitter.rb000066400000000000000000000072231305404671600217030ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/helper' module Psych module Visitors class TestEmitter < TestCase def setup super @io = StringIO.new @visitor = Visitors::Emitter.new @io end def test_options io = StringIO.new visitor = Visitors::Emitter.new io, :indentation => 3 s = Nodes::Stream.new doc = Nodes::Document.new mapping = Nodes::Mapping.new m2 = Nodes::Mapping.new m2.children << Nodes::Scalar.new('a') m2.children << Nodes::Scalar.new('b') mapping.children << Nodes::Scalar.new('key') mapping.children << m2 doc.children << mapping s.children << doc visitor.accept s assert_match(/^[ ]{3}a/, io.string) end def test_stream s = Nodes::Stream.new @visitor.accept s assert_equal '', @io.string end def test_document s = Nodes::Stream.new doc = Nodes::Document.new [1,1] scalar = Nodes::Scalar.new 'hello world' doc.children << scalar s.children << doc @visitor.accept s assert_match(/1.1/, @io.string) assert_equal @io.string, s.yaml end def test_document_implicit_end s = Nodes::Stream.new doc = Nodes::Document.new mapping = 
Nodes::Mapping.new mapping.children << Nodes::Scalar.new('key') mapping.children << Nodes::Scalar.new('value') doc.children << mapping s.children << doc @visitor.accept s assert_match(/key: value/, @io.string) assert_equal @io.string, s.yaml assert(/\.\.\./ !~ s.yaml) end def test_scalar s = Nodes::Stream.new doc = Nodes::Document.new scalar = Nodes::Scalar.new 'hello world' doc.children << scalar s.children << doc @visitor.accept s assert_match(/hello/, @io.string) assert_equal @io.string, s.yaml end def test_scalar_with_tag s = Nodes::Stream.new doc = Nodes::Document.new scalar = Nodes::Scalar.new 'hello world', nil, '!str', false, false, 5 doc.children << scalar s.children << doc @visitor.accept s assert_match(/str/, @io.string) assert_match(/hello/, @io.string) assert_equal @io.string, s.yaml end def test_sequence s = Nodes::Stream.new doc = Nodes::Document.new scalar = Nodes::Scalar.new 'hello world' seq = Nodes::Sequence.new seq.children << scalar doc.children << seq s.children << doc @visitor.accept s assert_match(/- hello/, @io.string) assert_equal @io.string, s.yaml end def test_mapping s = Nodes::Stream.new doc = Nodes::Document.new mapping = Nodes::Mapping.new mapping.children << Nodes::Scalar.new('key') mapping.children << Nodes::Scalar.new('value') doc.children << mapping s.children << doc @visitor.accept s assert_match(/key: value/, @io.string) assert_equal @io.string, s.yaml end def test_alias s = Nodes::Stream.new doc = Nodes::Document.new mapping = Nodes::Mapping.new mapping.children << Nodes::Scalar.new('key', 'A') mapping.children << Nodes::Alias.new('A') doc.children << mapping s.children << doc @visitor.accept s assert_match(/&A key: \*A/, @io.string) assert_equal @io.string, s.yaml end end end end psych-2.2.4/test/psych/visitors/test_to_ruby.rb000066400000000000000000000247111305404671600217160ustar00rootroot00000000000000# coding: US-ASCII # frozen_string_literal: false require 'psych/helper' module Psych module Visitors class TestToRuby < TestCase def setup super @visitor = ToRuby.create end def test_object mapping = Nodes::Mapping.new nil, "!ruby/object" mapping.children << Nodes::Scalar.new('foo') mapping.children << Nodes::Scalar.new('bar') o = mapping.to_ruby assert_equal 'bar', o.instance_variable_get(:@foo) end def test_tz_00_00_loads_without_error assert Psych.load('1900-01-01T00:00:00+00:00') end def test_legacy_struct foo = Struct.new('AWESOME', :bar) assert_equal foo.new('baz'), Psych.load(<<-eoyml) !ruby/struct:AWESOME bar: baz eoyml end def test_binary gif = "GIF89a\f\x00\f\x00\x84\x00\x00\xFF\xFF\xF7\xF5\xF5\xEE\xE9\xE9\xE5fff\x00\x00\x00\xE7\xE7\xE7^^^\xF3\xF3\xED\x8E\x8E\x8E\xE0\xE0\xE0\x9F\x9F\x9F\x93\x93\x93\xA7\xA7\xA7\x9E\x9E\x9Eiiiccc\xA3\xA3\xA3\x84\x84\x84\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9!\xFE\x0EMade with GIMP\x00,\x00\x00\x00\x00\f\x00\f\x00\x00\x05, \x8E\x810\x9E\xE3@\x14\xE8i\x10\xC4\xD1\x8A\b\x1C\xCF\x80M$z\xEF\xFF0\x85p\xB8\xB01f\r\e\xCE\x01\xC3\x01\x1E\x10' \x82\n\x01\x00;" hash = Psych.load(<<-'eoyaml') canonical: !!binary "\ R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\ OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\ +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\ AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=" generic: !binary | R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5 
OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+ +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs= description: The binary value above is a tiny arrow encoded as a gif image. eoyaml assert_equal gif, hash['canonical'] assert_equal gif, hash['generic'] end A = Struct.new(:foo) def test_struct s = A.new('bar') mapping = Nodes::Mapping.new nil, "!ruby/struct:#{s.class}" mapping.children << Nodes::Scalar.new('foo') mapping.children << Nodes::Scalar.new('bar') ruby = mapping.to_ruby assert_equal s.class, ruby.class assert_equal s.foo, ruby.foo assert_equal s, ruby end def test_anon_struct_legacy s = Struct.new(:foo).new('bar') mapping = Nodes::Mapping.new nil, '!ruby/struct:' mapping.children << Nodes::Scalar.new('foo') mapping.children << Nodes::Scalar.new('bar') assert_equal s.foo, mapping.to_ruby.foo end def test_anon_struct s = Struct.new(:foo).new('bar') mapping = Nodes::Mapping.new nil, '!ruby/struct' mapping.children << Nodes::Scalar.new('foo') mapping.children << Nodes::Scalar.new('bar') assert_equal s.foo, mapping.to_ruby.foo end def test_exception exc = ::Exception.new 'hello' mapping = Nodes::Mapping.new nil, '!ruby/exception' mapping.children << Nodes::Scalar.new('message') mapping.children << Nodes::Scalar.new('hello') ruby = mapping.to_ruby assert_equal exc.class, ruby.class assert_equal exc.message, ruby.message end def test_regexp node = Nodes::Scalar.new('/foo/', nil, '!ruby/regexp') assert_equal(/foo/, node.to_ruby) node = Nodes::Scalar.new('/foo/m', nil, '!ruby/regexp') assert_equal(/foo/m, node.to_ruby) node = Nodes::Scalar.new('/foo/ix', nil, '!ruby/regexp') assert_equal(/foo/ix, node.to_ruby) end def test_time now = Time.now zone = now.strftime('%z') zone = " #{zone[0,3]}:#{zone[3,5]}" formatted = now.strftime("%Y-%m-%d %H:%M:%S.%9N") + zone assert_equal now, Nodes::Scalar.new(formatted).to_ruby end def test_time_utc now = Time.now.utc formatted = now.strftime("%Y-%m-%d %H:%M:%S") + ".%09dZ" % [now.nsec] assert_equal now, Nodes::Scalar.new(formatted).to_ruby end def test_time_utc_no_z now = Time.now.utc formatted = now.strftime("%Y-%m-%d %H:%M:%S") + ".%09d" % [now.nsec] assert_equal now, Nodes::Scalar.new(formatted).to_ruby end def test_date d = '1980-12-16' actual = Date.strptime(d, '%Y-%m-%d') date = Nodes::Scalar.new(d, nil, 'tag:yaml.org,2002:timestamp', false) assert_equal actual, date.to_ruby end def test_rational mapping = Nodes::Mapping.new nil, '!ruby/object:Rational' mapping.children << Nodes::Scalar.new('denominator') mapping.children << Nodes::Scalar.new('2') mapping.children << Nodes::Scalar.new('numerator') mapping.children << Nodes::Scalar.new('1') assert_equal Rational(1,2), mapping.to_ruby end def test_complex mapping = Nodes::Mapping.new nil, '!ruby/object:Complex' mapping.children << Nodes::Scalar.new('image') mapping.children << Nodes::Scalar.new('2') mapping.children << Nodes::Scalar.new('real') mapping.children << Nodes::Scalar.new('1') assert_equal Complex(1,2), mapping.to_ruby end def test_complex_string node = Nodes::Scalar.new '3+4i', nil, "!ruby/object:Complex" assert_equal Complex(3, 4), node.to_ruby end def test_rational_string node = Nodes::Scalar.new '1/2', nil, "!ruby/object:Rational" assert_equal Rational(1, 2), node.to_ruby end def test_range_string node = Nodes::Scalar.new '1..2', nil, "!ruby/range" assert_equal 1..2, node.to_ruby end def test_range_string_triple node = Nodes::Scalar.new '1...3', nil, "!ruby/range" assert_equal 1...3, node.to_ruby 
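        # Two dots in the tagged scalar build an inclusive Range, three dots an
        # exclusive one, mirroring Ruby's own range literal syntax.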
end def test_integer i = Nodes::Scalar.new('1', nil, 'tag:yaml.org,2002:int') assert_equal 1, i.to_ruby assert_equal 1, Nodes::Scalar.new('1').to_ruby i = Nodes::Scalar.new('-1', nil, 'tag:yaml.org,2002:int') assert_equal(-1, i.to_ruby) assert_equal(-1, Nodes::Scalar.new('-1').to_ruby) assert_equal 1, Nodes::Scalar.new('+1').to_ruby end def test_int_ignore ['1,000', '1_000'].each do |num| i = Nodes::Scalar.new(num, nil, 'tag:yaml.org,2002:int') assert_equal 1000, i.to_ruby assert_equal 1000, Nodes::Scalar.new(num).to_ruby end end def test_float_ignore ['1,000.3', '1_000.3'].each do |num| i = Nodes::Scalar.new(num, nil, 'tag:yaml.org,2002:float') assert_equal 1000.3, i.to_ruby i = Nodes::Scalar.new(num, nil, '!float') assert_equal 1000.3, i.to_ruby assert_equal 1000.3, Nodes::Scalar.new(num).to_ruby end end # http://yaml.org/type/bool.html def test_boolean_true %w{ yes Yes YES true True TRUE on On ON }.each do |t| i = Nodes::Scalar.new(t, nil, 'tag:yaml.org,2002:bool') assert_equal true, i.to_ruby assert_equal true, Nodes::Scalar.new(t).to_ruby end end # http://yaml.org/type/bool.html def test_boolean_false %w{ no No NO false False FALSE off Off OFF }.each do |t| i = Nodes::Scalar.new(t, nil, 'tag:yaml.org,2002:bool') assert_equal false, i.to_ruby assert_equal false, Nodes::Scalar.new(t).to_ruby end end def test_float i = Nodes::Scalar.new('12', nil, 'tag:yaml.org,2002:float') assert_equal 12.0, i.to_ruby i = Nodes::Scalar.new('1.2', nil, 'tag:yaml.org,2002:float') assert_equal 1.2, i.to_ruby i = Nodes::Scalar.new('1.2') assert_equal 1.2, i.to_ruby assert_equal 1, Nodes::Scalar.new('.Inf').to_ruby.infinite? assert_equal 1, Nodes::Scalar.new('.inf').to_ruby.infinite? assert_equal 1, Nodes::Scalar.new('.Inf', nil, 'tag:yaml.org,2002:float').to_ruby.infinite? assert_equal(-1, Nodes::Scalar.new('-.inf').to_ruby.infinite?) assert_equal(-1, Nodes::Scalar.new('-.Inf').to_ruby.infinite?) assert_equal(-1, Nodes::Scalar.new('-.Inf', nil, 'tag:yaml.org,2002:float').to_ruby.infinite?) assert Nodes::Scalar.new('.NaN').to_ruby.nan? assert Nodes::Scalar.new('.NaN', nil, 'tag:yaml.org,2002:float').to_ruby.nan? 
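        # The special YAML floats round-trip as asserted above: '.inf' / '.Inf'
        # become positive infinity, '-.inf' / '-.Inf' negative infinity, and
        # '.NaN' a NaN Float, with or without the explicit
        # tag:yaml.org,2002:float tag.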
end def test_exp_float exp = 1.2e+30 i = Nodes::Scalar.new(exp.to_s, nil, 'tag:yaml.org,2002:float') assert_equal exp, i.to_ruby assert_equal exp, Nodes::Scalar.new(exp.to_s).to_ruby end def test_scalar scalar = Nodes::Scalar.new('foo') assert_equal 'foo', @visitor.accept(scalar) assert_equal 'foo', scalar.to_ruby end def test_sequence seq = Nodes::Sequence.new seq.children << Nodes::Scalar.new('foo') seq.children << Nodes::Scalar.new('bar') assert_equal %w{ foo bar }, seq.to_ruby end def test_mapping mapping = Nodes::Mapping.new mapping.children << Nodes::Scalar.new('foo') mapping.children << Nodes::Scalar.new('bar') assert_equal({'foo' => 'bar'}, mapping.to_ruby) end def test_document doc = Nodes::Document.new doc.children << Nodes::Scalar.new('foo') assert_equal 'foo', doc.to_ruby end def test_stream a = Nodes::Document.new a.children << Nodes::Scalar.new('foo') b = Nodes::Document.new b.children << Nodes::Scalar.new('bar') stream = Nodes::Stream.new stream.children << a stream.children << b assert_equal %w{ foo bar }, stream.to_ruby end def test_alias seq = Nodes::Sequence.new seq.children << Nodes::Scalar.new('foo', 'A') seq.children << Nodes::Alias.new('A') list = seq.to_ruby assert_equal %w{ foo foo }, list assert_equal list[0].object_id, list[1].object_id end def test_mapping_with_str_tag mapping = Nodes::Mapping.new(nil, '!strawberry') mapping.children << Nodes::Scalar.new('foo') mapping.children << Nodes::Scalar.new('bar') assert_equal({'foo' => 'bar'}, mapping.to_ruby) end end end end psych-2.2.4/test/psych/visitors/test_yaml_tree.rb000066400000000000000000000110731305404671600222110ustar00rootroot00000000000000# frozen_string_literal: false require 'psych/helper' module Psych module Visitors class TestYAMLTree < TestCase def setup super @v = Visitors::YAMLTree.create end def test_tree_can_be_called_twice @v.start @v << Object.new t = @v.tree assert_equal t, @v.tree end def test_yaml_tree_can_take_an_emitter io = StringIO.new e = Psych::Emitter.new io v = Visitors::YAMLTree.create({}, e) v.start v << "hello world" v.finish assert_match "hello world", io.string end def test_binary_formatting gif = "GIF89a\f\x00\f\x00\x84\x00\x00\xFF\xFF\xF7\xF5\xF5\xEE\xE9\xE9\xE5fff\x00\x00\x00\xE7\xE7\xE7^^^\xF3\xF3\xED\x8E\x8E\x8E\xE0\xE0\xE0\x9F\x9F\x9F\x93\x93\x93\xA7\xA7\xA7\x9E\x9E\x9Eiiiccc\xA3\xA3\xA3\x84\x84\x84\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9\xFF\xFE\xF9!\xFE\x0EMade with GIMP\x00,\x00\x00\x00\x00\f\x00\f\x00\x00\x05, \x8E\x810\x9E\xE3@\x14\xE8i\x10\xC4\xD1\x8A\b\x1C\xCF\x80M$z\xEF\xFF0\x85p\xB8\xB01f\r\e\xCE\x01\xC3\x01\x1E\x10' \x82\n\x01\x00;" @v << gif scalar = @v.tree.children.first.children.first assert_equal Psych::Nodes::Scalar::LITERAL, scalar.style end def test_object_has_no_class yaml = Psych.dump(Object.new) assert(Psych.dump(Object.new) !~ /Object/, yaml) end def test_struct_const foo = Struct.new("Foo", :bar) assert_cycle foo.new('bar') Struct.instance_eval { remove_const(:Foo) } end A = Struct.new(:foo) def test_struct assert_cycle A.new('bar') end def test_struct_anon s = Struct.new(:foo).new('bar') obj = Psych.load(Psych.dump(s)) assert_equal s.foo, obj.foo end def test_override_method s = Struct.new(:method).new('override') obj = Psych.load(Psych.dump(s)) assert_equal s.method, obj.method end def test_exception ex = Exception.new 'foo' loaded = Psych.load(Psych.dump(ex)) assert_equal ex.message, loaded.message assert_equal ex.class, loaded.class 
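        # Exceptions are dumped as !ruby/exception mappings (see test_exception
        # in test_to_ruby.rb above), so both the message and the concrete class
        # are restored on load.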
end def test_regexp assert_cycle(/foo/) assert_cycle(/foo/i) assert_cycle(/foo/mx) end def test_time t = Time.now assert_equal t, Psych.load(Psych.dump(t)) end def test_date date = Date.strptime('2002-12-14', '%Y-%m-%d') assert_cycle date end def test_rational assert_cycle Rational(1,2) end def test_complex assert_cycle Complex(1,2) end def test_scalar assert_cycle 'foo' assert_cycle ':foo' assert_cycle '' assert_cycle ':' end def test_boolean assert_cycle true assert_cycle 'true' assert_cycle false assert_cycle 'false' end def test_range_inclusive assert_cycle 1..2 end def test_range_exclusive assert_cycle 1...2 end def test_anon_class assert_raises(TypeError) do @v.accept Class.new end assert_raises(TypeError) do Psych.dump(Class.new) end end def test_hash assert_cycle('a' => 'b') end def test_list assert_cycle(%w{ a b }) assert_cycle([1, 2.2]) end def test_symbol assert_cycle :foo end def test_int assert_cycle 1 assert_cycle(-1) assert_cycle '1' assert_cycle '-1' end def test_float assert_cycle 1.2 assert_cycle '1.2' assert Psych.load(Psych.dump(0.0 / 0.0)).nan? assert_equal 1, Psych.load(Psych.dump(1 / 0.0)).infinite? assert_equal(-1, Psych.load(Psych.dump(-1 / 0.0)).infinite?) end def test_string assert_match(/'017'/, Psych.dump({'a' => '017'})) assert_match(/'019'/, Psych.dump({'a' => '019'})) assert_match(/'01818'/, Psych.dump({'a' => '01818'})) end # http://yaml.org/type/null.html def test_nil assert_cycle nil assert_equal nil, Psych.load('null') assert_equal nil, Psych.load('Null') assert_equal nil, Psych.load('NULL') assert_equal nil, Psych.load('~') assert_equal({'foo' => nil}, Psych.load('foo: ')) assert_cycle 'null' assert_cycle 'nUll' assert_cycle '~' end end end end
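# (Hedged, self-contained sketch; not part of the psych-2.2.4 archive above, and
# variable names are illustrative.) It strings together the public pieces the
# visitor tests exercise: Visitors::YAMLTree builds a Psych::Nodes tree from a
# Ruby object, Visitors::Emitter serializes that tree, and Psych.load revives it.
require 'psych'
require 'stringio'

value = { 'list' => [1, 2.5, nil, 'text'], 'nested' => { 'a' => 'b' } }

builder = Psych::Visitors::YAMLTree.create
builder.start
builder << value
stream_node = builder.tree                  # a Psych::Nodes::Stream

io = StringIO.new
Psych::Visitors::Emitter.new(io).accept(stream_node)

reloaded = Psych.load(io.string)
raise 'round trip failed' unless reloaded == value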